]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/s390/s390.c
builtins.c, [...]: Avoid "`" as left quote, using "'" or %q, %< and %> as appropriate.
[thirdparty/gcc.git] / gcc / config / s390 / s390.c
CommitLineData
9db1d521 1/* Subroutines used for code generation on IBM S/390 and zSeries
283334f0
KH
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
9db1d521 4 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
f314b9b1 5 Ulrich Weigand (uweigand@de.ibm.com).
9db1d521 6
58add37a 7This file is part of GCC.
9db1d521 8
58add37a
UW
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
11Software Foundation; either version 2, or (at your option) any later
12version.
9db1d521 13
58add37a
UW
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
9db1d521
HP
18
19You should have received a copy of the GNU General Public License
58add37a
UW
20along with GCC; see the file COPYING. If not, write to the Free
21Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2202111-1307, USA. */
9db1d521
HP
23
24#include "config.h"
9db1d521 25#include "system.h"
4977bab6
ZW
26#include "coretypes.h"
27#include "tm.h"
9db1d521
HP
28#include "rtl.h"
29#include "tree.h"
30#include "tm_p.h"
31#include "regs.h"
32#include "hard-reg-set.h"
33#include "real.h"
34#include "insn-config.h"
35#include "conditions.h"
36#include "output.h"
37#include "insn-attr.h"
38#include "flags.h"
39#include "except.h"
40#include "function.h"
41#include "recog.h"
42#include "expr.h"
7c82a1ed 43#include "reload.h"
9db1d521
HP
44#include "toplev.h"
45#include "basic-block.h"
4023fb28 46#include "integrate.h"
9db1d521
HP
47#include "ggc.h"
48#include "target.h"
49#include "target-def.h"
0d3c08b6 50#include "debug.h"
f1e639b1 51#include "langhooks.h"
a41c6c53 52#include "optabs.h"
63694b5e 53#include "tree-gimple.h"
9db1d521 54
114278e7
RH
55/* Machine-specific symbol_ref flags. */
56#define SYMBOL_FLAG_ALIGN1 (SYMBOL_FLAG_MACH_DEP << 0)
57
58
9c808aad 59static bool s390_assemble_integer (rtx, unsigned int, int);
9c808aad
AJ
60static void s390_encode_section_info (tree, rtx, int);
61static bool s390_cannot_force_const_mem (rtx);
62static rtx s390_delegitimize_address (rtx);
8c17530e 63static bool s390_return_in_memory (tree, tree);
9c808aad
AJ
64static void s390_init_builtins (void);
65static rtx s390_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
66static void s390_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
67 HOST_WIDE_INT, tree);
68static enum attr_type s390_safe_attr_type (rtx);
69
9c808aad
AJ
70static int s390_adjust_priority (rtx, int);
71static int s390_issue_rate (void);
9c808aad 72static int s390_first_cycle_multipass_dfa_lookahead (void);
9bb86f41 73static bool s390_cannot_copy_insn_p (rtx);
9c808aad
AJ
74static bool s390_rtx_costs (rtx, int, int, int *);
75static int s390_address_cost (rtx);
76static void s390_reorg (void);
77static bool s390_valid_pointer_mode (enum machine_mode);
c35d187f 78static tree s390_build_builtin_va_list (void);
63694b5e 79static tree s390_gimplify_va_arg (tree, tree, tree *, tree *);
ed9676cf
AK
80static bool s390_function_ok_for_sibcall (tree, tree);
81static bool s390_call_saved_register_used (tree);
8cd5a4e0
RH
82static bool s390_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode mode,
83 tree, bool);
ffdda752 84static bool s390_fixed_condition_code_regs (unsigned int *, unsigned int *);
52609473 85
301d03af
RS
86#undef TARGET_ASM_ALIGNED_HI_OP
87#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
88#undef TARGET_ASM_ALIGNED_DI_OP
89#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
90#undef TARGET_ASM_INTEGER
91#define TARGET_ASM_INTEGER s390_assemble_integer
92
f314b9b1
UW
93#undef TARGET_ASM_OPEN_PAREN
94#define TARGET_ASM_OPEN_PAREN ""
95
96#undef TARGET_ASM_CLOSE_PAREN
97#define TARGET_ASM_CLOSE_PAREN ""
98
fb49053f
RH
99#undef TARGET_ENCODE_SECTION_INFO
100#define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
fd3cd001
UW
101
102#ifdef HAVE_AS_TLS
103#undef TARGET_HAVE_TLS
104#define TARGET_HAVE_TLS true
105#endif
106#undef TARGET_CANNOT_FORCE_CONST_MEM
107#define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
108
69bd9368
RS
109#undef TARGET_DELEGITIMIZE_ADDRESS
110#define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
111
8c17530e
UW
112#undef TARGET_RETURN_IN_MEMORY
113#define TARGET_RETURN_IN_MEMORY s390_return_in_memory
114
fd3cd001
UW
115#undef TARGET_INIT_BUILTINS
116#define TARGET_INIT_BUILTINS s390_init_builtins
117#undef TARGET_EXPAND_BUILTIN
118#define TARGET_EXPAND_BUILTIN s390_expand_builtin
fb49053f 119
3961e8fe
RH
120#undef TARGET_ASM_OUTPUT_MI_THUNK
121#define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
122#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
123#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
c590b625 124
52609473
HP
125#undef TARGET_SCHED_ADJUST_PRIORITY
126#define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
077dab3b
HP
127#undef TARGET_SCHED_ISSUE_RATE
128#define TARGET_SCHED_ISSUE_RATE s390_issue_rate
52609473
HP
129#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
130#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
077dab3b 131
9bb86f41
UW
132#undef TARGET_CANNOT_COPY_INSN_P
133#define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
3c50106f
RH
134#undef TARGET_RTX_COSTS
135#define TARGET_RTX_COSTS s390_rtx_costs
dcefdf67
RH
136#undef TARGET_ADDRESS_COST
137#define TARGET_ADDRESS_COST s390_address_cost
077dab3b 138
18dbd950
RS
139#undef TARGET_MACHINE_DEPENDENT_REORG
140#define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
141
c7453384
EC
142#undef TARGET_VALID_POINTER_MODE
143#define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
144
c35d187f
RH
145#undef TARGET_BUILD_BUILTIN_VA_LIST
146#define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
63694b5e
UW
147#undef TARGET_GIMPLIFY_VA_ARG_EXPR
148#define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg
c35d187f 149
101ad855
KH
150#undef TARGET_PROMOTE_FUNCTION_ARGS
151#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
152#undef TARGET_PROMOTE_FUNCTION_RETURN
153#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
8cd5a4e0
RH
154#undef TARGET_PASS_BY_REFERENCE
155#define TARGET_PASS_BY_REFERENCE s390_pass_by_reference
101ad855 156
ed9676cf
AK
157#undef TARGET_FUNCTION_OK_FOR_SIBCALL
158#define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
159
ffdda752
AK
160#undef TARGET_FIXED_CONDITION_CODE_REGS
161#define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs
162
9db1d521
HP
163struct gcc_target targetm = TARGET_INITIALIZER;
164
165extern int reload_completed;
166
4023fb28
UW
167/* The alias set for prologue/epilogue register save/restore. */
168static int s390_sr_alias_set = 0;
169
9db1d521
HP
170/* Save information from a "cmpxx" operation until the branch or scc is
171 emitted. */
172rtx s390_compare_op0, s390_compare_op1;
173
994fe660
UW
/* Structure used to hold the components of a S/390 memory
   address.  A legitimate address on S/390 is of the general
   form
        base + index + displacement
   where any of the components is optional.

   base and index are registers of the class ADDR_REGS,
   displacement is an unsigned 12-bit immediate constant.  */

struct s390_address
{
  /* Base register, or NULL_RTX if the address has none.  */
  rtx base;
  /* Index register, or NULL_RTX if the address has none.  */
  rtx indx;
  /* Constant displacement, or NULL_RTX if the address has none.  */
  rtx disp;
  /* NOTE(review): appears to flag addresses known to be pointers;
     confirm exact semantics against s390_decompose_address.  */
  int pointer;
};
190
be2c2a4b 191/* Which cpu are we tuning for. */
f13e0d4e
UW
192enum processor_type s390_tune;
193enum processor_flags s390_tune_flags;
1fec52be
HP
194/* Which instruction set architecture to use. */
195enum processor_type s390_arch;
f13e0d4e 196enum processor_flags s390_arch_flags;
1fec52be
HP
197
198/* Strings to hold which cpu and instruction set architecture to use. */
be2c2a4b 199const char *s390_tune_string; /* for -mtune=<xxx> */
1fec52be
HP
200const char *s390_arch_string; /* for -march=<xxx> */
201
064e93c2
UW
202/* String to specify backchain mode:
203 "" no-backchain, "1" backchain, "2" kernel-backchain. */
204const char *s390_backchain_string = TARGET_DEFAULT_BACKCHAIN;
adf39f8f 205
d75f90f1
AK
206const char *s390_warn_framesize_string;
207const char *s390_warn_dynamicstack_string;
208const char *s390_stack_size_string;
209const char *s390_stack_guard_string;
210
211HOST_WIDE_INT s390_warn_framesize = 0;
212bool s390_warn_dynamicstack_p = 0;
213HOST_WIDE_INT s390_stack_size = 0;
214HOST_WIDE_INT s390_stack_guard = 0;
215
adf39f8f
AK
/* The following structure is embedded in the machine
   specific part of struct function.  */

struct s390_frame_layout GTY (())
{
  /* Offsets of the individual save areas within the stack frame.  */
  HOST_WIDE_INT gprs_offset;       /* General-purpose register save area.  */
  HOST_WIDE_INT f0_offset;         /* FPR f0/f2 save area.  */
  HOST_WIDE_INT f4_offset;         /* FPR f4/f6 save area.  */
  HOST_WIDE_INT f8_offset;         /* FPR f8-f15 save area.  */
  HOST_WIDE_INT backchain_offset;  /* Backchain slot.  */

  /* Number of first and last gpr to be saved, restored.  */
  int first_save_gpr;
  int first_restore_gpr;
  int last_save_gpr;
  int last_restore_gpr;

  /* Bits standing for floating point registers.  Set, if the
     respective register has to be saved.  Starting with reg 16 (f0)
     at the rightmost bit.
     Bit 15 -  8  7  6  5  4  3  2  1  0
     fpr 15 -  8  7  5  3  1  6  4  2  0
     reg 31 - 24 23 22 21 20 19 18 17 16  */
  unsigned int fpr_bitmap;

  /* Number of floating point registers f8-f15 which must be saved.  */
  int high_fprs;

  /* Set if return address needs to be saved.  */
  bool save_return_addr_p;

  /* Set if backchain needs to be saved.  */
  bool save_backchain_p;

  /* Size of stack frame.  */
  HOST_WIDE_INT frame_size;
};
254
/* Define the structure for the machine field in struct function.  */

struct machine_function GTY(())
{
  /* Frame layout computed for the current function.  */
  struct s390_frame_layout frame_layout;

  /* Literal pool base register.  */
  rtx base_reg;

  /* True if we may need to perform branch splitting.  */
  bool split_branches_pending_p;

  /* Some local-dynamic TLS symbol name.  */
  const char *some_ld_name;
};
270
adf39f8f
AK
271/* Few accessor macros for struct cfun->machine->s390_frame_layout. */
272
273#define cfun_frame_layout (cfun->machine->frame_layout)
274#define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
275#define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr - \
276 cfun_frame_layout.first_save_gpr + 1) * UNITS_PER_WORD)
277#define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |= \
278 (1 << (BITNUM)))
279#define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap & \
280 (1 << (BITNUM))))
281
9c808aad
AJ
282static int s390_match_ccmode_set (rtx, enum machine_mode);
283static int s390_branch_condition_mask (rtx);
284static const char *s390_branch_condition_mnemonic (rtx, int);
285static int check_mode (rtx, enum machine_mode *);
9c808aad
AJ
286static int s390_short_displacement (rtx);
287static int s390_decompose_address (rtx, struct s390_address *);
288static rtx get_thread_pointer (void);
289static rtx legitimize_tls_address (rtx, rtx);
ac32b25e 290static void print_shift_count_operand (FILE *, rtx);
9c808aad
AJ
291static const char *get_some_local_dynamic_name (void);
292static int get_some_local_dynamic_name_1 (rtx *, void *);
293static int reg_used_in_mem_p (int, rtx);
294static int addr_generation_dependency_p (rtx, rtx);
545d16ff 295static int s390_split_branches (void);
585539a1 296static void annotate_constant_pool_refs (rtx *x);
9c808aad
AJ
297static void find_constant_pool_ref (rtx, rtx *);
298static void replace_constant_pool_ref (rtx *, rtx, rtx);
299static rtx find_ltrel_base (rtx);
585539a1 300static void replace_ltrel_base (rtx *);
91086990 301static void s390_optimize_prologue (void);
9c808aad 302static int find_unused_clobbered_reg (void);
adf39f8f 303static void s390_frame_area (int *, int *);
91086990
UW
304static void s390_register_info (int []);
305static void s390_frame_info (void);
306static void s390_init_frame_layout (void);
307static void s390_update_frame_layout (void);
9c808aad
AJ
308static rtx save_fpr (rtx, int, int);
309static rtx restore_fpr (rtx, int, int);
310static rtx save_gprs (rtx, int, int, int);
311static rtx restore_gprs (rtx, int, int, int);
312static int s390_function_arg_size (enum machine_mode, tree);
313static bool s390_function_arg_float (enum machine_mode, tree);
314static struct machine_function * s390_init_machine_status (void);
d3632d41
UW
315
316/* Check whether integer displacement is in range. */
317#define DISP_IN_RANGE(d) \
318 (TARGET_LONG_DISPLACEMENT? ((d) >= -524288 && (d) <= 524287) \
319 : ((d) >= 0 && (d) <= 4095))
c7453384 320
994fe660 321/* Return true if SET either doesn't set the CC register, or else
c7453384 322 the source and destination have matching CC modes and that
994fe660 323 CC mode is at least as constrained as REQ_MODE. */
c7453384 324
9db1d521 325static int
9c808aad 326s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
9db1d521 327{
994fe660 328 enum machine_mode set_mode;
9db1d521
HP
329
330 if (GET_CODE (set) != SET)
994fe660 331 abort ();
9db1d521
HP
332
333 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
334 return 1;
335
336 set_mode = GET_MODE (SET_DEST (set));
337 switch (set_mode)
338 {
9db1d521 339 case CCSmode:
07893d4f 340 case CCSRmode:
9db1d521 341 case CCUmode:
07893d4f 342 case CCURmode:
ba956982 343 case CCLmode:
07893d4f
UW
344 case CCL1mode:
345 case CCL2mode:
5d880bd2 346 case CCL3mode:
07893d4f
UW
347 case CCT1mode:
348 case CCT2mode:
349 case CCT3mode:
350 if (req_mode != set_mode)
ba956982
UW
351 return 0;
352 break;
07893d4f 353
9db1d521 354 case CCZmode:
07893d4f
UW
355 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
356 && req_mode != CCSRmode && req_mode != CCURmode)
9db1d521
HP
357 return 0;
358 break;
0a3bdf9d
UW
359
360 case CCAPmode:
361 case CCANmode:
362 if (req_mode != CCAmode)
363 return 0;
364 break;
c7453384 365
9db1d521
HP
366 default:
367 abort ();
368 }
c7453384 369
9db1d521
HP
370 return (GET_MODE (SET_SRC (set)) == set_mode);
371}
372
c7453384
EC
373/* Return true if every SET in INSN that sets the CC register
374 has source and destination with matching CC modes and that
375 CC mode is at least as constrained as REQ_MODE.
07893d4f 376 If REQ_MODE is VOIDmode, always return false. */
c7453384 377
9db1d521 378int
9c808aad 379s390_match_ccmode (rtx insn, enum machine_mode req_mode)
9db1d521
HP
380{
381 int i;
382
07893d4f
UW
383 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
384 if (req_mode == VOIDmode)
385 return 0;
386
9db1d521
HP
387 if (GET_CODE (PATTERN (insn)) == SET)
388 return s390_match_ccmode_set (PATTERN (insn), req_mode);
389
390 if (GET_CODE (PATTERN (insn)) == PARALLEL)
391 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
392 {
393 rtx set = XVECEXP (PATTERN (insn), 0, i);
394 if (GET_CODE (set) == SET)
395 if (!s390_match_ccmode_set (set, req_mode))
396 return 0;
397 }
398
399 return 1;
400}
401
c7453384 402/* If a test-under-mask instruction can be used to implement
07893d4f 403 (compare (and ... OP1) OP2), return the CC mode required
c7453384 404 to do that. Otherwise, return VOIDmode.
07893d4f
UW
405 MIXED is true if the instruction can distinguish between
406 CC1 and CC2 for mixed selected bits (TMxx), it is false
407 if the instruction cannot (TM). */
408
409enum machine_mode
9c808aad 410s390_tm_ccmode (rtx op1, rtx op2, int mixed)
07893d4f
UW
411{
412 int bit0, bit1;
413
414 /* ??? Fixme: should work on CONST_DOUBLE as well. */
415 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
416 return VOIDmode;
417
418 /* Selected bits all zero: CC0. */
419 if (INTVAL (op2) == 0)
420 return CCTmode;
421
422 /* Selected bits all one: CC3. */
423 if (INTVAL (op2) == INTVAL (op1))
424 return CCT3mode;
425
426 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
427 if (mixed)
428 {
429 bit1 = exact_log2 (INTVAL (op2));
430 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
431 if (bit0 != -1 && bit1 != -1)
432 return bit0 > bit1 ? CCT1mode : CCT2mode;
433 }
434
435 return VOIDmode;
436}
437
c7453384
EC
438/* Given a comparison code OP (EQ, NE, etc.) and the operands
439 OP0 and OP1 of a COMPARE, return the mode to be used for the
ba956982
UW
440 comparison. */
441
442enum machine_mode
9c808aad 443s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
ba956982
UW
444{
445 switch (code)
446 {
447 case EQ:
448 case NE:
0a3bdf9d 449 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
f19a9af7 450 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'K', "K"))
0a3bdf9d 451 return CCAPmode;
3ef093a8
AK
452 if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
453 || GET_CODE (op1) == NEG)
454 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
ba956982
UW
455 return CCLmode;
456
07893d4f
UW
457 if (GET_CODE (op0) == AND)
458 {
459 /* Check whether we can potentially do it via TM. */
460 enum machine_mode ccmode;
461 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
462 if (ccmode != VOIDmode)
463 {
464 /* Relax CCTmode to CCZmode to allow fall-back to AND
465 if that turns out to be beneficial. */
466 return ccmode == CCTmode ? CCZmode : ccmode;
467 }
468 }
469
c7453384 470 if (register_operand (op0, HImode)
07893d4f
UW
471 && GET_CODE (op1) == CONST_INT
472 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
473 return CCT3mode;
c7453384 474 if (register_operand (op0, QImode)
07893d4f
UW
475 && GET_CODE (op1) == CONST_INT
476 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
477 return CCT3mode;
478
ba956982
UW
479 return CCZmode;
480
481 case LE:
482 case LT:
483 case GE:
484 case GT:
0a3bdf9d 485 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
f19a9af7 486 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'K', "K"))
0a3bdf9d
UW
487 {
488 if (INTVAL (XEXP((op0), 1)) < 0)
489 return CCANmode;
490 else
491 return CCAPmode;
492 }
ba956982
UW
493 case UNORDERED:
494 case ORDERED:
495 case UNEQ:
496 case UNLE:
497 case UNLT:
498 case UNGE:
499 case UNGT:
500 case LTGT:
07893d4f
UW
501 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
502 && GET_CODE (op1) != CONST_INT)
503 return CCSRmode;
ba956982
UW
504 return CCSmode;
505
ba956982
UW
506 case LTU:
507 case GEU:
3ef093a8
AK
508 if (GET_CODE (op0) == PLUS
509 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
07893d4f
UW
510 return CCL1mode;
511
512 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
513 && GET_CODE (op1) != CONST_INT)
514 return CCURmode;
515 return CCUmode;
516
517 case LEU:
ba956982 518 case GTU:
3ef093a8
AK
519 if (GET_CODE (op0) == MINUS
520 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
07893d4f
UW
521 return CCL2mode;
522
523 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
524 && GET_CODE (op1) != CONST_INT)
525 return CCURmode;
ba956982
UW
526 return CCUmode;
527
528 default:
529 abort ();
530 }
531}
532
68f9c5e2
UW
533/* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
534 that we can implement more efficiently. */
535
536void
537s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
538{
539 /* Convert ZERO_EXTRACT back to AND to enable TM patterns. */
540 if ((*code == EQ || *code == NE)
541 && *op1 == const0_rtx
542 && GET_CODE (*op0) == ZERO_EXTRACT
543 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
544 && GET_CODE (XEXP (*op0, 2)) == CONST_INT
545 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
546 {
547 rtx inner = XEXP (*op0, 0);
548 HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
549 HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
550 HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));
551
552 if (len > 0 && len < modesize
553 && pos >= 0 && pos + len <= modesize
554 && modesize <= HOST_BITS_PER_WIDE_INT)
555 {
556 unsigned HOST_WIDE_INT block;
557 block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
558 block <<= modesize - pos - len;
559
560 *op0 = gen_rtx_AND (GET_MODE (inner), inner,
561 gen_int_mode (block, GET_MODE (inner)));
562 }
563 }
564
565 /* Narrow AND of memory against immediate to enable TM. */
566 if ((*code == EQ || *code == NE)
567 && *op1 == const0_rtx
568 && GET_CODE (*op0) == AND
569 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
570 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
571 {
572 rtx inner = XEXP (*op0, 0);
573 rtx mask = XEXP (*op0, 1);
574
575 /* Ignore paradoxical SUBREGs if all extra bits are masked out. */
576 if (GET_CODE (inner) == SUBREG
577 && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
578 && (GET_MODE_SIZE (GET_MODE (inner))
579 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
580 && ((INTVAL (mask)
581 & GET_MODE_MASK (GET_MODE (inner))
582 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
583 == 0))
584 inner = SUBREG_REG (inner);
585
586 /* Do not change volatile MEMs. */
587 if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
588 {
589 int part = s390_single_part (XEXP (*op0, 1),
590 GET_MODE (inner), QImode, 0);
591 if (part >= 0)
592 {
593 mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
594 inner = adjust_address_nv (inner, QImode, part);
595 *op0 = gen_rtx_AND (QImode, inner, mask);
596 }
597 }
598 }
599
600 /* Narrow comparisons against 0xffff to HImode if possible. */
68f9c5e2
UW
601 if ((*code == EQ || *code == NE)
602 && GET_CODE (*op1) == CONST_INT
603 && INTVAL (*op1) == 0xffff
604 && SCALAR_INT_MODE_P (GET_MODE (*op0))
605 && (nonzero_bits (*op0, GET_MODE (*op0))
606 & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
607 {
608 *op0 = gen_lowpart (HImode, *op0);
609 *op1 = constm1_rtx;
610 }
5b022de5
UW
611
612
613 /* Remove redundant UNSPEC_CMPINT conversions if possible. */
614 if (GET_CODE (*op0) == UNSPEC
615 && XINT (*op0, 1) == UNSPEC_CMPINT
616 && XVECLEN (*op0, 0) == 1
617 && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
618 && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
619 && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
620 && *op1 == const0_rtx)
621 {
622 enum rtx_code new_code = UNKNOWN;
623 switch (*code)
624 {
625 case EQ: new_code = EQ; break;
626 case NE: new_code = NE; break;
627 case LT: new_code = LTU; break;
628 case GT: new_code = GTU; break;
629 case LE: new_code = LEU; break;
630 case GE: new_code = GEU; break;
631 default: break;
632 }
633
634 if (new_code != UNKNOWN)
635 {
636 *op0 = XVECEXP (*op0, 0, 0);
637 *code = new_code;
638 }
639 }
68f9c5e2
UW
640}
641
6590e19a
UW
642/* Emit a compare instruction suitable to implement the comparison
643 OP0 CODE OP1. Return the correct condition RTL to be placed in
644 the IF_THEN_ELSE of the conditional branch testing the result. */
645
646rtx
647s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
648{
649 enum machine_mode mode = s390_select_ccmode (code, op0, op1);
650 rtx cc = gen_rtx_REG (mode, CC_REGNUM);
651
652 emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
653 return gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
654}
655
656/* Emit a jump instruction to TARGET. If COND is NULL_RTX, emit an
657 unconditional jump, else a conditional jump under condition COND. */
658
659void
660s390_emit_jump (rtx target, rtx cond)
661{
662 rtx insn;
663
664 target = gen_rtx_LABEL_REF (VOIDmode, target);
665 if (cond)
666 target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);
667
668 insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
669 emit_jump_insn (insn);
670}
671
5b022de5
UW
672/* Return nonzero if OP is a valid comparison operator
673 for a branch condition in mode MODE. */
674
675int
676s390_comparison (rtx op, enum machine_mode mode)
677{
678 if (mode != VOIDmode && mode != GET_MODE (op))
679 return 0;
680
681 if (!COMPARISON_P (op))
682 return 0;
683
684 if (GET_CODE (XEXP (op, 0)) != REG
685 || REGNO (XEXP (op, 0)) != CC_REGNUM
686 || XEXP (op, 1) != const0_rtx)
687 return 0;
688
689 return s390_branch_condition_mask (op) >= 0;
690}
691
e69166de
UW
692/* Return nonzero if OP is a valid comparison operator
693 for an ALC condition in mode MODE. */
694
695int
696s390_alc_comparison (rtx op, enum machine_mode mode)
697{
698 if (mode != VOIDmode && mode != GET_MODE (op))
699 return 0;
700
5d880bd2
UW
701 while (GET_CODE (op) == ZERO_EXTEND || GET_CODE (op) == SIGN_EXTEND)
702 op = XEXP (op, 0);
703
ec8e098d 704 if (!COMPARISON_P (op))
e69166de
UW
705 return 0;
706
707 if (GET_CODE (XEXP (op, 0)) != REG
708 || REGNO (XEXP (op, 0)) != CC_REGNUM
709 || XEXP (op, 1) != const0_rtx)
710 return 0;
711
712 switch (GET_MODE (XEXP (op, 0)))
713 {
714 case CCL1mode:
715 return GET_CODE (op) == LTU;
716
717 case CCL2mode:
718 return GET_CODE (op) == LEU;
719
5d880bd2
UW
720 case CCL3mode:
721 return GET_CODE (op) == GEU;
722
e69166de
UW
723 case CCUmode:
724 return GET_CODE (op) == GTU;
725
726 case CCURmode:
727 return GET_CODE (op) == LTU;
728
729 case CCSmode:
730 return GET_CODE (op) == UNGT;
731
732 case CCSRmode:
733 return GET_CODE (op) == UNLT;
734
735 default:
736 return 0;
737 }
738}
739
740/* Return nonzero if OP is a valid comparison operator
741 for an SLB condition in mode MODE. */
742
743int
744s390_slb_comparison (rtx op, enum machine_mode mode)
745{
746 if (mode != VOIDmode && mode != GET_MODE (op))
747 return 0;
748
5d880bd2
UW
749 while (GET_CODE (op) == ZERO_EXTEND || GET_CODE (op) == SIGN_EXTEND)
750 op = XEXP (op, 0);
751
ec8e098d 752 if (!COMPARISON_P (op))
e69166de
UW
753 return 0;
754
755 if (GET_CODE (XEXP (op, 0)) != REG
756 || REGNO (XEXP (op, 0)) != CC_REGNUM
757 || XEXP (op, 1) != const0_rtx)
758 return 0;
759
760 switch (GET_MODE (XEXP (op, 0)))
761 {
762 case CCL1mode:
763 return GET_CODE (op) == GEU;
764
765 case CCL2mode:
766 return GET_CODE (op) == GTU;
767
5d880bd2
UW
768 case CCL3mode:
769 return GET_CODE (op) == LTU;
770
e69166de
UW
771 case CCUmode:
772 return GET_CODE (op) == LEU;
773
774 case CCURmode:
775 return GET_CODE (op) == GEU;
776
777 case CCSmode:
778 return GET_CODE (op) == LE;
779
780 case CCSRmode:
781 return GET_CODE (op) == GE;
782
783 default:
784 return 0;
785 }
786}
787
c7453384 788/* Return branch condition mask to implement a branch
5b022de5 789 specified by CODE. Return -1 for invalid comparisons. */
ba956982
UW
790
791static int
9c808aad 792s390_branch_condition_mask (rtx code)
c7453384 793{
ba956982
UW
794 const int CC0 = 1 << 3;
795 const int CC1 = 1 << 2;
796 const int CC2 = 1 << 1;
797 const int CC3 = 1 << 0;
798
799 if (GET_CODE (XEXP (code, 0)) != REG
800 || REGNO (XEXP (code, 0)) != CC_REGNUM
801 || XEXP (code, 1) != const0_rtx)
802 abort ();
803
804 switch (GET_MODE (XEXP (code, 0)))
805 {
806 case CCZmode:
807 switch (GET_CODE (code))
808 {
809 case EQ: return CC0;
810 case NE: return CC1 | CC2 | CC3;
5b022de5 811 default: return -1;
ba956982
UW
812 }
813 break;
814
07893d4f
UW
815 case CCT1mode:
816 switch (GET_CODE (code))
817 {
818 case EQ: return CC1;
819 case NE: return CC0 | CC2 | CC3;
5b022de5 820 default: return -1;
07893d4f
UW
821 }
822 break;
823
824 case CCT2mode:
825 switch (GET_CODE (code))
826 {
827 case EQ: return CC2;
828 case NE: return CC0 | CC1 | CC3;
5b022de5 829 default: return -1;
07893d4f
UW
830 }
831 break;
832
833 case CCT3mode:
834 switch (GET_CODE (code))
835 {
836 case EQ: return CC3;
837 case NE: return CC0 | CC1 | CC2;
5b022de5 838 default: return -1;
07893d4f
UW
839 }
840 break;
841
ba956982
UW
842 case CCLmode:
843 switch (GET_CODE (code))
844 {
845 case EQ: return CC0 | CC2;
846 case NE: return CC1 | CC3;
5b022de5 847 default: return -1;
07893d4f
UW
848 }
849 break;
850
851 case CCL1mode:
852 switch (GET_CODE (code))
853 {
854 case LTU: return CC2 | CC3; /* carry */
855 case GEU: return CC0 | CC1; /* no carry */
5b022de5 856 default: return -1;
07893d4f
UW
857 }
858 break;
859
860 case CCL2mode:
861 switch (GET_CODE (code))
862 {
863 case GTU: return CC0 | CC1; /* borrow */
864 case LEU: return CC2 | CC3; /* no borrow */
5b022de5 865 default: return -1;
ba956982
UW
866 }
867 break;
868
5d880bd2
UW
869 case CCL3mode:
870 switch (GET_CODE (code))
871 {
872 case EQ: return CC0 | CC2;
873 case NE: return CC1 | CC3;
874 case LTU: return CC1;
875 case GTU: return CC3;
876 case LEU: return CC1 | CC2;
877 case GEU: return CC2 | CC3;
5b022de5 878 default: return -1;
5d880bd2
UW
879 }
880
ba956982
UW
881 case CCUmode:
882 switch (GET_CODE (code))
883 {
884 case EQ: return CC0;
885 case NE: return CC1 | CC2 | CC3;
886 case LTU: return CC1;
887 case GTU: return CC2;
888 case LEU: return CC0 | CC1;
889 case GEU: return CC0 | CC2;
5b022de5 890 default: return -1;
ba956982
UW
891 }
892 break;
893
07893d4f
UW
894 case CCURmode:
895 switch (GET_CODE (code))
896 {
897 case EQ: return CC0;
898 case NE: return CC2 | CC1 | CC3;
899 case LTU: return CC2;
900 case GTU: return CC1;
901 case LEU: return CC0 | CC2;
902 case GEU: return CC0 | CC1;
5b022de5 903 default: return -1;
07893d4f
UW
904 }
905 break;
906
0a3bdf9d
UW
907 case CCAPmode:
908 switch (GET_CODE (code))
909 {
910 case EQ: return CC0;
911 case NE: return CC1 | CC2 | CC3;
912 case LT: return CC1 | CC3;
913 case GT: return CC2;
914 case LE: return CC0 | CC1 | CC3;
915 case GE: return CC0 | CC2;
5b022de5 916 default: return -1;
0a3bdf9d
UW
917 }
918 break;
919
920 case CCANmode:
921 switch (GET_CODE (code))
922 {
923 case EQ: return CC0;
924 case NE: return CC1 | CC2 | CC3;
925 case LT: return CC1;
926 case GT: return CC2 | CC3;
927 case LE: return CC0 | CC1;
928 case GE: return CC0 | CC2 | CC3;
5b022de5 929 default: return -1;
0a3bdf9d
UW
930 }
931 break;
932
ba956982
UW
933 case CCSmode:
934 switch (GET_CODE (code))
935 {
936 case EQ: return CC0;
937 case NE: return CC1 | CC2 | CC3;
938 case LT: return CC1;
939 case GT: return CC2;
940 case LE: return CC0 | CC1;
941 case GE: return CC0 | CC2;
942 case UNORDERED: return CC3;
943 case ORDERED: return CC0 | CC1 | CC2;
944 case UNEQ: return CC0 | CC3;
945 case UNLT: return CC1 | CC3;
946 case UNGT: return CC2 | CC3;
947 case UNLE: return CC0 | CC1 | CC3;
948 case UNGE: return CC0 | CC2 | CC3;
949 case LTGT: return CC1 | CC2;
5b022de5 950 default: return -1;
ba956982 951 }
07893d4f
UW
952 break;
953
954 case CCSRmode:
955 switch (GET_CODE (code))
956 {
957 case EQ: return CC0;
958 case NE: return CC2 | CC1 | CC3;
959 case LT: return CC2;
960 case GT: return CC1;
961 case LE: return CC0 | CC2;
962 case GE: return CC0 | CC1;
963 case UNORDERED: return CC3;
964 case ORDERED: return CC0 | CC2 | CC1;
965 case UNEQ: return CC0 | CC3;
966 case UNLT: return CC2 | CC3;
967 case UNGT: return CC1 | CC3;
968 case UNLE: return CC0 | CC2 | CC3;
969 case UNGE: return CC0 | CC1 | CC3;
970 case LTGT: return CC2 | CC1;
5b022de5 971 default: return -1;
07893d4f
UW
972 }
973 break;
ba956982
UW
974
975 default:
5b022de5 976 return -1;
ba956982
UW
977 }
978}
979
c7453384
EC
980/* If INV is false, return assembler mnemonic string to implement
981 a branch specified by CODE. If INV is true, return mnemonic
ba956982
UW
982 for the corresponding inverted branch. */
983
984static const char *
9c808aad 985s390_branch_condition_mnemonic (rtx code, int inv)
ba956982 986{
0139adca 987 static const char *const mnemonic[16] =
ba956982
UW
988 {
989 NULL, "o", "h", "nle",
990 "l", "nhe", "lh", "ne",
991 "e", "nlh", "he", "nl",
992 "le", "nh", "no", NULL
993 };
994
995 int mask = s390_branch_condition_mask (code);
5b022de5 996 gcc_assert (mask >= 0);
ba956982
UW
997
998 if (inv)
999 mask ^= 15;
1000
1001 if (mask < 1 || mask > 14)
1002 abort ();
1003
1004 return mnemonic[mask];
1005}
1006
f19a9af7
AK
1007/* Return the part of op which has a value different from def.
1008 The size of the part is determined by mode.
38899e29 1009 Use this function only if you already know that op really
f19a9af7 1010 contains such a part. */
4023fb28 1011
f19a9af7
AK
1012unsigned HOST_WIDE_INT
1013s390_extract_part (rtx op, enum machine_mode mode, int def)
4023fb28 1014{
f19a9af7
AK
1015 unsigned HOST_WIDE_INT value = 0;
1016 int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
1017 int part_bits = GET_MODE_BITSIZE (mode);
1018 unsigned HOST_WIDE_INT part_mask = (1 << part_bits) - 1;
1019 int i;
38899e29 1020
f19a9af7 1021 for (i = 0; i < max_parts; i++)
4023fb28 1022 {
f19a9af7
AK
1023 if (i == 0)
1024 value = (unsigned HOST_WIDE_INT) INTVAL (op);
4023fb28 1025 else
f19a9af7 1026 value >>= part_bits;
38899e29 1027
f19a9af7
AK
1028 if ((value & part_mask) != (def & part_mask))
1029 return value & part_mask;
4023fb28 1030 }
38899e29 1031
4023fb28
UW
1032 abort ();
1033}
1034
1035/* If OP is an integer constant of mode MODE with exactly one
f19a9af7
AK
1036 part of mode PART_MODE unequal to DEF, return the number of that
1037 part. Otherwise, return -1. */
4023fb28
UW
1038
1039int
38899e29
EC
1040s390_single_part (rtx op,
1041 enum machine_mode mode,
f19a9af7
AK
1042 enum machine_mode part_mode,
1043 int def)
1044{
1045 unsigned HOST_WIDE_INT value = 0;
1046 int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
1047 unsigned HOST_WIDE_INT part_mask = (1 << GET_MODE_BITSIZE (part_mode)) - 1;
1048 int i, part = -1;
1049
1050 if (GET_CODE (op) != CONST_INT)
1051 return -1;
38899e29 1052
f19a9af7
AK
1053 for (i = 0; i < n_parts; i++)
1054 {
1055 if (i == 0)
1056 value = (unsigned HOST_WIDE_INT) INTVAL (op);
4023fb28 1057 else
f19a9af7 1058 value >>= GET_MODE_BITSIZE (part_mode);
38899e29 1059
f19a9af7
AK
1060 if ((value & part_mask) != (def & part_mask))
1061 {
1062 if (part != -1)
1063 return -1;
1064 else
1065 part = i;
1066 }
4023fb28 1067 }
f19a9af7 1068 return part == -1 ? -1 : n_parts - 1 - part;
4023fb28
UW
1069}
1070
c7453384
EC
1071/* Check whether we can (and want to) split a double-word
1072 move in mode MODE from SRC to DST into two single-word
dc65c307
UW
1073 moves, moving the subword FIRST_SUBWORD first. */
1074
1075bool
9c808aad 1076s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
dc65c307
UW
1077{
1078 /* Floating point registers cannot be split. */
1079 if (FP_REG_P (src) || FP_REG_P (dst))
1080 return false;
1081
fae778eb 1082 /* We don't need to split if operands are directly accessible. */
dc65c307
UW
1083 if (s_operand (src, mode) || s_operand (dst, mode))
1084 return false;
1085
1086 /* Non-offsettable memory references cannot be split. */
1087 if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
1088 || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
1089 return false;
1090
1091 /* Moving the first subword must not clobber a register
1092 needed to move the second subword. */
1093 if (register_operand (dst, mode))
1094 {
1095 rtx subreg = operand_subword (dst, first_subword, 0, mode);
1096 if (reg_overlap_mentioned_p (subreg, src))
1097 return false;
1098 }
1099
1100 return true;
1101}
1102
19b63d8e
UW
1103/* Check whether the address of memory reference MEM2 equals exactly
1104 the address of memory reference MEM1 plus DELTA. Return true if
1105 we can prove this to be the case, false otherwise. */
1106
1107bool
1108s390_offset_p (rtx mem1, rtx mem2, rtx delta)
1109{
1110 rtx addr1, addr2, addr_delta;
1111
1112 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
1113 return false;
1114
1115 addr1 = XEXP (mem1, 0);
1116 addr2 = XEXP (mem2, 0);
1117
1118 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
1119 if (!addr_delta || !rtx_equal_p (addr_delta, delta))
1120 return false;
1121
1122 return true;
1123}
1124
8cb66696
UW
1125/* Expand logical operator CODE in mode MODE with operands OPERANDS. */
1126
void
s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
			      rtx *operands)
{
  enum machine_mode wmode = mode;
  rtx dst = operands[0];
  rtx src1 = operands[1];
  rtx src2 = operands[2];
  rtx op, clob, tem;

  /* If we cannot handle the operation directly, use a temp register.  */
  if (!s390_logical_operator_ok_p (operands))
    dst = gen_reg_rtx (mode);

  /* QImode and HImode patterns make sense only if we have a destination
     in memory.  Otherwise perform the operation in SImode.  */
  if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
    wmode = SImode;

  /* Widen operands if required.  */
  if (mode != wmode)
    {
      /* For each operand: fold an existing SUBREG into WMODE if
	 possible, wrap a plain register in a new SUBREG, and force
	 anything else (except mode-less constants) into a register
	 first.  A fresh wide register is used for a non-REG dst.  */
      if (GET_CODE (dst) == SUBREG
	  && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
	dst = tem;
      else if (REG_P (dst))
	dst = gen_rtx_SUBREG (wmode, dst, 0);
      else
	dst = gen_reg_rtx (wmode);

      if (GET_CODE (src1) == SUBREG
	  && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
	src1 = tem;
      else if (GET_MODE (src1) != VOIDmode)
	src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);

      if (GET_CODE (src2) == SUBREG
	  && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
	src2 = tem;
      else if (GET_MODE (src2) != VOIDmode)
	src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
    }

  /* Emit the instruction.  The PARALLEL adds the condition-code
     clobber all s390 logical operations require.  */
  op = gen_rtx_SET (VOIDmode, dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
  clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
  emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));

  /* Fix up the destination if needed.  */
  if (dst != operands[0])
    emit_move_insn (operands[0], gen_lowpart (mode, dst));
}
1179
1180/* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR). */
1181
1182bool
1183s390_logical_operator_ok_p (rtx *operands)
1184{
1185 /* If the destination operand is in memory, it needs to coincide
1186 with one of the source operands. After reload, it has to be
1187 the first source operand. */
1188 if (GET_CODE (operands[0]) == MEM)
1189 return rtx_equal_p (operands[0], operands[1])
1190 || (!reload_completed && rtx_equal_p (operands[0], operands[2]));
1191
1192 return true;
1193}
1194
0dfa6c5e
UW
1195/* Narrow logical operation CODE of memory operand MEMOP with immediate
1196 operand IMMOP to switch from SS to SI type instructions. */
1197
1198void
1199s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
1200{
1201 int def = code == AND ? -1 : 0;
1202 HOST_WIDE_INT mask;
1203 int part;
1204
1205 gcc_assert (GET_CODE (*memop) == MEM);
1206 gcc_assert (!MEM_VOLATILE_P (*memop));
1207
1208 mask = s390_extract_part (*immop, QImode, def);
1209 part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
1210 gcc_assert (part >= 0);
1211
1212 *memop = adjust_address (*memop, QImode, part);
1213 *immop = gen_int_mode (mask, QImode);
1214}
1215
ba956982 1216
c7453384 1217/* Change optimizations to be performed, depending on the
994fe660
UW
1218 optimization level.
1219
1220 LEVEL is the optimization level specified; 2 if `-O2' is
1221 specified, 1 if `-O' is specified, and 0 if neither is specified.
1222
5e7a8ee0 1223 SIZE is nonzero if `-Os' is specified and zero otherwise. */
9db1d521
HP
1224
void
optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
{
  /* Adjust default optimization flags for s390; LEVEL and SIZE are
     currently unused here.  */

  /* ??? There are apparently still problems with -fcaller-saves.  */
  flag_caller_saves = 0;

  /* By default, always emit DWARF-2 unwind info.  This allows debugging
     without maintaining a stack frame back-chain.  */
  flag_asynchronous_unwind_tables = 1;
}
1235
4023fb28 1236void
9c808aad 1237override_options (void)
4023fb28 1238{
1fec52be 1239 int i;
1fec52be
HP
1240 static struct pta
1241 {
1242 const char *const name; /* processor name or nickname. */
1243 const enum processor_type processor;
f13e0d4e 1244 const enum processor_flags flags;
1fec52be
HP
1245 }
1246 const processor_alias_table[] =
1247 {
f13e0d4e
UW
1248 {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
1249 {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
1250 {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
c7453384 1251 {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
f13e0d4e 1252 | PF_LONG_DISPLACEMENT},
1fec52be
HP
1253 };
1254
1255 int const pta_size = ARRAY_SIZE (processor_alias_table);
1256
4023fb28
UW
1257 /* Acquire a unique set number for our register saves and restores. */
1258 s390_sr_alias_set = new_alias_set ();
4023fb28 1259
29742ba4
HP
1260 /* Set up function hooks. */
1261 init_machine_status = s390_init_machine_status;
f13e0d4e
UW
1262
1263 /* Architecture mode defaults according to ABI. */
1264 if (!(target_flags_explicit & MASK_ZARCH))
1265 {
1266 if (TARGET_64BIT)
1267 target_flags |= MASK_ZARCH;
1268 else
1269 target_flags &= ~MASK_ZARCH;
1270 }
1271
1272 /* Determine processor architectural level. */
1fec52be 1273 if (!s390_arch_string)
f13e0d4e 1274 s390_arch_string = TARGET_ZARCH? "z900" : "g5";
1fec52be
HP
1275
1276 for (i = 0; i < pta_size; i++)
1277 if (! strcmp (s390_arch_string, processor_alias_table[i].name))
1278 {
1279 s390_arch = processor_alias_table[i].processor;
f13e0d4e 1280 s390_arch_flags = processor_alias_table[i].flags;
1fec52be
HP
1281 break;
1282 }
1fec52be 1283 if (i == pta_size)
f13e0d4e 1284 error ("Unknown cpu used in -march=%s.", s390_arch_string);
1fec52be 1285
f13e0d4e
UW
1286 /* Determine processor to tune for. */
1287 if (!s390_tune_string)
1fec52be 1288 {
f13e0d4e
UW
1289 s390_tune = s390_arch;
1290 s390_tune_flags = s390_arch_flags;
1291 s390_tune_string = s390_arch_string;
1292 }
1293 else
1294 {
1295 for (i = 0; i < pta_size; i++)
1296 if (! strcmp (s390_tune_string, processor_alias_table[i].name))
1297 {
1298 s390_tune = processor_alias_table[i].processor;
1299 s390_tune_flags = processor_alias_table[i].flags;
1300 break;
1301 }
1302 if (i == pta_size)
1303 error ("Unknown cpu used in -mtune=%s.", s390_tune_string);
1fec52be
HP
1304 }
1305
f13e0d4e
UW
1306 /* Sanity checks. */
1307 if (TARGET_ZARCH && !(s390_arch_flags & PF_ZARCH))
1308 error ("z/Architecture mode not supported on %s.", s390_arch_string);
1309 if (TARGET_64BIT && !TARGET_ZARCH)
1310 error ("64-bit ABI not supported in ESA/390 mode.");
d75f90f1
AK
1311
1312 if (s390_warn_framesize_string)
1313 {
1314 if (sscanf (s390_warn_framesize_string, HOST_WIDE_INT_PRINT_DEC,
1315 &s390_warn_framesize) != 1)
1316 error ("invalid value for -mwarn-framesize");
1317 }
1318
1319 if (s390_warn_dynamicstack_string)
1320 s390_warn_dynamicstack_p = 1;
1321
1322 if (s390_stack_size_string)
1323 {
1324 if (sscanf (s390_stack_size_string, HOST_WIDE_INT_PRINT_DEC,
1325 &s390_stack_size) != 1)
1326 error ("invalid value for -mstack-size");
1327
1328 if (exact_log2 (s390_stack_size) == -1)
1329 error ("stack size must be an exact power of 2");
1330
1331 if (s390_stack_guard_string)
1332 {
1333 if (sscanf (s390_stack_guard_string, HOST_WIDE_INT_PRINT_DEC,
1334 &s390_stack_guard) != 1)
1335 error ("invalid value for -mstack-guard");
1336
1337 if (s390_stack_guard >= s390_stack_size)
1338 error ("stack size must be greater than the stack guard value");
1339
1340 if (exact_log2 (s390_stack_guard) == -1)
1341 error ("stack guard value must be an exact power of 2");
1342 }
1343 else
1344 error ("-mstack-size implies use of -mstack-guard");
1345 }
1346
1347 if (s390_stack_guard_string && !s390_stack_size_string)
1348 error ("-mstack-guard implies use of -mstack-size");
29742ba4 1349}
9db1d521
HP
1350
1351/* Map for smallest class containing reg regno. */
1352
const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,	/* registers 0-3   */
  ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,		/* registers 4-7   */
  ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,		/* registers 8-11  */
  ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,		/* registers 12-15 */
  FP_REGS, FP_REGS, FP_REGS, FP_REGS,			/* registers 16-19 */
  FP_REGS, FP_REGS, FP_REGS, FP_REGS,			/* registers 20-23 */
  FP_REGS, FP_REGS, FP_REGS, FP_REGS,			/* registers 24-27 */
  FP_REGS, FP_REGS, FP_REGS, FP_REGS,			/* registers 28-31 */
  /* NOTE(review): 32-35 are presumably the virtual arg pointer,
     condition code, frame pointer and return register -- confirm the
     assignment against the register definitions in s390.h.  */
  ADDR_REGS, CC_REGS, ADDR_REGS, ADDR_REGS,		/* registers 32-35 */
  ACCESS_REGS, ACCESS_REGS				/* registers 36-37 */
};
1365
077dab3b
HP
1366/* Return attribute type of insn. */
1367
1368static enum attr_type
9c808aad 1369s390_safe_attr_type (rtx insn)
077dab3b
HP
1370{
1371 if (recog_memoized (insn) >= 0)
1372 return get_attr_type (insn);
1373 else
1374 return TYPE_NONE;
1375}
9db1d521 1376
994fe660
UW
1377/* Return true if OP a (const_int 0) operand.
1378 OP is the current operation.
1379 MODE is the current operation mode. */
c7453384 1380
9db1d521 1381int
9c808aad 1382const0_operand (register rtx op, enum machine_mode mode)
9db1d521
HP
1383{
1384 return op == CONST0_RTX (mode);
1385}
1386
b2ccb744
UW
1387/* Return true if OP is constant.
1388 OP is the current operation.
1389 MODE is the current operation mode. */
1390
1391int
9c808aad 1392consttable_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
b2ccb744
UW
1393{
1394 return CONSTANT_P (op);
1395}
1396
994fe660 1397/* Return true if the mode of operand OP matches MODE.
c7453384 1398 If MODE is set to VOIDmode, set it to the mode of OP. */
9db1d521
HP
1399
1400static int
9c808aad 1401check_mode (register rtx op, enum machine_mode *mode)
9db1d521
HP
1402{
1403 if (*mode == VOIDmode)
1404 *mode = GET_MODE (op);
1405 else
1406 {
1407 if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
1408 return 0;
1409 }
1410 return 1;
1411}
1412
994fe660 1413/* Return true if OP a valid operand for the LARL instruction.
9db1d521
HP
1414 OP is the current operation.
1415 MODE is the current operation mode. */
1416
int
larl_operand (register rtx op, enum machine_mode mode)
{
  if (! check_mode (op, &mode))
    return 0;

  /* Allow labels and local symbols.  A symbol qualifies only when it
     is at least 2-byte aligned (no SYMBOL_FLAG_ALIGN1), is not
     thread-local, and under -fpic resolves locally.  */
  if (GET_CODE (op) == LABEL_REF)
    return 1;
  if (GET_CODE (op) == SYMBOL_REF)
    return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
	    && SYMBOL_REF_TLS_MODEL (op) == 0
	    && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));

  /* Everything else must have a CONST, so strip it.  */
  if (GET_CODE (op) != CONST)
    return 0;
  op = XEXP (op, 0);

  /* Allow adding *even* in-range constants.  */
  if (GET_CODE (op) == PLUS)
    {
      if (GET_CODE (XEXP (op, 1)) != CONST_INT
	  || (INTVAL (XEXP (op, 1)) & 1) != 0)
	return 0;
#if HOST_BITS_PER_WIDE_INT > 32
      /* On wide hosts, additionally reject addends outside +/- 2^32.  */
      if (INTVAL (XEXP (op, 1)) >= (HOST_WIDE_INT)1 << 32
	  || INTVAL (XEXP (op, 1)) < -((HOST_WIDE_INT)1 << 32))
	return 0;
#endif
      op = XEXP (op, 0);
    }

  /* Labels and local symbols allowed here as well (same conditions
     as above, now applied to the base of the PLUS).  */
  if (GET_CODE (op) == LABEL_REF)
    return 1;
  if (GET_CODE (op) == SYMBOL_REF)
    return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
	    && SYMBOL_REF_TLS_MODEL (op) == 0
	    && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));

  /* Now we must have a @GOTENT offset or @PLT stub
     or an @INDNTPOFF TLS offset.  */
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_GOTENT)
    return 1;
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_PLT)
    return 1;
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_INDNTPOFF)
    return 1;

  return 0;
}
1472
575f7c2b 1473/* Return true if OP is a valid S-type operand.
994fe660 1474 OP is the current operation.
575f7c2b 1475 MODE is the current operation mode. */
9db1d521 1476
575f7c2b
UW
1477int
1478s_operand (rtx op, enum machine_mode mode)
9db1d521 1479{
4023fb28 1480 struct s390_address addr;
9db1d521 1481
4023fb28
UW
1482 /* Call general_operand first, so that we don't have to
1483 check for many special cases. */
9db1d521
HP
1484 if (!general_operand (op, mode))
1485 return 0;
1486
4023fb28
UW
1487 /* Just like memory_operand, allow (subreg (mem ...))
1488 after reload. */
c7453384
EC
1489 if (reload_completed
1490 && GET_CODE (op) == SUBREG
4023fb28
UW
1491 && GET_CODE (SUBREG_REG (op)) == MEM)
1492 op = SUBREG_REG (op);
9db1d521 1493
575f7c2b
UW
1494 if (GET_CODE (op) != MEM)
1495 return 0;
1496 if (!s390_decompose_address (XEXP (op, 0), &addr))
1497 return 0;
1498 if (addr.indx)
1499 return 0;
9db1d521 1500
575f7c2b 1501 return 1;
9db1d521
HP
1502}
1503
575f7c2b
UW
1504/* Return true if OP is a memory operand pointing to the
1505 literal pool, or an immediate operand. */
9db1d521 1506
575f7c2b
UW
1507bool
1508s390_pool_operand (rtx op)
9db1d521 1509{
575f7c2b 1510 struct s390_address addr;
9db1d521 1511
575f7c2b
UW
1512 /* Just like memory_operand, allow (subreg (mem ...))
1513 after reload. */
1514 if (reload_completed
1515 && GET_CODE (op) == SUBREG
1516 && GET_CODE (SUBREG_REG (op)) == MEM)
1517 op = SUBREG_REG (op);
9db1d521 1518
575f7c2b
UW
1519 switch (GET_CODE (op))
1520 {
1521 case CONST_INT:
1522 case CONST_DOUBLE:
1523 return true;
1524
1525 case MEM:
1526 if (!s390_decompose_address (XEXP (op, 0), &addr))
1527 return false;
1528 if (addr.base && REG_P (addr.base) && REGNO (addr.base) == BASE_REGNUM)
1529 return true;
1530 if (addr.indx && REG_P (addr.indx) && REGNO (addr.indx) == BASE_REGNUM)
1531 return true;
1532 return false;
1533
1534 default:
1535 return false;
1536 }
9db1d521
HP
1537}
1538
ac32b25e
UW
1539/* Return true if OP a valid shift count operand.
1540 OP is the current operation.
1541 MODE is the current operation mode. */
1542
int
shift_count_operand (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT offset = 0;

  if (! check_mode (op, &mode))
    return 0;

  /* We can have an integer constant, an address register,
     or a sum of the two.  Note that reload already checks
     that any register present is an address register, so
     we just check for any register here.  */
  if (GET_CODE (op) == CONST_INT)
    {
      /* Pure constant: remember it and leave no register to check.  */
      offset = INTVAL (op);
      op = NULL_RTX;
    }
  if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
    {
      /* Register-plus-constant: split off the constant part.  */
      offset = INTVAL (XEXP (op, 1));
      op = XEXP (op, 0);
    }
  /* Look through subregs to the underlying register.  */
  while (op && GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  if (op && GET_CODE (op) != REG)
    return 0;

  /* Unfortunately we have to reject constants that are invalid
     for an address, or else reload will get confused.  */
  if (!DISP_IN_RANGE (offset))
    return 0;

  return 1;
}
1577
d3632d41
UW
1578/* Return true if DISP is a valid short displacement. */
1579
1580static int
9c808aad 1581s390_short_displacement (rtx disp)
d3632d41
UW
1582{
1583 /* No displacement is OK. */
1584 if (!disp)
1585 return 1;
1586
1587 /* Integer displacement in range. */
1588 if (GET_CODE (disp) == CONST_INT)
1589 return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;
1590
1591 /* GOT offset is not OK, the GOT can be large. */
1592 if (GET_CODE (disp) == CONST
1593 && GET_CODE (XEXP (disp, 0)) == UNSPEC
fd7643fb 1594 && XINT (XEXP (disp, 0), 1) == UNSPEC_GOT)
d3632d41
UW
1595 return 0;
1596
1597 /* All other symbolic constants are literal pool references,
1598 which are OK as the literal pool must be small. */
1599 if (GET_CODE (disp) == CONST)
1600 return 1;
1601
1602 return 0;
1603}
1604
1605/* Return true if OP is a valid operand for a C constraint. */
ccfc6cc8
UW
1606
1607int
f19a9af7 1608s390_extra_constraint_str (rtx op, int c, const char * str)
ccfc6cc8
UW
1609{
1610 struct s390_address addr;
1611
f19a9af7
AK
1612 if (c != str[0])
1613 abort ();
1614
0dfa6c5e
UW
1615 /* Check for offsettable variants of memory constraints. */
1616 if (c == 'A')
1617 {
1618 /* Only accept non-volatile MEMs. */
1619 if (!MEM_P (op) || MEM_VOLATILE_P (op))
1620 return 0;
1621
1622 if ((reload_completed || reload_in_progress)
1623 ? !offsettable_memref_p (op)
1624 : !offsettable_nonstrict_memref_p (op))
1625 return 0;
1626
1627 c = str[1];
1628 }
1629
d3632d41
UW
1630 switch (c)
1631 {
1632 case 'Q':
1633 if (GET_CODE (op) != MEM)
1634 return 0;
1635 if (!s390_decompose_address (XEXP (op, 0), &addr))
1636 return 0;
1637 if (addr.indx)
1638 return 0;
ccfc6cc8 1639
d3632d41
UW
1640 if (TARGET_LONG_DISPLACEMENT)
1641 {
1642 if (!s390_short_displacement (addr.disp))
1643 return 0;
1644 }
1645 break;
ccfc6cc8 1646
d3632d41
UW
1647 case 'R':
1648 if (GET_CODE (op) != MEM)
1649 return 0;
1650
1651 if (TARGET_LONG_DISPLACEMENT)
1652 {
1653 if (!s390_decompose_address (XEXP (op, 0), &addr))
1654 return 0;
1655 if (!s390_short_displacement (addr.disp))
1656 return 0;
1657 }
1658 break;
1659
1660 case 'S':
1661 if (!TARGET_LONG_DISPLACEMENT)
1662 return 0;
1663 if (GET_CODE (op) != MEM)
1664 return 0;
1665 if (!s390_decompose_address (XEXP (op, 0), &addr))
1666 return 0;
1667 if (addr.indx)
1668 return 0;
1669 if (s390_short_displacement (addr.disp))
1670 return 0;
1671 break;
1672
1673 case 'T':
1674 if (!TARGET_LONG_DISPLACEMENT)
1675 return 0;
1676 if (GET_CODE (op) != MEM)
1677 return 0;
1678 /* Any invalid address here will be fixed up by reload,
1679 so accept it for the most generic constraint. */
1680 if (s390_decompose_address (XEXP (op, 0), &addr)
1681 && s390_short_displacement (addr.disp))
1682 return 0;
1683 break;
1684
1685 case 'U':
1686 if (TARGET_LONG_DISPLACEMENT)
1687 {
1688 if (!s390_decompose_address (op, &addr))
1689 return 0;
1690 if (!s390_short_displacement (addr.disp))
1691 return 0;
1692 }
1693 break;
1694
1695 case 'W':
1696 if (!TARGET_LONG_DISPLACEMENT)
1697 return 0;
1698 /* Any invalid address here will be fixed up by reload,
1699 so accept it for the most generic constraint. */
1700 if (s390_decompose_address (op, &addr)
1701 && s390_short_displacement (addr.disp))
1702 return 0;
1703 break;
1704
ac32b25e
UW
1705 case 'Y':
1706 return shift_count_operand (op, VOIDmode);
1707
d3632d41
UW
1708 default:
1709 return 0;
1710 }
ccfc6cc8
UW
1711
1712 return 1;
1713}
1714
f19a9af7
AK
1715/* Return true if VALUE matches the constraint STR. */
1716
1717int
1718s390_const_ok_for_constraint_p (HOST_WIDE_INT value,
1719 int c,
1720 const char * str)
1721{
1722 enum machine_mode mode, part_mode;
1723 int def;
0dfa6c5e 1724 int part, part_goal;
f19a9af7
AK
1725
1726 if (c != str[0])
1727 abort ();
1728
1729 switch (str[0])
1730 {
1731 case 'I':
1732 return (unsigned int)value < 256;
1733
1734 case 'J':
1735 return (unsigned int)value < 4096;
1736
1737 case 'K':
1738 return value >= -32768 && value < 32768;
1739
1740 case 'L':
38899e29
EC
1741 return (TARGET_LONG_DISPLACEMENT ?
1742 (value >= -524288 && value <= 524287)
f19a9af7
AK
1743 : (value >= 0 && value <= 4095));
1744 case 'M':
1745 return value == 2147483647;
1746
1747 case 'N':
0dfa6c5e
UW
1748 if (str[1] == 'x')
1749 part_goal = -1;
1750 else
1751 part_goal = str[1] - '0';
f19a9af7
AK
1752
1753 switch (str[2])
1754 {
1755 case 'H': part_mode = HImode; break;
1756 case 'Q': part_mode = QImode; break;
1757 default: return 0;
1758 }
38899e29 1759
f19a9af7
AK
1760 switch (str[3])
1761 {
1762 case 'H': mode = HImode; break;
1763 case 'S': mode = SImode; break;
1764 case 'D': mode = DImode; break;
1765 default: return 0;
1766 }
1767
1768 switch (str[4])
1769 {
1770 case '0': def = 0; break;
1771 case 'F': def = -1; break;
1772 default: return 0;
1773 }
1774
1775 if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
1776 return 0;
1777
0dfa6c5e
UW
1778 part = s390_single_part (GEN_INT (value), mode, part_mode, def);
1779 if (part < 0)
1780 return 0;
1781 if (part_goal != -1 && part_goal != part)
f19a9af7
AK
1782 return 0;
1783
1784 break;
1785
1786 default:
1787 return 0;
1788 }
1789
1790 return 1;
1791}
1792
3c50106f
RH
1793/* Compute a (partial) cost for rtx X. Return true if the complete
1794 cost has been computed, and false if subexpressions should be
1795 scanned. In either case, *TOTAL contains the cost result. */
1796
static bool
s390_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  switch (code)
    {
    case CONST:
      /* A (const (minus ...)) with a non-constant subtrahend cannot
	 be handled cheaply; discourage it with a very high cost.  */
      if (GET_CODE (XEXP (x, 0)) == MINUS
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
	*total = 1000;
      else
	*total = 0;
      return true;

    case CONST_INT:
      /* Force_const_mem does not work out of reload, because the
	 saveable_obstack is set to reload_obstack, which does not
	 live long enough.  Because of this we cannot use force_const_mem
	 in addsi3.  This leads to problems with gen_add2_insn with a
	 constant greater than a short. Because of that we give an
	 addition of greater constants a cost of 3 (reload1.c 10096).  */
      /* ??? saveable_obstack no longer exists.  */
      if (outer_code == PLUS
	  && (INTVAL (x) > 32767 || INTVAL (x) < -32768))
	*total = COSTS_N_INSNS (3);
      else
	*total = 0;
      return true;

    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      /* Symbolic and floating constants are considered free.  */
      *total = 0;
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case PLUS:
    case AND:
    case IOR:
    case XOR:
    case MINUS:
    case NEG:
    case NOT:
      /* Simple arithmetic/logical operations cost one insn.  */
      *total = COSTS_N_INSNS (1);
      return true;

    case MULT:
      /* DImode multiply is far more expensive than SImode.  */
      if (GET_MODE (XEXP (x, 0)) == DImode)
	*total = COSTS_N_INSNS (40);
      else
	*total = COSTS_N_INSNS (7);
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (33);
      return true;

    default:
      /* Unknown code: let the caller scan subexpressions.  */
      return false;
    }
}
1862
dea09b1b
UW
1863/* Return the cost of an address rtx ADDR. */
1864
dcefdf67 1865static int
9c808aad 1866s390_address_cost (rtx addr)
dea09b1b
UW
1867{
1868 struct s390_address ad;
1869 if (!s390_decompose_address (addr, &ad))
1870 return 1000;
1871
1872 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1873}
1874
994fe660 1875/* Return true if OP is a valid operand for the BRAS instruction.
9db1d521
HP
1876 OP is the current operation.
1877 MODE is the current operation mode. */
1878
1879int
9c808aad 1880bras_sym_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9db1d521
HP
1881{
1882 register enum rtx_code code = GET_CODE (op);
1883
1884 /* Allow SYMBOL_REFs. */
1885 if (code == SYMBOL_REF)
1886 return 1;
1887
1888 /* Allow @PLT stubs. */
1889 if (code == CONST
1890 && GET_CODE (XEXP (op, 0)) == UNSPEC
fd7643fb 1891 && XINT (XEXP (op, 0), 1) == UNSPEC_PLT)
9db1d521
HP
1892 return 1;
1893 return 0;
1894}
1895
fd3cd001
UW
1896/* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
1897 otherwise return 0. */
1898
1899int
9c808aad 1900tls_symbolic_operand (register rtx op)
fd3cd001 1901{
fd3cd001
UW
1902 if (GET_CODE (op) != SYMBOL_REF)
1903 return 0;
114278e7 1904 return SYMBOL_REF_TLS_MODEL (op);
fd3cd001 1905}
9db1d521 1906\f
994fe660 1907/* Return true if OP is a load multiple operation. It is known to be a
c7453384 1908 PARALLEL and the first section will be tested.
994fe660
UW
1909 OP is the current operation.
1910 MODE is the current operation mode. */
9db1d521
HP
1911
int
load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum machine_mode elt_mode;
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  int i, off;


  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  /* The first element fixes the destination register, the base
     address, and the element mode for all remaining elements.  */
  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
  elt_mode = GET_MODE (SET_DEST (XVECEXP (op, 0, 0)));

  /* Check, is base, or base + displacement.  */

  if (GET_CODE (src_addr) == REG)
    off = 0;
  else if (GET_CODE (src_addr) == PLUS
	   && GET_CODE (XEXP (src_addr, 0)) == REG
	   && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (src_addr, 1));
      src_addr = XEXP (src_addr, 0);
    }
  else
    return 0;

  /* Every further element must load the next consecutive register
     from the next consecutive memory slot, same mode throughout.  */
  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != elt_mode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != elt_mode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
	     != off + i * GET_MODE_SIZE (elt_mode))
	return 0;
    }

  return 1;
}
1967
994fe660 1968/* Return true if OP is a store multiple operation. It is known to be a
c7453384 1969 PARALLEL and the first section will be tested.
994fe660
UW
1970 OP is the current operation.
1971 MODE is the current operation mode. */
9db1d521
HP
1972
int
store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum machine_mode elt_mode;
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  int i, off;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  /* The first element fixes the source register, the base address,
     and the element mode for all remaining elements.  */
  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
  elt_mode = GET_MODE (SET_SRC (XVECEXP (op, 0, 0)));

  /* Check, is base, or base + displacement.  */

  if (GET_CODE (dest_addr) == REG)
    off = 0;
  else if (GET_CODE (dest_addr) == PLUS
	   && GET_CODE (XEXP (dest_addr, 0)) == REG
	   && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (dest_addr, 1));
      dest_addr = XEXP (dest_addr, 0);
    }
  else
    return 0;

  /* Every further element must store the next consecutive register
     into the next consecutive memory slot, same mode throughout.  */
  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != elt_mode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != elt_mode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
	     != off + i * GET_MODE_SIZE (elt_mode))
	return 0;
    }
  return 1;
}
2026
c5aa1d12
UW
2027/* Split DImode access register reference REG (on 64-bit) into its constituent
2028 low and high parts, and store them into LO and HI. Note that gen_lowpart/
2029 gen_highpart cannot be used as they assume all registers are word-sized,
2030 while our access registers have only half that size. */
2031
void
s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
{
  gcc_assert (TARGET_64BIT);
  gcc_assert (ACCESS_REG_P (reg));
  gcc_assert (GET_MODE (reg) == DImode);
  /* The register number must be even: the DImode value occupies an
     even/odd pair of 32-bit access registers.  */
  gcc_assert (!(REGNO (reg) & 1));

  /* High word lives in the even register, low word in the odd one.  */
  *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
  *hi = gen_rtx_REG (SImode, REGNO (reg));
}
9db1d521 2043
994fe660 2044/* Return true if OP contains a symbol reference */
9db1d521
HP
2045
2046int
9c808aad 2047symbolic_reference_mentioned_p (rtx op)
9db1d521 2048{
994fe660 2049 register const char *fmt;
9db1d521
HP
2050 register int i;
2051
2052 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
2053 return 1;
2054
2055 fmt = GET_RTX_FORMAT (GET_CODE (op));
2056 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2057 {
2058 if (fmt[i] == 'E')
2059 {
2060 register int j;
2061
2062 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2063 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2064 return 1;
2065 }
2066
2067 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
2068 return 1;
2069 }
2070
2071 return 0;
2072}
2073
fd3cd001
UW
2074/* Return true if OP contains a reference to a thread-local symbol. */
2075
2076int
9c808aad 2077tls_symbolic_reference_mentioned_p (rtx op)
fd3cd001
UW
2078{
2079 register const char *fmt;
2080 register int i;
2081
2082 if (GET_CODE (op) == SYMBOL_REF)
2083 return tls_symbolic_operand (op);
2084
2085 fmt = GET_RTX_FORMAT (GET_CODE (op));
2086 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2087 {
2088 if (fmt[i] == 'E')
2089 {
2090 register int j;
2091
2092 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2093 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2094 return 1;
2095 }
2096
2097 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
2098 return 1;
2099 }
2100
2101 return 0;
2102}
2103
9db1d521 2104
c7453384
EC
2105/* Return true if OP is a legitimate general operand when
2106 generating PIC code. It is given that flag_pic is on
994fe660
UW
2107 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2108
9db1d521 2109int
9c808aad 2110legitimate_pic_operand_p (register rtx op)
9db1d521 2111{
4023fb28 2112 /* Accept all non-symbolic constants. */
9db1d521
HP
2113 if (!SYMBOLIC_CONST (op))
2114 return 1;
2115
c7453384 2116 /* Reject everything else; must be handled
fd3cd001 2117 via emit_symbolic_move. */
9db1d521
HP
2118 return 0;
2119}
2120
994fe660
UW
2121/* Returns true if the constant value OP is a legitimate general operand.
2122 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2123
9db1d521 2124int
9c808aad 2125legitimate_constant_p (register rtx op)
9db1d521 2126{
4023fb28 2127 /* Accept all non-symbolic constants. */
9db1d521
HP
2128 if (!SYMBOLIC_CONST (op))
2129 return 1;
2130
fd3cd001 2131 /* Accept immediate LARL operands. */
9e8327e3 2132 if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
fd3cd001
UW
2133 return 1;
2134
2135 /* Thread-local symbols are never legal constants. This is
2136 so that emit_call knows that computing such addresses
2137 might require a function call. */
2138 if (TLS_SYMBOLIC_CONST (op))
2139 return 0;
2140
9db1d521
HP
2141 /* In the PIC case, symbolic constants must *not* be
2142 forced into the literal pool. We accept them here,
fd3cd001 2143 so that they will be handled by emit_symbolic_move. */
9db1d521
HP
2144 if (flag_pic)
2145 return 1;
2146
9db1d521
HP
2147 /* All remaining non-PIC symbolic constants are
2148 forced into the literal pool. */
2149 return 0;
2150}
2151
fd3cd001
UW
2152/* Determine if it's legal to put X into the constant pool. This
2153 is not possible if X contains the address of a symbol that is
2154 not constant (TLS) or not known at final link time (PIC). */
2155
2156static bool
9c808aad 2157s390_cannot_force_const_mem (rtx x)
fd3cd001
UW
2158{
2159 switch (GET_CODE (x))
2160 {
2161 case CONST_INT:
2162 case CONST_DOUBLE:
2163 /* Accept all non-symbolic constants. */
2164 return false;
2165
2166 case LABEL_REF:
2167 /* Labels are OK iff we are non-PIC. */
2168 return flag_pic != 0;
2169
2170 case SYMBOL_REF:
2171 /* 'Naked' TLS symbol references are never OK,
2172 non-TLS symbols are OK iff we are non-PIC. */
2173 if (tls_symbolic_operand (x))
2174 return true;
2175 else
2176 return flag_pic != 0;
2177
2178 case CONST:
2179 return s390_cannot_force_const_mem (XEXP (x, 0));
2180 case PLUS:
2181 case MINUS:
2182 return s390_cannot_force_const_mem (XEXP (x, 0))
2183 || s390_cannot_force_const_mem (XEXP (x, 1));
2184
2185 case UNSPEC:
2186 switch (XINT (x, 1))
2187 {
2188 /* Only lt-relative or GOT-relative UNSPECs are OK. */
fd7643fb
UW
2189 case UNSPEC_LTREL_OFFSET:
2190 case UNSPEC_GOT:
2191 case UNSPEC_GOTOFF:
2192 case UNSPEC_PLTOFF:
fd3cd001
UW
2193 case UNSPEC_TLSGD:
2194 case UNSPEC_TLSLDM:
2195 case UNSPEC_NTPOFF:
2196 case UNSPEC_DTPOFF:
2197 case UNSPEC_GOTNTPOFF:
2198 case UNSPEC_INDNTPOFF:
2199 return false;
2200
9bb86f41
UW
2201 /* If the literal pool shares the code section, be put
2202 execute template placeholders into the pool as well. */
2203 case UNSPEC_INSN:
2204 return TARGET_CPU_ZARCH;
2205
fd3cd001
UW
2206 default:
2207 return true;
2208 }
2209 break;
2210
2211 default:
2212 abort ();
2213 }
2214}
2215
4023fb28 2216/* Returns true if the constant value OP is a legitimate general
c7453384 2217 operand during and after reload. The difference to
4023fb28
UW
2218 legitimate_constant_p is that this function will not accept
2219 a constant that would need to be forced to the literal pool
2220 before it can be used as operand. */
2221
2222int
9c808aad 2223legitimate_reload_constant_p (register rtx op)
4023fb28 2224{
d3632d41 2225 /* Accept la(y) operands. */
c7453384 2226 if (GET_CODE (op) == CONST_INT
d3632d41
UW
2227 && DISP_IN_RANGE (INTVAL (op)))
2228 return 1;
2229
4023fb28
UW
2230 /* Accept l(g)hi operands. */
2231 if (GET_CODE (op) == CONST_INT
f19a9af7 2232 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', "K"))
4023fb28
UW
2233 return 1;
2234
2235 /* Accept lliXX operands. */
9e8327e3 2236 if (TARGET_ZARCH
f19a9af7 2237 && s390_single_part (op, DImode, HImode, 0) >= 0)
4023fb28
UW
2238 return 1;
2239
2240 /* Accept larl operands. */
9e8327e3 2241 if (TARGET_CPU_ZARCH
4023fb28
UW
2242 && larl_operand (op, VOIDmode))
2243 return 1;
2244
4023fb28
UW
2245 /* Everything else cannot be handled without reload. */
2246 return 0;
2247}
2248
2249/* Given an rtx OP being reloaded into a reg required to be in class CLASS,
2250 return the class of reg to actually use. */
2251
2252enum reg_class
9c808aad 2253s390_preferred_reload_class (rtx op, enum reg_class class)
4023fb28 2254{
4023fb28
UW
2255 switch (GET_CODE (op))
2256 {
2257 /* Constants we cannot reload must be forced into the
0796c16a
UW
2258 literal pool. */
2259
4023fb28
UW
2260 case CONST_DOUBLE:
2261 case CONST_INT:
0796c16a 2262 if (legitimate_reload_constant_p (op))
4023fb28 2263 return class;
0796c16a
UW
2264 else
2265 return NO_REGS;
4023fb28
UW
2266
2267 /* If a symbolic constant or a PLUS is reloaded,
14b3e8ef
UW
2268 it is most likely being used as an address, so
2269 prefer ADDR_REGS. If 'class' is not a superset
2270 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
4023fb28
UW
2271 case PLUS:
2272 case LABEL_REF:
2273 case SYMBOL_REF:
2274 case CONST:
14b3e8ef
UW
2275 if (reg_class_subset_p (ADDR_REGS, class))
2276 return ADDR_REGS;
2277 else
2278 return NO_REGS;
4023fb28
UW
2279
2280 default:
2281 break;
2282 }
2283
2284 return class;
2285}
9db1d521 2286
f3e9edff
UW
2287/* Return the register class of a scratch register needed to
2288 load IN into a register of class CLASS in MODE.
2289
2290 We need a temporary when loading a PLUS expression which
2291 is not a legitimate operand of the LOAD ADDRESS instruction. */
2292
2293enum reg_class
9c808aad
AJ
2294s390_secondary_input_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
2295 enum machine_mode mode, rtx in)
f3e9edff
UW
2296{
2297 if (s390_plus_operand (in, mode))
d58005c7 2298 return ADDR_REGS;
f3e9edff 2299
9dc62c00
AK
2300 if (GET_MODE_CLASS (mode) == MODE_CC)
2301 return GENERAL_REGS;
2302
f3e9edff
UW
2303 return NO_REGS;
2304}
2305
dc65c307
UW
2306/* Return the register class of a scratch register needed to
2307 store a register of class CLASS in MODE into OUT:
2308
c7453384 2309 We need a temporary when storing a double-word to a
dc65c307
UW
2310 non-offsettable memory address. */
2311
2312enum reg_class
9c808aad
AJ
2313s390_secondary_output_reload_class (enum reg_class class,
2314 enum machine_mode mode, rtx out)
dc65c307
UW
2315{
2316 if ((TARGET_64BIT ? mode == TImode
2317 : (mode == DImode || mode == DFmode))
2318 && reg_classes_intersect_p (GENERAL_REGS, class)
2319 && GET_CODE (out) == MEM
2320 && !offsettable_memref_p (out)
2321 && !s_operand (out, VOIDmode))
2322 return ADDR_REGS;
2323
9dc62c00
AK
2324 if (GET_MODE_CLASS (mode) == MODE_CC)
2325 return GENERAL_REGS;
2326
dc65c307
UW
2327 return NO_REGS;
2328}
2329
f3e9edff 2330/* Return true if OP is a PLUS that is not a legitimate
c7453384 2331 operand for the LA instruction.
f3e9edff
UW
2332 OP is the current operation.
2333 MODE is the current operation mode. */
2334
2335int
9c808aad 2336s390_plus_operand (register rtx op, enum machine_mode mode)
f3e9edff
UW
2337{
2338 if (!check_mode (op, &mode) || mode != Pmode)
2339 return FALSE;
2340
2341 if (GET_CODE (op) != PLUS)
2342 return FALSE;
2343
2344 if (legitimate_la_operand_p (op))
2345 return FALSE;
2346
2347 return TRUE;
2348}
2349
2350/* Generate code to load SRC, which is PLUS that is not a
2351 legitimate operand for the LA instruction, into TARGET.
2352 SCRATCH may be used as scratch register. */
2353
2354void
9c808aad
AJ
2355s390_expand_plus_operand (register rtx target, register rtx src,
2356 register rtx scratch)
f3e9edff 2357{
7974fe63 2358 rtx sum1, sum2;
b808c04c 2359 struct s390_address ad;
6a4e49c1 2360
6a4e49c1 2361 /* src must be a PLUS; get its two operands. */
f3e9edff
UW
2362 if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
2363 abort ();
2364
7c82a1ed
UW
2365 /* Check if any of the two operands is already scheduled
2366 for replacement by reload. This can happen e.g. when
2367 float registers occur in an address. */
2368 sum1 = find_replacement (&XEXP (src, 0));
2369 sum2 = find_replacement (&XEXP (src, 1));
ccfc6cc8 2370 src = gen_rtx_PLUS (Pmode, sum1, sum2);
ccfc6cc8 2371
7974fe63
UW
2372 /* If the address is already strictly valid, there's nothing to do. */
2373 if (!s390_decompose_address (src, &ad)
2374 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2375 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
f3e9edff 2376 {
7974fe63
UW
2377 /* Otherwise, one of the operands cannot be an address register;
2378 we reload its value into the scratch register. */
2379 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
2380 {
2381 emit_move_insn (scratch, sum1);
2382 sum1 = scratch;
2383 }
2384 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
2385 {
2386 emit_move_insn (scratch, sum2);
2387 sum2 = scratch;
2388 }
f3e9edff 2389
7974fe63
UW
2390 /* According to the way these invalid addresses are generated
2391 in reload.c, it should never happen (at least on s390) that
2392 *neither* of the PLUS components, after find_replacements
2393 was applied, is an address register. */
2394 if (sum1 == scratch && sum2 == scratch)
2395 {
2396 debug_rtx (src);
2397 abort ();
2398 }
f3e9edff 2399
7974fe63 2400 src = gen_rtx_PLUS (Pmode, sum1, sum2);
f3e9edff
UW
2401 }
2402
2403 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
2404 is only ever performed on addresses, so we can mark the
2405 sum as legitimate for LA in any case. */
a41c6c53 2406 s390_load_address (target, src);
f3e9edff
UW
2407}
2408
2409
994fe660 2410/* Decompose a RTL expression ADDR for a memory address into
b808c04c
UW
2411 its components, returned in OUT.
2412
994fe660
UW
2413 Returns 0 if ADDR is not a valid memory address, nonzero
2414 otherwise. If OUT is NULL, don't return the components,
2415 but check for validity only.
9db1d521 2416
994fe660
UW
2417 Note: Only addresses in canonical form are recognized.
2418 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
2419 canonical form so that they will be recognized. */
9db1d521
HP
2420
2421static int
9c808aad 2422s390_decompose_address (register rtx addr, struct s390_address *out)
9db1d521 2423{
585539a1 2424 HOST_WIDE_INT offset = 0;
9db1d521
HP
2425 rtx base = NULL_RTX;
2426 rtx indx = NULL_RTX;
2427 rtx disp = NULL_RTX;
585539a1 2428 rtx orig_disp;
f3e9edff 2429 int pointer = FALSE;
fd7643fb
UW
2430 int base_ptr = FALSE;
2431 int indx_ptr = FALSE;
9db1d521
HP
2432
2433 /* Decompose address into base + index + displacement. */
2434
2435 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
2436 base = addr;
2437
2438 else if (GET_CODE (addr) == PLUS)
2439 {
2440 rtx op0 = XEXP (addr, 0);
2441 rtx op1 = XEXP (addr, 1);
2442 enum rtx_code code0 = GET_CODE (op0);
2443 enum rtx_code code1 = GET_CODE (op1);
2444
2445 if (code0 == REG || code0 == UNSPEC)
2446 {
2447 if (code1 == REG || code1 == UNSPEC)
2448 {
2449 indx = op0; /* index + base */
2450 base = op1;
2451 }
2452
2453 else
2454 {
2455 base = op0; /* base + displacement */
2456 disp = op1;
2457 }
2458 }
2459
2460 else if (code0 == PLUS)
2461 {
2462 indx = XEXP (op0, 0); /* index + base + disp */
2463 base = XEXP (op0, 1);
2464 disp = op1;
2465 }
2466
2467 else
2468 {
2469 return FALSE;
2470 }
2471 }
2472
2473 else
2474 disp = addr; /* displacement */
2475
585539a1
UW
2476 /* Extract integer part of displacement. */
2477 orig_disp = disp;
2478 if (disp)
2479 {
2480 if (GET_CODE (disp) == CONST_INT)
2481 {
2482 offset = INTVAL (disp);
2483 disp = NULL_RTX;
2484 }
2485 else if (GET_CODE (disp) == CONST
2486 && GET_CODE (XEXP (disp, 0)) == PLUS
2487 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
2488 {
2489 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
2490 disp = XEXP (XEXP (disp, 0), 0);
2491 }
2492 }
2493
2494 /* Strip off CONST here to avoid special case tests later. */
2495 if (disp && GET_CODE (disp) == CONST)
2496 disp = XEXP (disp, 0);
2497
2498 /* We can convert literal pool addresses to
2499 displacements by basing them off the base register. */
2500 if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
2501 {
2502 /* Either base or index must be free to hold the base register. */
2503 if (!base)
490ceeb4 2504 base = gen_rtx_REG (Pmode, BASE_REGNUM);
585539a1 2505 else if (!indx)
490ceeb4 2506 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
585539a1
UW
2507 else
2508 return FALSE;
2509
2510 /* Mark up the displacement. */
2511 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
2512 UNSPEC_LTREL_OFFSET);
2513 }
9db1d521
HP
2514
2515 /* Validate base register. */
2516 if (base)
2517 {
2518 if (GET_CODE (base) == UNSPEC)
585539a1
UW
2519 switch (XINT (base, 1))
2520 {
2521 case UNSPEC_LTREF:
2522 if (!disp)
2523 disp = gen_rtx_UNSPEC (Pmode,
2524 gen_rtvec (1, XVECEXP (base, 0, 0)),
2525 UNSPEC_LTREL_OFFSET);
2526 else
2527 return FALSE;
2528
490ceeb4 2529 base = gen_rtx_REG (Pmode, BASE_REGNUM);
585539a1
UW
2530 break;
2531
2532 case UNSPEC_LTREL_BASE:
490ceeb4 2533 base = gen_rtx_REG (Pmode, BASE_REGNUM);
585539a1
UW
2534 break;
2535
2536 default:
fd7643fb 2537 return FALSE;
585539a1 2538 }
9db1d521
HP
2539
2540 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
fd7643fb 2541 return FALSE;
9db1d521 2542
490ceeb4 2543 if (REGNO (base) == BASE_REGNUM
f3e9edff
UW
2544 || REGNO (base) == STACK_POINTER_REGNUM
2545 || REGNO (base) == FRAME_POINTER_REGNUM
2546 || ((reload_completed || reload_in_progress)
2547 && frame_pointer_needed
2548 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
a41c6c53 2549 || REGNO (base) == ARG_POINTER_REGNUM
f3e9edff
UW
2550 || (flag_pic
2551 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
fd7643fb 2552 pointer = base_ptr = TRUE;
9db1d521
HP
2553 }
2554
2555 /* Validate index register. */
2556 if (indx)
2557 {
2558 if (GET_CODE (indx) == UNSPEC)
585539a1
UW
2559 switch (XINT (indx, 1))
2560 {
2561 case UNSPEC_LTREF:
2562 if (!disp)
2563 disp = gen_rtx_UNSPEC (Pmode,
2564 gen_rtvec (1, XVECEXP (indx, 0, 0)),
2565 UNSPEC_LTREL_OFFSET);
2566 else
2567 return FALSE;
2568
490ceeb4 2569 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
585539a1
UW
2570 break;
2571
2572 case UNSPEC_LTREL_BASE:
490ceeb4 2573 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
585539a1
UW
2574 break;
2575
2576 default:
fd7643fb 2577 return FALSE;
585539a1 2578 }
9db1d521
HP
2579
2580 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
fd7643fb 2581 return FALSE;
9db1d521 2582
490ceeb4 2583 if (REGNO (indx) == BASE_REGNUM
f3e9edff
UW
2584 || REGNO (indx) == STACK_POINTER_REGNUM
2585 || REGNO (indx) == FRAME_POINTER_REGNUM
2586 || ((reload_completed || reload_in_progress)
2587 && frame_pointer_needed
2588 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
a41c6c53 2589 || REGNO (indx) == ARG_POINTER_REGNUM
f3e9edff
UW
2590 || (flag_pic
2591 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
fd7643fb
UW
2592 pointer = indx_ptr = TRUE;
2593 }
2594
2595 /* Prefer to use pointer as base, not index. */
2596 if (base && indx && !base_ptr
2597 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
2598 {
2599 rtx tmp = base;
2600 base = indx;
2601 indx = tmp;
9db1d521
HP
2602 }
2603
2604 /* Validate displacement. */
585539a1 2605 if (!disp)
9db1d521 2606 {
a38e09bc
AK
2607 /* If the argument pointer or the return address pointer are involved,
2608 the displacement will change later anyway as the virtual registers get
2609 eliminated. This could make a valid displacement invalid, but it is
2610 more likely to make an invalid displacement valid, because we sometimes
2611 access the register save area via negative offsets to one of those
2612 registers.
585539a1
UW
2613 Thus we don't check the displacement for validity here. If after
2614 elimination the displacement turns out to be invalid after all,
2615 this is fixed up by reload in any case. */
a38e09bc
AK
2616 if (base != arg_pointer_rtx
2617 && indx != arg_pointer_rtx
2618 && base != return_address_pointer_rtx
2619 && indx != return_address_pointer_rtx)
585539a1
UW
2620 if (!DISP_IN_RANGE (offset))
2621 return FALSE;
2622 }
2623 else
2624 {
2625 /* All the special cases are pointers. */
2626 pointer = TRUE;
9db1d521 2627
fd7643fb 2628 /* In the small-PIC case, the linker converts @GOT
fd3cd001 2629 and @GOTNTPOFF offsets to possible displacements. */
585539a1
UW
2630 if (GET_CODE (disp) == UNSPEC
2631 && (XINT (disp, 1) == UNSPEC_GOT
2632 || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
2633 && offset == 0
2634 && flag_pic == 1)
9db1d521 2635 {
585539a1 2636 ;
9db1d521
HP
2637 }
2638
585539a1
UW
2639 /* Accept chunkified literal pool symbol references. */
2640 else if (GET_CODE (disp) == MINUS
2641 && GET_CODE (XEXP (disp, 0)) == LABEL_REF
2642 && GET_CODE (XEXP (disp, 1)) == LABEL_REF)
b2ccb744 2643 {
585539a1 2644 ;
b2ccb744 2645 }
c7453384 2646
585539a1
UW
2647 /* Accept literal pool references. */
2648 else if (GET_CODE (disp) == UNSPEC
2649 && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
b2ccb744 2650 {
585539a1
UW
2651 orig_disp = gen_rtx_CONST (Pmode, disp);
2652 if (offset)
2653 {
2654 /* If we have an offset, make sure it does not
2655 exceed the size of the constant pool entry. */
2656 rtx sym = XVECEXP (disp, 0, 0);
2657 if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
2658 return FALSE;
2659
2660 orig_disp = plus_constant (orig_disp, offset);
2661 }
b2ccb744
UW
2662 }
2663
9db1d521 2664 else
585539a1 2665 return FALSE;
9db1d521
HP
2666 }
2667
f3e9edff
UW
2668 if (!base && !indx)
2669 pointer = TRUE;
c7453384 2670
9db1d521
HP
2671 if (out)
2672 {
2673 out->base = base;
2674 out->indx = indx;
585539a1 2675 out->disp = orig_disp;
f3e9edff 2676 out->pointer = pointer;
9db1d521
HP
2677 }
2678
2679 return TRUE;
2680}
2681
994fe660
UW
2682/* Return nonzero if ADDR is a valid memory address.
2683 STRICT specifies whether strict register checking applies. */
2684
9db1d521 2685int
9c808aad
AJ
2686legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2687 register rtx addr, int strict)
9db1d521 2688{
b808c04c
UW
2689 struct s390_address ad;
2690 if (!s390_decompose_address (addr, &ad))
2691 return FALSE;
2692
2693 if (strict)
2694 {
2695 if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2696 return FALSE;
2697 if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
2698 return FALSE;
2699 }
2700 else
2701 {
2702 if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
2703 return FALSE;
2704 if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
2705 return FALSE;
2706 }
2707
2708 return TRUE;
9db1d521
HP
2709}
2710
ba956982
UW
2711/* Return 1 if OP is a valid operand for the LA instruction.
2712 In 31-bit, we need to prove that the result is used as an
2713 address, as LA performs only a 31-bit addition. */
2714
2715int
9c808aad 2716legitimate_la_operand_p (register rtx op)
ba956982
UW
2717{
2718 struct s390_address addr;
b808c04c 2719 if (!s390_decompose_address (op, &addr))
ba956982
UW
2720 return FALSE;
2721
f3e9edff 2722 if (TARGET_64BIT || addr.pointer)
ba956982
UW
2723 return TRUE;
2724
f3e9edff
UW
2725 return FALSE;
2726}
ba956982 2727
c112cf2b 2728/* Return 1 if it is valid *and* preferable to use LA to
e1d5ee28 2729 compute the sum of OP1 and OP2. */
c7453384 2730
100c7420 2731int
e1d5ee28 2732preferred_la_operand_p (rtx op1, rtx op2)
100c7420
UW
2733{
2734 struct s390_address addr;
e1d5ee28
UW
2735
2736 if (op2 != const0_rtx)
2737 op1 = gen_rtx_PLUS (Pmode, op1, op2);
2738
2739 if (!s390_decompose_address (op1, &addr))
2740 return FALSE;
2741 if (addr.base && !REG_OK_FOR_BASE_STRICT_P (addr.base))
2742 return FALSE;
2743 if (addr.indx && !REG_OK_FOR_INDEX_STRICT_P (addr.indx))
100c7420
UW
2744 return FALSE;
2745
2746 if (!TARGET_64BIT && !addr.pointer)
2747 return FALSE;
2748
2749 if (addr.pointer)
2750 return TRUE;
2751
4888ec5d
UW
2752 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2753 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
2754 return TRUE;
100c7420
UW
2755
2756 return FALSE;
2757}
2758
a41c6c53
UW
2759/* Emit a forced load-address operation to load SRC into DST.
2760 This will use the LOAD ADDRESS instruction even in situations
2761 where legitimate_la_operand_p (SRC) returns false. */
ba956982 2762
a41c6c53 2763void
9c808aad 2764s390_load_address (rtx dst, rtx src)
f3e9edff 2765{
a41c6c53
UW
2766 if (TARGET_64BIT)
2767 emit_move_insn (dst, src);
2768 else
2769 emit_insn (gen_force_la_31 (dst, src));
ba956982
UW
2770}
2771
9db1d521
HP
2772/* Return a legitimate reference for ORIG (an address) using the
2773 register REG. If REG is 0, a new pseudo is generated.
2774
2775 There are two types of references that must be handled:
2776
2777 1. Global data references must load the address from the GOT, via
2778 the PIC reg. An insn is emitted to do this load, and the reg is
2779 returned.
2780
2781 2. Static data references, constant pool addresses, and code labels
2782 compute the address as an offset from the GOT, whose base is in
114278e7 2783 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
9db1d521
HP
2784 differentiate them from global data objects. The returned
2785 address is the PIC reg + an unspec constant.
2786
2787 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2788 reg also appears in the address. */
2789
2790rtx
9c808aad 2791legitimize_pic_address (rtx orig, rtx reg)
9db1d521
HP
2792{
2793 rtx addr = orig;
2794 rtx new = orig;
2795 rtx base;
2796
2797 if (GET_CODE (addr) == LABEL_REF
114278e7 2798 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
9db1d521
HP
2799 {
2800 /* This is a local symbol. */
9e8327e3 2801 if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
9db1d521 2802 {
c7453384
EC
2803 /* Access local symbols PC-relative via LARL.
2804 This is the same as in the non-PIC case, so it is
d65f7478 2805 handled automatically ... */
9db1d521
HP
2806 }
2807 else
2808 {
fd7643fb 2809 /* Access local symbols relative to the GOT. */
9db1d521
HP
2810
2811 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2812
fd7643fb
UW
2813 if (reload_in_progress || reload_completed)
2814 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2815
2816 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
e23795ea
UW
2817 addr = gen_rtx_CONST (Pmode, addr);
2818 addr = force_const_mem (Pmode, addr);
9db1d521
HP
2819 emit_move_insn (temp, addr);
2820
fd7643fb 2821 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
9db1d521
HP
2822 if (reg != 0)
2823 {
2824 emit_move_insn (reg, new);
2825 new = reg;
2826 }
2827 }
2828 }
2829 else if (GET_CODE (addr) == SYMBOL_REF)
2830 {
2831 if (reg == 0)
2832 reg = gen_reg_rtx (Pmode);
2833
2834 if (flag_pic == 1)
2835 {
2836 /* Assume GOT offset < 4k. This is handled the same way
fd7643fb 2837 in both 31- and 64-bit code (@GOT). */
9db1d521 2838
c3cc6b78
UW
2839 if (reload_in_progress || reload_completed)
2840 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
9db1d521 2841
fd7643fb 2842 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
9db1d521
HP
2843 new = gen_rtx_CONST (Pmode, new);
2844 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
542a8afa 2845 new = gen_const_mem (Pmode, new);
9db1d521
HP
2846 emit_move_insn (reg, new);
2847 new = reg;
2848 }
9e8327e3 2849 else if (TARGET_CPU_ZARCH)
9db1d521
HP
2850 {
2851 /* If the GOT offset might be >= 4k, we determine the position
2852 of the GOT entry via a PC-relative LARL (@GOTENT). */
2853
2854 rtx temp = gen_reg_rtx (Pmode);
2855
fd7643fb 2856 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
9db1d521
HP
2857 new = gen_rtx_CONST (Pmode, new);
2858 emit_move_insn (temp, new);
2859
542a8afa 2860 new = gen_const_mem (Pmode, temp);
9db1d521
HP
2861 emit_move_insn (reg, new);
2862 new = reg;
2863 }
2864 else
2865 {
c7453384 2866 /* If the GOT offset might be >= 4k, we have to load it
9db1d521
HP
2867 from the literal pool (@GOT). */
2868
2869 rtx temp = gen_reg_rtx (Pmode);
2870
c3cc6b78
UW
2871 if (reload_in_progress || reload_completed)
2872 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
9db1d521 2873
fd7643fb 2874 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
e23795ea
UW
2875 addr = gen_rtx_CONST (Pmode, addr);
2876 addr = force_const_mem (Pmode, addr);
9db1d521
HP
2877 emit_move_insn (temp, addr);
2878
2879 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
542a8afa 2880 new = gen_const_mem (Pmode, new);
9db1d521
HP
2881 emit_move_insn (reg, new);
2882 new = reg;
2883 }
c7453384 2884 }
9db1d521
HP
2885 else
2886 {
2887 if (GET_CODE (addr) == CONST)
2888 {
2889 addr = XEXP (addr, 0);
2890 if (GET_CODE (addr) == UNSPEC)
2891 {
2892 if (XVECLEN (addr, 0) != 1)
994fe660 2893 abort ();
9db1d521
HP
2894 switch (XINT (addr, 1))
2895 {
fd7643fb 2896 /* If someone moved a GOT-relative UNSPEC
9db1d521 2897 out of the literal pool, force them back in. */
fd7643fb
UW
2898 case UNSPEC_GOTOFF:
2899 case UNSPEC_PLTOFF:
e23795ea 2900 new = force_const_mem (Pmode, orig);
9db1d521
HP
2901 break;
2902
fd7643fb
UW
2903 /* @GOT is OK as is if small. */
2904 case UNSPEC_GOT:
2905 if (flag_pic == 2)
2906 new = force_const_mem (Pmode, orig);
2907 break;
2908
9db1d521 2909 /* @GOTENT is OK as is. */
fd7643fb 2910 case UNSPEC_GOTENT:
9db1d521
HP
2911 break;
2912
2913 /* @PLT is OK as is on 64-bit, must be converted to
fd7643fb
UW
2914 GOT-relative @PLTOFF on 31-bit. */
2915 case UNSPEC_PLT:
9e8327e3 2916 if (!TARGET_CPU_ZARCH)
9db1d521
HP
2917 {
2918 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2919
fd7643fb
UW
2920 if (reload_in_progress || reload_completed)
2921 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2922
9db1d521 2923 addr = XVECEXP (addr, 0, 0);
c7453384 2924 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
fd7643fb 2925 UNSPEC_PLTOFF);
e23795ea
UW
2926 addr = gen_rtx_CONST (Pmode, addr);
2927 addr = force_const_mem (Pmode, addr);
9db1d521
HP
2928 emit_move_insn (temp, addr);
2929
fd7643fb 2930 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
9db1d521
HP
2931 if (reg != 0)
2932 {
2933 emit_move_insn (reg, new);
2934 new = reg;
2935 }
2936 }
2937 break;
2938
2939 /* Everything else cannot happen. */
2940 default:
2941 abort ();
2942 }
2943 }
2944 else if (GET_CODE (addr) != PLUS)
994fe660 2945 abort ();
9db1d521
HP
2946 }
2947 if (GET_CODE (addr) == PLUS)
2948 {
2949 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
c7453384 2950 /* Check first to see if this is a constant offset
9db1d521
HP
2951 from a local symbol reference. */
2952 if ((GET_CODE (op0) == LABEL_REF
114278e7 2953 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
9db1d521
HP
2954 && GET_CODE (op1) == CONST_INT)
2955 {
9e8327e3 2956 if (TARGET_CPU_ZARCH && larl_operand (op0, VOIDmode))
9db1d521
HP
2957 {
2958 if (INTVAL (op1) & 1)
2959 {
c7453384 2960 /* LARL can't handle odd offsets, so emit a
9db1d521
HP
2961 pair of LARL and LA. */
2962 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2963
d3632d41 2964 if (!DISP_IN_RANGE (INTVAL (op1)))
9db1d521
HP
2965 {
2966 int even = INTVAL (op1) - 1;
2967 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
b30d2115 2968 op0 = gen_rtx_CONST (Pmode, op0);
a556fd39 2969 op1 = const1_rtx;
9db1d521
HP
2970 }
2971
2972 emit_move_insn (temp, op0);
2973 new = gen_rtx_PLUS (Pmode, temp, op1);
2974
2975 if (reg != 0)
2976 {
2977 emit_move_insn (reg, new);
2978 new = reg;
2979 }
2980 }
2981 else
2982 {
2983 /* If the offset is even, we can just use LARL.
2984 This will happen automatically. */
2985 }
2986 }
2987 else
2988 {
fd7643fb 2989 /* Access local symbols relative to the GOT. */
9db1d521
HP
2990
2991 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2992
fd7643fb
UW
2993 if (reload_in_progress || reload_completed)
2994 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2995
c7453384 2996 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
fd7643fb 2997 UNSPEC_GOTOFF);
e23795ea
UW
2998 addr = gen_rtx_PLUS (Pmode, addr, op1);
2999 addr = gen_rtx_CONST (Pmode, addr);
3000 addr = force_const_mem (Pmode, addr);
cfbab41c 3001 emit_move_insn (temp, addr);
9db1d521 3002
fd7643fb 3003 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
9db1d521
HP
3004 if (reg != 0)
3005 {
3006 emit_move_insn (reg, new);
3007 new = reg;
3008 }
3009 }
3010 }
3011
fd7643fb 3012 /* Now, check whether it is a GOT relative symbol plus offset
9db1d521
HP
3013 that was pulled out of the literal pool. Force it back in. */
3014
3015 else if (GET_CODE (op0) == UNSPEC
cfbab41c
JJ
3016 && GET_CODE (op1) == CONST_INT
3017 && XINT (op0, 1) == UNSPEC_GOTOFF)
9db1d521
HP
3018 {
3019 if (XVECLEN (op0, 0) != 1)
994fe660 3020 abort ();
9db1d521 3021
e23795ea 3022 new = force_const_mem (Pmode, orig);
9db1d521
HP
3023 }
3024
3025 /* Otherwise, compute the sum. */
3026 else
3027 {
3028 base = legitimize_pic_address (XEXP (addr, 0), reg);
3029 new = legitimize_pic_address (XEXP (addr, 1),
3030 base == reg ? NULL_RTX : reg);
3031 if (GET_CODE (new) == CONST_INT)
3032 new = plus_constant (base, INTVAL (new));
3033 else
3034 {
3035 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
3036 {
3037 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
3038 new = XEXP (new, 1);
3039 }
3040 new = gen_rtx_PLUS (Pmode, base, new);
3041 }
3042
3043 if (GET_CODE (new) == CONST)
3044 new = XEXP (new, 0);
3045 new = force_operand (new, 0);
3046 }
3047 }
3048 }
3049 return new;
3050}
3051
fd3cd001
UW
3052/* Load the thread pointer into a register. */
3053
3054static rtx
9c808aad 3055get_thread_pointer (void)
fd3cd001 3056{
c5aa1d12 3057 rtx tp = gen_reg_rtx (Pmode);
fd3cd001 3058
c5aa1d12 3059 emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
fd3cd001
UW
3060 mark_reg_pointer (tp, BITS_PER_WORD);
3061
3062 return tp;
3063}
3064
ed9676cf
AK
3065/* Emit a tls call insn. The call target is the SYMBOL_REF stored
3066 in s390_tls_symbol which always refers to __tls_get_offset.
3067 The returned offset is written to RESULT_REG and an USE rtx is
3068 generated for TLS_CALL. */
fd3cd001
UW
3069
3070static GTY(()) rtx s390_tls_symbol;
ed9676cf
AK
3071
3072static void
3073s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
fd3cd001 3074{
ed9676cf 3075 rtx insn;
38899e29 3076
ed9676cf
AK
3077 if (!flag_pic)
3078 abort ();
3079
fd3cd001
UW
3080 if (!s390_tls_symbol)
3081 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
3082
38899e29
EC
3083 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
3084 gen_rtx_REG (Pmode, RETURN_REGNUM));
ed9676cf
AK
3085
3086 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
3087 CONST_OR_PURE_CALL_P (insn) = 1;
fd3cd001
UW
3088}
3089
3090/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3091 this (thread-local) address. REG may be used as temporary. */
3092
3093static rtx
9c808aad 3094legitimize_tls_address (rtx addr, rtx reg)
fd3cd001
UW
3095{
3096 rtx new, tls_call, temp, base, r2, insn;
3097
3098 if (GET_CODE (addr) == SYMBOL_REF)
3099 switch (tls_symbolic_operand (addr))
3100 {
3101 case TLS_MODEL_GLOBAL_DYNAMIC:
3102 start_sequence ();
3103 r2 = gen_rtx_REG (Pmode, 2);
3104 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
3105 new = gen_rtx_CONST (Pmode, tls_call);
3106 new = force_const_mem (Pmode, new);
3107 emit_move_insn (r2, new);
ed9676cf 3108 s390_emit_tls_call_insn (r2, tls_call);
fd3cd001
UW
3109 insn = get_insns ();
3110 end_sequence ();
3111
3112 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3113 temp = gen_reg_rtx (Pmode);
3114 emit_libcall_block (insn, temp, r2, new);
3115
3116 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3117 if (reg != 0)
3118 {
3119 s390_load_address (reg, new);
3120 new = reg;
3121 }
3122 break;
3123
3124 case TLS_MODEL_LOCAL_DYNAMIC:
3125 start_sequence ();
3126 r2 = gen_rtx_REG (Pmode, 2);
3127 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
3128 new = gen_rtx_CONST (Pmode, tls_call);
3129 new = force_const_mem (Pmode, new);
3130 emit_move_insn (r2, new);
ed9676cf 3131 s390_emit_tls_call_insn (r2, tls_call);
fd3cd001
UW
3132 insn = get_insns ();
3133 end_sequence ();
3134
3135 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
3136 temp = gen_reg_rtx (Pmode);
3137 emit_libcall_block (insn, temp, r2, new);
3138
3139 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3140 base = gen_reg_rtx (Pmode);
3141 s390_load_address (base, new);
3142
3143 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
3144 new = gen_rtx_CONST (Pmode, new);
3145 new = force_const_mem (Pmode, new);
3146 temp = gen_reg_rtx (Pmode);
3147 emit_move_insn (temp, new);
3148
3149 new = gen_rtx_PLUS (Pmode, base, temp);
3150 if (reg != 0)
3151 {
3152 s390_load_address (reg, new);
3153 new = reg;
3154 }
3155 break;
3156
3157 case TLS_MODEL_INITIAL_EXEC:
3158 if (flag_pic == 1)
3159 {
3160 /* Assume GOT offset < 4k. This is handled the same way
3161 in both 31- and 64-bit code. */
3162
3163 if (reload_in_progress || reload_completed)
3164 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
3165
3166 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3167 new = gen_rtx_CONST (Pmode, new);
3168 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
542a8afa 3169 new = gen_const_mem (Pmode, new);
fd3cd001
UW
3170 temp = gen_reg_rtx (Pmode);
3171 emit_move_insn (temp, new);
3172 }
9e8327e3 3173 else if (TARGET_CPU_ZARCH)
fd3cd001
UW
3174 {
3175 /* If the GOT offset might be >= 4k, we determine the position
3176 of the GOT entry via a PC-relative LARL. */
3177
3178 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3179 new = gen_rtx_CONST (Pmode, new);
3180 temp = gen_reg_rtx (Pmode);
3181 emit_move_insn (temp, new);
3182
542a8afa 3183 new = gen_const_mem (Pmode, temp);
fd3cd001
UW
3184 temp = gen_reg_rtx (Pmode);
3185 emit_move_insn (temp, new);
3186 }
3187 else if (flag_pic)
3188 {
c7453384 3189 /* If the GOT offset might be >= 4k, we have to load it
fd3cd001
UW
3190 from the literal pool. */
3191
3192 if (reload_in_progress || reload_completed)
3193 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
3194
3195 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3196 new = gen_rtx_CONST (Pmode, new);
3197 new = force_const_mem (Pmode, new);
3198 temp = gen_reg_rtx (Pmode);
3199 emit_move_insn (temp, new);
3200
3201 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
542a8afa 3202 new = gen_const_mem (Pmode, new);
fd3cd001
UW
3203
3204 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3205 temp = gen_reg_rtx (Pmode);
3206 emit_insn (gen_rtx_SET (Pmode, temp, new));
3207 }
3208 else
3209 {
3210 /* In position-dependent code, load the absolute address of
3211 the GOT entry from the literal pool. */
3212
3213 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3214 new = gen_rtx_CONST (Pmode, new);
3215 new = force_const_mem (Pmode, new);
3216 temp = gen_reg_rtx (Pmode);
3217 emit_move_insn (temp, new);
3218
3219 new = temp;
542a8afa 3220 new = gen_const_mem (Pmode, new);
fd3cd001
UW
3221 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3222 temp = gen_reg_rtx (Pmode);
3223 emit_insn (gen_rtx_SET (Pmode, temp, new));
3224 }
3225
3226 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3227 if (reg != 0)
3228 {
3229 s390_load_address (reg, new);
3230 new = reg;
3231 }
3232 break;
3233
3234 case TLS_MODEL_LOCAL_EXEC:
3235 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3236 new = gen_rtx_CONST (Pmode, new);
3237 new = force_const_mem (Pmode, new);
3238 temp = gen_reg_rtx (Pmode);
3239 emit_move_insn (temp, new);
3240
3241 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3242 if (reg != 0)
3243 {
3244 s390_load_address (reg, new);
3245 new = reg;
3246 }
3247 break;
3248
3249 default:
3250 abort ();
3251 }
3252
3253 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
3254 {
3255 switch (XINT (XEXP (addr, 0), 1))
3256 {
3257 case UNSPEC_INDNTPOFF:
9e8327e3 3258 if (TARGET_CPU_ZARCH)
fd3cd001
UW
3259 new = addr;
3260 else
3261 abort ();
3262 break;
3263
3264 default:
3265 abort ();
3266 }
3267 }
3268
578d1468
UW
3269 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3270 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3271 {
3272 new = XEXP (XEXP (addr, 0), 0);
3273 if (GET_CODE (new) != SYMBOL_REF)
3274 new = gen_rtx_CONST (Pmode, new);
3275
3276 new = legitimize_tls_address (new, reg);
3277 new = plus_constant (new, INTVAL (XEXP (XEXP (addr, 0), 1)));
3278 new = force_operand (new, 0);
3279 }
3280
fd3cd001
UW
3281 else
3282 abort (); /* for now ... */
3283
3284 return new;
3285}
3286
9db1d521
HP
3287/* Emit insns to move operands[1] into operands[0]. */
3288
3289void
9c808aad 3290emit_symbolic_move (rtx *operands)
9db1d521 3291{
4023fb28 3292 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
9db1d521 3293
fd3cd001 3294 if (GET_CODE (operands[0]) == MEM)
9db1d521 3295 operands[1] = force_reg (Pmode, operands[1]);
fd3cd001
UW
3296 else if (TLS_SYMBOLIC_CONST (operands[1]))
3297 operands[1] = legitimize_tls_address (operands[1], temp);
3298 else if (flag_pic)
9db1d521
HP
3299 operands[1] = legitimize_pic_address (operands[1], temp);
3300}
3301
994fe660 3302/* Try machine-dependent ways of modifying an illegitimate address X
9db1d521 3303 to be legitimate. If we find one, return the new, valid address.
9db1d521
HP
3304
3305 OLDX is the address as it was before break_out_memory_refs was called.
3306 In some cases it is useful to look at this to decide what needs to be done.
3307
994fe660 3308 MODE is the mode of the operand pointed to by X.
9db1d521
HP
3309
3310 When -fpic is used, special handling is needed for symbolic references.
3311 See comments by legitimize_pic_address for details. */
3312
3313rtx
9c808aad
AJ
3314legitimize_address (register rtx x, register rtx oldx ATTRIBUTE_UNUSED,
3315 enum machine_mode mode ATTRIBUTE_UNUSED)
9db1d521 3316{
ba956982 3317 rtx constant_term = const0_rtx;
9db1d521 3318
fd3cd001
UW
3319 if (TLS_SYMBOLIC_CONST (x))
3320 {
3321 x = legitimize_tls_address (x, 0);
3322
3323 if (legitimate_address_p (mode, x, FALSE))
3324 return x;
3325 }
3326 else if (flag_pic)
9db1d521 3327 {
ba956982 3328 if (SYMBOLIC_CONST (x)
c7453384
EC
3329 || (GET_CODE (x) == PLUS
3330 && (SYMBOLIC_CONST (XEXP (x, 0))
ba956982
UW
3331 || SYMBOLIC_CONST (XEXP (x, 1)))))
3332 x = legitimize_pic_address (x, 0);
3333
3334 if (legitimate_address_p (mode, x, FALSE))
3335 return x;
9db1d521 3336 }
9db1d521 3337
ba956982 3338 x = eliminate_constant_term (x, &constant_term);
994fe660 3339
61f02ff5
UW
3340 /* Optimize loading of large displacements by splitting them
3341 into the multiple of 4K and the rest; this allows the
c7453384 3342 former to be CSE'd if possible.
61f02ff5
UW
3343
3344 Don't do this if the displacement is added to a register
3345 pointing into the stack frame, as the offsets will
3346 change later anyway. */
3347
3348 if (GET_CODE (constant_term) == CONST_INT
d3632d41
UW
3349 && !TARGET_LONG_DISPLACEMENT
3350 && !DISP_IN_RANGE (INTVAL (constant_term))
61f02ff5
UW
3351 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
3352 {
3353 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
3354 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
3355
3356 rtx temp = gen_reg_rtx (Pmode);
3357 rtx val = force_operand (GEN_INT (upper), temp);
3358 if (val != temp)
3359 emit_move_insn (temp, val);
3360
3361 x = gen_rtx_PLUS (Pmode, x, temp);
3362 constant_term = GEN_INT (lower);
3363 }
3364
ba956982 3365 if (GET_CODE (x) == PLUS)
9db1d521 3366 {
ba956982
UW
3367 if (GET_CODE (XEXP (x, 0)) == REG)
3368 {
3369 register rtx temp = gen_reg_rtx (Pmode);
3370 register rtx val = force_operand (XEXP (x, 1), temp);
3371 if (val != temp)
3372 emit_move_insn (temp, val);
3373
3374 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
3375 }
3376
3377 else if (GET_CODE (XEXP (x, 1)) == REG)
3378 {
3379 register rtx temp = gen_reg_rtx (Pmode);
3380 register rtx val = force_operand (XEXP (x, 0), temp);
3381 if (val != temp)
3382 emit_move_insn (temp, val);
3383
3384 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
3385 }
9db1d521 3386 }
ba956982
UW
3387
3388 if (constant_term != const0_rtx)
3389 x = gen_rtx_PLUS (Pmode, x, constant_term);
3390
3391 return x;
9db1d521
HP
3392}
3393
0b540f12
UW
3394/* Try a machine-dependent way of reloading an illegitimate address AD
3395 operand. If we find one, push the reload and and return the new address.
3396
3397 MODE is the mode of the enclosing MEM. OPNUM is the operand number
3398 and TYPE is the reload type of the current reload. */
3399
3400rtx
3401legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
3402 int opnum, int type)
3403{
3404 if (!optimize || TARGET_LONG_DISPLACEMENT)
3405 return NULL_RTX;
3406
3407 if (GET_CODE (ad) == PLUS)
3408 {
3409 rtx tem = simplify_binary_operation (PLUS, Pmode,
3410 XEXP (ad, 0), XEXP (ad, 1));
3411 if (tem)
3412 ad = tem;
3413 }
3414
3415 if (GET_CODE (ad) == PLUS
3416 && GET_CODE (XEXP (ad, 0)) == REG
3417 && GET_CODE (XEXP (ad, 1)) == CONST_INT
3418 && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
3419 {
3420 HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
3421 HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
3422 rtx cst, tem, new;
3423
3424 cst = GEN_INT (upper);
3425 if (!legitimate_reload_constant_p (cst))
3426 cst = force_const_mem (Pmode, cst);
3427
3428 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
3429 new = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));
3430
3431 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
3432 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3433 opnum, (enum reload_type) type);
3434 return new;
3435 }
3436
3437 return NULL_RTX;
3438}
3439
a41c6c53
UW
3440/* Emit code to move LEN bytes from DST to SRC. */
3441
3442void
70128ad9 3443s390_expand_movmem (rtx dst, rtx src, rtx len)
a41c6c53 3444{
a41c6c53
UW
3445 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3446 {
3447 if (INTVAL (len) > 0)
70128ad9 3448 emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
a41c6c53
UW
3449 }
3450
3451 else if (TARGET_MVCLE)
3452 {
70128ad9 3453 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
a41c6c53
UW
3454 }
3455
3456 else
3457 {
3458 rtx dst_addr, src_addr, count, blocks, temp;
70315fcd 3459 rtx loop_start_label = gen_label_rtx ();
6de9cd9a 3460 rtx loop_end_label = gen_label_rtx ();
a41c6c53
UW
3461 rtx end_label = gen_label_rtx ();
3462 enum machine_mode mode;
a41c6c53
UW
3463
3464 mode = GET_MODE (len);
3465 if (mode == VOIDmode)
b9404c99 3466 mode = Pmode;
a41c6c53 3467
a41c6c53
UW
3468 dst_addr = gen_reg_rtx (Pmode);
3469 src_addr = gen_reg_rtx (Pmode);
3470 count = gen_reg_rtx (mode);
3471 blocks = gen_reg_rtx (mode);
3472
3473 convert_move (count, len, 1);
c7453384 3474 emit_cmp_and_jump_insns (count, const0_rtx,
a41c6c53
UW
3475 EQ, NULL_RTX, mode, 1, end_label);
3476
3477 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3478 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
3479 dst = change_address (dst, VOIDmode, dst_addr);
3480 src = change_address (src, VOIDmode, src_addr);
c7453384 3481
a41c6c53
UW
3482 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3483 if (temp != count)
3484 emit_move_insn (count, temp);
3485
3486 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3487 if (temp != blocks)
3488 emit_move_insn (blocks, temp);
3489
6de9cd9a
DN
3490 emit_cmp_and_jump_insns (blocks, const0_rtx,
3491 EQ, NULL_RTX, mode, 1, loop_end_label);
70315fcd
SB
3492
3493 emit_label (loop_start_label);
a41c6c53 3494
70128ad9 3495 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
c7453384 3496 s390_load_address (dst_addr,
a41c6c53 3497 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
c7453384 3498 s390_load_address (src_addr,
a41c6c53 3499 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
c7453384 3500
a41c6c53
UW
3501 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3502 if (temp != blocks)
3503 emit_move_insn (blocks, temp);
3504
6de9cd9a
DN
3505 emit_cmp_and_jump_insns (blocks, const0_rtx,
3506 EQ, NULL_RTX, mode, 1, loop_end_label);
70315fcd
SB
3507
3508 emit_jump (loop_start_label);
6de9cd9a 3509 emit_label (loop_end_label);
a41c6c53 3510
70128ad9 3511 emit_insn (gen_movmem_short (dst, src,
b9404c99 3512 convert_to_mode (Pmode, count, 1)));
a41c6c53
UW
3513 emit_label (end_label);
3514 }
3515}
3516
3517/* Emit code to clear LEN bytes at DST. */
3518
3519void
70128ad9 3520s390_expand_clrmem (rtx dst, rtx len)
a41c6c53 3521{
a41c6c53
UW
3522 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3523 {
3524 if (INTVAL (len) > 0)
70128ad9 3525 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
a41c6c53
UW
3526 }
3527
3528 else if (TARGET_MVCLE)
3529 {
70128ad9 3530 emit_insn (gen_clrmem_long (dst, convert_to_mode (Pmode, len, 1)));
a41c6c53
UW
3531 }
3532
3533 else
3534 {
3535 rtx dst_addr, src_addr, count, blocks, temp;
70315fcd 3536 rtx loop_start_label = gen_label_rtx ();
6de9cd9a 3537 rtx loop_end_label = gen_label_rtx ();
a41c6c53
UW
3538 rtx end_label = gen_label_rtx ();
3539 enum machine_mode mode;
a41c6c53
UW
3540
3541 mode = GET_MODE (len);
3542 if (mode == VOIDmode)
b9404c99 3543 mode = Pmode;
a41c6c53 3544
a41c6c53
UW
3545 dst_addr = gen_reg_rtx (Pmode);
3546 src_addr = gen_reg_rtx (Pmode);
3547 count = gen_reg_rtx (mode);
3548 blocks = gen_reg_rtx (mode);
3549
3550 convert_move (count, len, 1);
c7453384 3551 emit_cmp_and_jump_insns (count, const0_rtx,
a41c6c53
UW
3552 EQ, NULL_RTX, mode, 1, end_label);
3553
3554 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3555 dst = change_address (dst, VOIDmode, dst_addr);
c7453384 3556
a41c6c53
UW
3557 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3558 if (temp != count)
3559 emit_move_insn (count, temp);
3560
3561 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3562 if (temp != blocks)
3563 emit_move_insn (blocks, temp);
3564
6de9cd9a
DN
3565 emit_cmp_and_jump_insns (blocks, const0_rtx,
3566 EQ, NULL_RTX, mode, 1, loop_end_label);
70315fcd
SB
3567
3568 emit_label (loop_start_label);
a41c6c53 3569
70128ad9 3570 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
c7453384 3571 s390_load_address (dst_addr,
a41c6c53 3572 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
c7453384 3573
a41c6c53
UW
3574 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3575 if (temp != blocks)
3576 emit_move_insn (blocks, temp);
3577
6de9cd9a
DN
3578 emit_cmp_and_jump_insns (blocks, const0_rtx,
3579 EQ, NULL_RTX, mode, 1, loop_end_label);
70315fcd
SB
3580
3581 emit_jump (loop_start_label);
6de9cd9a 3582 emit_label (loop_end_label);
a41c6c53 3583
70128ad9 3584 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
a41c6c53
UW
3585 emit_label (end_label);
3586 }
3587}
3588
3589/* Emit code to compare LEN bytes at OP0 with those at OP1,
3590 and return the result in TARGET. */
3591
3592void
9c808aad 3593s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
a41c6c53 3594{
5b022de5
UW
3595 rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
3596 rtx result = gen_rtx_UNSPEC (SImode, gen_rtvec (1, ccreg), UNSPEC_CMPINT);
a41c6c53 3597
a41c6c53
UW
3598 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3599 {
3600 if (INTVAL (len) > 0)
3601 {
b9404c99 3602 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
5b022de5 3603 emit_move_insn (target, result);
a41c6c53
UW
3604 }
3605 else
3606 emit_move_insn (target, const0_rtx);
3607 }
9dc62c00 3608 else if (TARGET_MVCLE)
a41c6c53 3609 {
b9404c99 3610 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
5b022de5 3611 emit_move_insn (target, result);
a41c6c53 3612 }
a41c6c53
UW
3613 else
3614 {
3615 rtx addr0, addr1, count, blocks, temp;
70315fcd 3616 rtx loop_start_label = gen_label_rtx ();
6de9cd9a 3617 rtx loop_end_label = gen_label_rtx ();
a41c6c53
UW
3618 rtx end_label = gen_label_rtx ();
3619 enum machine_mode mode;
a41c6c53
UW
3620
3621 mode = GET_MODE (len);
3622 if (mode == VOIDmode)
b9404c99 3623 mode = Pmode;
a41c6c53 3624
a41c6c53
UW
3625 addr0 = gen_reg_rtx (Pmode);
3626 addr1 = gen_reg_rtx (Pmode);
3627 count = gen_reg_rtx (mode);
3628 blocks = gen_reg_rtx (mode);
3629
3630 convert_move (count, len, 1);
c7453384 3631 emit_cmp_and_jump_insns (count, const0_rtx,
a41c6c53
UW
3632 EQ, NULL_RTX, mode, 1, end_label);
3633
3634 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
3635 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
3636 op0 = change_address (op0, VOIDmode, addr0);
3637 op1 = change_address (op1, VOIDmode, addr1);
c7453384 3638
a41c6c53
UW
3639 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3640 if (temp != count)
3641 emit_move_insn (count, temp);
3642
3643 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3644 if (temp != blocks)
3645 emit_move_insn (blocks, temp);
3646
6de9cd9a
DN
3647 emit_cmp_and_jump_insns (blocks, const0_rtx,
3648 EQ, NULL_RTX, mode, 1, loop_end_label);
70315fcd
SB
3649
3650 emit_label (loop_start_label);
a41c6c53 3651
b9404c99 3652 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
5b022de5 3653 temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
c7453384 3654 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
a41c6c53
UW
3655 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
3656 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
3657 emit_jump_insn (temp);
3658
c7453384 3659 s390_load_address (addr0,
a41c6c53 3660 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
c7453384 3661 s390_load_address (addr1,
a41c6c53 3662 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
c7453384 3663
a41c6c53
UW
3664 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3665 if (temp != blocks)
3666 emit_move_insn (blocks, temp);
3667
6de9cd9a
DN
3668 emit_cmp_and_jump_insns (blocks, const0_rtx,
3669 EQ, NULL_RTX, mode, 1, loop_end_label);
70315fcd
SB
3670
3671 emit_jump (loop_start_label);
6de9cd9a 3672 emit_label (loop_end_label);
a41c6c53 3673
38899e29 3674 emit_insn (gen_cmpmem_short (op0, op1,
b9404c99 3675 convert_to_mode (Pmode, count, 1)));
a41c6c53
UW
3676 emit_label (end_label);
3677
5b022de5 3678 emit_move_insn (target, result);
a41c6c53
UW
3679 }
3680}
3681
5d880bd2
UW
3682
3683/* Expand conditional increment or decrement using alc/slb instructions.
3684 Should generate code setting DST to either SRC or SRC + INCREMENT,
3685 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
3686 Returns true if successful, false otherwise. */
3687
3688bool
3689s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
3690 rtx dst, rtx src, rtx increment)
3691{
3692 enum machine_mode cmp_mode;
3693 enum machine_mode cc_mode;
3694 rtx op_res;
3695 rtx insn;
3696 rtvec p;
3697
3698 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
3699 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
3700 cmp_mode = SImode;
3701 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
3702 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
3703 cmp_mode = DImode;
3704 else
3705 return false;
3706
3707 /* Try ADD LOGICAL WITH CARRY. */
3708 if (increment == const1_rtx)
3709 {
3710 /* Determine CC mode to use. */
3711 if (cmp_code == EQ || cmp_code == NE)
3712 {
3713 if (cmp_op1 != const0_rtx)
3714 {
3715 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3716 NULL_RTX, 0, OPTAB_WIDEN);
3717 cmp_op1 = const0_rtx;
3718 }
3719
3720 cmp_code = cmp_code == EQ ? LEU : GTU;
3721 }
3722
3723 if (cmp_code == LTU || cmp_code == LEU)
3724 {
3725 rtx tem = cmp_op0;
3726 cmp_op0 = cmp_op1;
3727 cmp_op1 = tem;
3728 cmp_code = swap_condition (cmp_code);
3729 }
3730
3731 switch (cmp_code)
3732 {
3733 case GTU:
3734 cc_mode = CCUmode;
3735 break;
3736
3737 case GEU:
3738 cc_mode = CCL3mode;
3739 break;
3740
3741 default:
3742 return false;
3743 }
3744
3745 /* Emit comparison instruction pattern. */
3746 if (!register_operand (cmp_op0, cmp_mode))
3747 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3748
3749 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3750 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3751 /* We use insn_invalid_p here to add clobbers if required. */
3752 if (insn_invalid_p (emit_insn (insn)))
3753 abort ();
3754
3755 /* Emit ALC instruction pattern. */
3756 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3757 gen_rtx_REG (cc_mode, CC_REGNUM),
3758 const0_rtx);
3759
3760 if (src != const0_rtx)
3761 {
3762 if (!register_operand (src, GET_MODE (dst)))
3763 src = force_reg (GET_MODE (dst), src);
3764
3765 src = gen_rtx_PLUS (GET_MODE (dst), src, const0_rtx);
3766 op_res = gen_rtx_PLUS (GET_MODE (dst), src, op_res);
3767 }
3768
3769 p = rtvec_alloc (2);
3770 RTVEC_ELT (p, 0) =
3771 gen_rtx_SET (VOIDmode, dst, op_res);
3772 RTVEC_ELT (p, 1) =
3773 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3774 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3775
3776 return true;
3777 }
3778
3779 /* Try SUBTRACT LOGICAL WITH BORROW. */
3780 if (increment == constm1_rtx)
3781 {
3782 /* Determine CC mode to use. */
3783 if (cmp_code == EQ || cmp_code == NE)
3784 {
3785 if (cmp_op1 != const0_rtx)
3786 {
3787 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3788 NULL_RTX, 0, OPTAB_WIDEN);
3789 cmp_op1 = const0_rtx;
3790 }
3791
3792 cmp_code = cmp_code == EQ ? LEU : GTU;
3793 }
3794
3795 if (cmp_code == GTU || cmp_code == GEU)
3796 {
3797 rtx tem = cmp_op0;
3798 cmp_op0 = cmp_op1;
3799 cmp_op1 = tem;
3800 cmp_code = swap_condition (cmp_code);
3801 }
3802
3803 switch (cmp_code)
3804 {
3805 case LEU:
3806 cc_mode = CCUmode;
3807 break;
3808
3809 case LTU:
3810 cc_mode = CCL3mode;
3811 break;
3812
3813 default:
3814 return false;
3815 }
3816
3817 /* Emit comparison instruction pattern. */
3818 if (!register_operand (cmp_op0, cmp_mode))
3819 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3820
3821 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3822 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3823 /* We use insn_invalid_p here to add clobbers if required. */
3824 if (insn_invalid_p (emit_insn (insn)))
3825 abort ();
3826
3827 /* Emit SLB instruction pattern. */
3828 if (!register_operand (src, GET_MODE (dst)))
3829 src = force_reg (GET_MODE (dst), src);
3830
3831 op_res = gen_rtx_MINUS (GET_MODE (dst),
3832 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
3833 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3834 gen_rtx_REG (cc_mode, CC_REGNUM),
3835 const0_rtx));
3836 p = rtvec_alloc (2);
3837 RTVEC_ELT (p, 0) =
3838 gen_rtx_SET (VOIDmode, dst, op_res);
3839 RTVEC_ELT (p, 1) =
3840 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3841 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3842
3843 return true;
3844 }
3845
3846 return false;
3847}
3848
3849
6b2300b3
JJ
3850/* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3851 We need to emit DTP-relative relocations. */
3852
3853void
9c808aad 3854s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
6b2300b3
JJ
3855{
3856 switch (size)
3857 {
3858 case 4:
3859 fputs ("\t.long\t", file);
3860 break;
3861 case 8:
3862 fputs ("\t.quad\t", file);
3863 break;
3864 default:
3865 abort ();
3866 }
3867 output_addr_const (file, x);
3868 fputs ("@DTPOFF", file);
3869}
3870
4c8c0dec
JJ
3871/* In the name of slightly smaller debug output, and to cater to
3872 general assembler losage, recognize various UNSPEC sequences
3873 and turn them back into a direct symbol reference. */
3874
69bd9368 3875static rtx
9c808aad 3876s390_delegitimize_address (rtx orig_x)
4c8c0dec
JJ
3877{
3878 rtx x = orig_x, y;
3879
3880 if (GET_CODE (x) != MEM)
3881 return orig_x;
3882
3883 x = XEXP (x, 0);
3884 if (GET_CODE (x) == PLUS
3885 && GET_CODE (XEXP (x, 1)) == CONST
3886 && GET_CODE (XEXP (x, 0)) == REG
3887 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
3888 {
3889 y = XEXP (XEXP (x, 1), 0);
3890 if (GET_CODE (y) == UNSPEC
fd7643fb 3891 && XINT (y, 1) == UNSPEC_GOT)
4c8c0dec
JJ
3892 return XVECEXP (y, 0, 0);
3893 return orig_x;
3894 }
3895
3896 if (GET_CODE (x) == CONST)
3897 {
3898 y = XEXP (x, 0);
3899 if (GET_CODE (y) == UNSPEC
fd7643fb 3900 && XINT (y, 1) == UNSPEC_GOTENT)
4c8c0dec
JJ
3901 return XVECEXP (y, 0, 0);
3902 return orig_x;
3903 }
3904
c7453384 3905 return orig_x;
4c8c0dec 3906}
ba956982 3907
ac32b25e
UW
3908/* Output shift count operand OP to stdio stream FILE. */
3909
3910static void
3911print_shift_count_operand (FILE *file, rtx op)
3912{
3913 HOST_WIDE_INT offset = 0;
3914
3915 /* We can have an integer constant, an address register,
3916 or a sum of the two. */
3917 if (GET_CODE (op) == CONST_INT)
3918 {
3919 offset = INTVAL (op);
3920 op = NULL_RTX;
3921 }
3922 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
3923 {
3924 offset = INTVAL (XEXP (op, 1));
3925 op = XEXP (op, 0);
3926 }
3927 while (op && GET_CODE (op) == SUBREG)
3928 op = SUBREG_REG (op);
3929
3930 /* Sanity check. */
3931 if (op && (GET_CODE (op) != REG
3932 || REGNO (op) >= FIRST_PSEUDO_REGISTER
3933 || REGNO_REG_CLASS (REGNO (op)) != ADDR_REGS))
3934 abort ();
3935
3936 /* Shift counts are truncated to the low six bits anyway. */
3937 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & 63);
3938 if (op)
3939 fprintf (file, "(%s)", reg_names[REGNO (op)]);
3940}
3941
fd3cd001
UW
3942/* Locate some local-dynamic symbol still in use by this function
3943 so that we can print its name in local-dynamic base patterns. */
3944
3945static const char *
9c808aad 3946get_some_local_dynamic_name (void)
fd3cd001
UW
3947{
3948 rtx insn;
3949
3950 if (cfun->machine->some_ld_name)
3951 return cfun->machine->some_ld_name;
3952
3953 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
3954 if (INSN_P (insn)
3955 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
3956 return cfun->machine->some_ld_name;
3957
3958 abort ();
3959}
3960
3961static int
9c808aad 3962get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
fd3cd001
UW
3963{
3964 rtx x = *px;
3965
3966 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3967 {
3968 x = get_pool_constant (x);
3969 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
3970 }
3971
3972 if (GET_CODE (x) == SYMBOL_REF
3973 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
3974 {
3975 cfun->machine->some_ld_name = XSTR (x, 0);
3976 return 1;
3977 }
3978
3979 return 0;
3980}
3981
38899e29 3982/* Output machine-dependent UNSPECs occurring in address constant X
faeb9bb6
UW
3983 in assembler syntax to stdio stream FILE. Returns true if the
3984 constant X could be recognized, false otherwise. */
9db1d521 3985
faeb9bb6
UW
3986bool
3987s390_output_addr_const_extra (FILE *file, rtx x)
9db1d521 3988{
faeb9bb6
UW
3989 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
3990 switch (XINT (x, 1))
3991 {
3992 case UNSPEC_GOTENT:
3993 output_addr_const (file, XVECEXP (x, 0, 0));
3994 fprintf (file, "@GOTENT");
3995 return true;
3996 case UNSPEC_GOT:
3997 output_addr_const (file, XVECEXP (x, 0, 0));
3998 fprintf (file, "@GOT");
3999 return true;
4000 case UNSPEC_GOTOFF:
4001 output_addr_const (file, XVECEXP (x, 0, 0));
4002 fprintf (file, "@GOTOFF");
4003 return true;
4004 case UNSPEC_PLT:
4005 output_addr_const (file, XVECEXP (x, 0, 0));
4006 fprintf (file, "@PLT");
4007 return true;
4008 case UNSPEC_PLTOFF:
4009 output_addr_const (file, XVECEXP (x, 0, 0));
4010 fprintf (file, "@PLTOFF");
4011 return true;
4012 case UNSPEC_TLSGD:
4013 output_addr_const (file, XVECEXP (x, 0, 0));
4014 fprintf (file, "@TLSGD");
4015 return true;
4016 case UNSPEC_TLSLDM:
4017 assemble_name (file, get_some_local_dynamic_name ());
4018 fprintf (file, "@TLSLDM");
4019 return true;
4020 case UNSPEC_DTPOFF:
4021 output_addr_const (file, XVECEXP (x, 0, 0));
4022 fprintf (file, "@DTPOFF");
4023 return true;
4024 case UNSPEC_NTPOFF:
4025 output_addr_const (file, XVECEXP (x, 0, 0));
4026 fprintf (file, "@NTPOFF");
4027 return true;
4028 case UNSPEC_GOTNTPOFF:
4029 output_addr_const (file, XVECEXP (x, 0, 0));
4030 fprintf (file, "@GOTNTPOFF");
4031 return true;
4032 case UNSPEC_INDNTPOFF:
4033 output_addr_const (file, XVECEXP (x, 0, 0));
4034 fprintf (file, "@INDNTPOFF");
4035 return true;
4036 }
9db1d521 4037
faeb9bb6 4038 return false;
9db1d521
HP
4039}
4040
c7453384 4041/* Output address operand ADDR in assembler syntax to
994fe660 4042 stdio stream FILE. */
9db1d521
HP
4043
4044void
9c808aad 4045print_operand_address (FILE *file, rtx addr)
9db1d521
HP
4046{
4047 struct s390_address ad;
4048
b808c04c
UW
4049 if (!s390_decompose_address (addr, &ad)
4050 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4051 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
4023fb28 4052 output_operand_lossage ("Cannot decompose address.");
c7453384 4053
9db1d521 4054 if (ad.disp)
faeb9bb6 4055 output_addr_const (file, ad.disp);
9db1d521
HP
4056 else
4057 fprintf (file, "0");
4058
4059 if (ad.base && ad.indx)
4060 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
4061 reg_names[REGNO (ad.base)]);
4062 else if (ad.base)
4063 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4064}
4065
c7453384
EC
4066/* Output operand X in assembler syntax to stdio stream FILE.
4067 CODE specified the format flag. The following format flags
994fe660
UW
4068 are recognized:
4069
4070 'C': print opcode suffix for branch condition.
4071 'D': print opcode suffix for inverse branch condition.
fd3cd001 4072 'J': print tls_load/tls_gdcall/tls_ldcall suffix
994fe660
UW
4073 'O': print only the displacement of a memory reference.
4074 'R': print only the base register of a memory reference.
fc0ea003 4075 'S': print S-type memory reference (base+displacement).
994fe660
UW
4076 'N': print the second word of a DImode operand.
4077 'M': print the second word of a TImode operand.
ac32b25e 4078 'Y': print shift count operand.
994fe660 4079
5519a4f9
KH
4080 'b': print integer X as if it's an unsigned byte.
4081 'x': print integer X as if it's an unsigned word.
f19a9af7
AK
4082 'h': print integer X as if it's a signed word.
4083 'i': print the first nonzero HImode part of X.
4084 'j': print the first HImode part unequal to 0xffff of X. */
9db1d521
HP
4085
4086void
9c808aad 4087print_operand (FILE *file, rtx x, int code)
9db1d521
HP
4088{
4089 switch (code)
4090 {
4091 case 'C':
ba956982 4092 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
9db1d521
HP
4093 return;
4094
4095 case 'D':
ba956982 4096 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
9db1d521
HP
4097 return;
4098
fd3cd001
UW
4099 case 'J':
4100 if (GET_CODE (x) == SYMBOL_REF)
4101 {
4102 fprintf (file, "%s", ":tls_load:");
4103 output_addr_const (file, x);
4104 }
4105 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
4106 {
4107 fprintf (file, "%s", ":tls_gdcall:");
4108 output_addr_const (file, XVECEXP (x, 0, 0));
4109 }
4110 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
4111 {
4112 fprintf (file, "%s", ":tls_ldcall:");
4113 assemble_name (file, get_some_local_dynamic_name ());
4114 }
4115 else
4116 abort ();
4117 return;
4118
9db1d521
HP
4119 case 'O':
4120 {
4121 struct s390_address ad;
4122
4123 if (GET_CODE (x) != MEM
b808c04c
UW
4124 || !s390_decompose_address (XEXP (x, 0), &ad)
4125 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
9db1d521 4126 || ad.indx)
994fe660 4127 abort ();
9db1d521
HP
4128
4129 if (ad.disp)
faeb9bb6 4130 output_addr_const (file, ad.disp);
9db1d521
HP
4131 else
4132 fprintf (file, "0");
4133 }
4134 return;
4135
4136 case 'R':
4137 {
4138 struct s390_address ad;
4139
4140 if (GET_CODE (x) != MEM
b808c04c
UW
4141 || !s390_decompose_address (XEXP (x, 0), &ad)
4142 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
9db1d521 4143 || ad.indx)
994fe660 4144 abort ();
9db1d521
HP
4145
4146 if (ad.base)
4147 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
4148 else
4149 fprintf (file, "0");
4150 }
4151 return;
4152
fc0ea003
UW
4153 case 'S':
4154 {
4155 struct s390_address ad;
4156
4157 if (GET_CODE (x) != MEM
4158 || !s390_decompose_address (XEXP (x, 0), &ad)
4159 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4160 || ad.indx)
4161 abort ();
4162
4163 if (ad.disp)
4164 output_addr_const (file, ad.disp);
4165 else
4166 fprintf (file, "0");
4167
4168 if (ad.base)
4169 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4170 }
4171 return;
4172
9db1d521
HP
4173 case 'N':
4174 if (GET_CODE (x) == REG)
4175 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4176 else if (GET_CODE (x) == MEM)
4177 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
4178 else
994fe660 4179 abort ();
9db1d521
HP
4180 break;
4181
4182 case 'M':
4183 if (GET_CODE (x) == REG)
4184 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4185 else if (GET_CODE (x) == MEM)
4186 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
4187 else
994fe660 4188 abort ();
9db1d521 4189 break;
ac32b25e
UW
4190
4191 case 'Y':
4192 print_shift_count_operand (file, x);
4193 return;
9db1d521
HP
4194 }
4195
4196 switch (GET_CODE (x))
4197 {
4198 case REG:
4199 fprintf (file, "%s", reg_names[REGNO (x)]);
4200 break;
4201
4202 case MEM:
4203 output_address (XEXP (x, 0));
4204 break;
4205
4206 case CONST:
4207 case CODE_LABEL:
4208 case LABEL_REF:
4209 case SYMBOL_REF:
faeb9bb6 4210 output_addr_const (file, x);
9db1d521
HP
4211 break;
4212
4213 case CONST_INT:
4214 if (code == 'b')
4023fb28
UW
4215 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
4216 else if (code == 'x')
4217 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
4218 else if (code == 'h')
4219 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
f19a9af7 4220 else if (code == 'i')
38899e29 4221 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
f19a9af7
AK
4222 s390_extract_part (x, HImode, 0));
4223 else if (code == 'j')
38899e29
EC
4224 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4225 s390_extract_part (x, HImode, -1));
4023fb28
UW
4226 else
4227 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
4228 break;
4229
4230 case CONST_DOUBLE:
4231 if (GET_MODE (x) != VOIDmode)
4232 abort ();
4233 if (code == 'b')
4234 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
9db1d521 4235 else if (code == 'x')
4023fb28 4236 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
9db1d521 4237 else if (code == 'h')
4023fb28 4238 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
9db1d521 4239 else
4023fb28 4240 abort ();
9db1d521
HP
4241 break;
4242
4243 default:
4244 fatal_insn ("UNKNOWN in print_operand !?", x);
4245 break;
4246 }
4247}
4248
301d03af
RS
4249/* Target hook for assembling integer objects. We need to define it
4250 here to work a round a bug in some versions of GAS, which couldn't
4251 handle values smaller than INT_MIN when printed in decimal. */
4252
4253static bool
9c808aad 4254s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af
RS
4255{
4256 if (size == 8 && aligned_p
4257 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
4258 {
4a0a75dd
KG
4259 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
4260 INTVAL (x));
301d03af
RS
4261 return true;
4262 }
4263 return default_assemble_integer (x, size, aligned_p);
4264}
4265
c7453384 4266/* Returns true if register REGNO is used for forming
994fe660 4267 a memory address in expression X. */
9db1d521
HP
4268
4269static int
9c808aad 4270reg_used_in_mem_p (int regno, rtx x)
9db1d521
HP
4271{
4272 enum rtx_code code = GET_CODE (x);
4273 int i, j;
4274 const char *fmt;
c7453384 4275
9db1d521
HP
4276 if (code == MEM)
4277 {
4278 if (refers_to_regno_p (regno, regno+1,
4279 XEXP (x, 0), 0))
4280 return 1;
4281 }
c7453384 4282 else if (code == SET
4023fb28
UW
4283 && GET_CODE (SET_DEST (x)) == PC)
4284 {
4285 if (refers_to_regno_p (regno, regno+1,
4286 SET_SRC (x), 0))
4287 return 1;
4288 }
9db1d521
HP
4289
4290 fmt = GET_RTX_FORMAT (code);
4291 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4292 {
4293 if (fmt[i] == 'e'
4294 && reg_used_in_mem_p (regno, XEXP (x, i)))
4295 return 1;
c7453384 4296
9db1d521
HP
4297 else if (fmt[i] == 'E')
4298 for (j = 0; j < XVECLEN (x, i); j++)
4299 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
4300 return 1;
4301 }
4302 return 0;
4303}
4304
d65f7478 4305/* Returns true if expression DEP_RTX sets an address register
994fe660 4306 used by instruction INSN to address memory. */
9db1d521 4307
/* Return nonzero if expression DEP_RTX sets an address register that
   instruction INSN uses to address memory (an "agen" dependency, which
   stalls the address-generation stage of the pipeline).  */

static int
addr_generation_dependency_p (rtx dep_rtx, rtx insn)
{
  rtx target, pat;

  /* Look through the insn wrapper to its pattern.  */
  if (GET_CODE (dep_rtx) == INSN)
    dep_rtx = PATTERN (dep_rtx);

  if (GET_CODE (dep_rtx) == SET)
    {
      target = SET_DEST (dep_rtx);
      /* Strip STRICT_LOW_PART and any SUBREGs to reach the register
	 actually being written.  */
      if (GET_CODE (target) == STRICT_LOW_PART)
	target = XEXP (target, 0);
      while (GET_CODE (target) == SUBREG)
	target = SUBREG_REG (target);

      if (GET_CODE (target) == REG)
	{
	  int regno = REGNO (target);

	  if (s390_safe_attr_type (insn) == TYPE_LA)
	    {
	      /* For LA-type insns, the address is the SET_SRC of the
		 (possibly PARALLEL-wrapped) pattern.  A PARALLEL here
		 must have exactly two elements.  */
	      pat = PATTERN (insn);
	      if (GET_CODE (pat) == PARALLEL)
		{
		  if (XVECLEN (pat, 0) != 2)
		    abort();
		  pat = XVECEXP (pat, 0, 0);
		}
	      if (GET_CODE (pat) == SET)
		return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
	      else
		abort();
	    }
	  else if (get_attr_atype (insn) == ATYPE_AGEN)
	    /* For other address-generating insns, scan the whole pattern
	       for a memory address using REGNO.  */
	    return reg_used_in_mem_p (regno, PATTERN (insn));
	}
    }
  return 0;
}
4348
077dab3b
HP
4349/* Return 1, if dep_insn sets register used in insn in the agen unit. */
4350
c7453384 4351int
9c808aad 4352s390_agen_dep_p (rtx dep_insn, rtx insn)
c7453384 4353{
077dab3b
HP
4354 rtx dep_rtx = PATTERN (dep_insn);
4355 int i;
c7453384
EC
4356
4357 if (GET_CODE (dep_rtx) == SET
077dab3b
HP
4358 && addr_generation_dependency_p (dep_rtx, insn))
4359 return 1;
4360 else if (GET_CODE (dep_rtx) == PARALLEL)
4361 {
4362 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
4363 {
4364 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
4365 return 1;
4366 }
4367 }
4368 return 0;
4369}
4370
52609473
HP
4371/* A C statement (sans semicolon) to update the integer scheduling priority
4372 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
4373 reduce the priority to execute INSN later. Do not define this macro if
c7453384 4374 you do not need to adjust the scheduling priorities of insns.
52609473 4375
c7453384 4376 A STD instruction should be scheduled earlier,
52609473
HP
4377 in order to use the bypass. */
4378
4379static int
9c808aad 4380s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
52609473
HP
4381{
4382 if (! INSN_P (insn))
4383 return priority;
4384
4385 if (s390_tune != PROCESSOR_2084_Z990)
4386 return priority;
4387
4388 switch (s390_safe_attr_type (insn))
4389 {
4390 case TYPE_FSTORED:
4391 case TYPE_FSTORES:
4392 priority = priority << 3;
4393 break;
4394 case TYPE_STORE:
ea77e738 4395 case TYPE_STM:
52609473
HP
4396 priority = priority << 1;
4397 break;
4398 default:
4399 break;
4400 }
4401 return priority;
4402}
f2d3c02a 4403
077dab3b 4404/* The number of instructions that can be issued per cycle. */
f2d3c02a 4405
077dab3b 4406static int
9c808aad 4407s390_issue_rate (void)
077dab3b 4408{
c7453384 4409 if (s390_tune == PROCESSOR_2084_Z990)
52609473 4410 return 3;
077dab3b
HP
4411 return 1;
4412}
f2d3c02a 4413
/* Implement TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD: the number
   of ready insns the scheduler may examine when choosing what to issue
   on the first cycle.  */

static int
s390_first_cycle_multipass_dfa_lookahead (void)
{
  return 4;
}
4419
52609473 4420
c7453384 4421/* Split all branches that exceed the maximum distance.
545d16ff 4422 Returns true if this created a new literal pool entry. */
9db1d521 4423
/* Split all branches that exceed the maximum distance.
   Returns true if this created a new literal pool entry.  */

static int
s390_split_branches (void)
{
  /* The return register is used as scratch to hold the branch target.  */
  rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
  int new_literal = 0;
  rtx insn, pat, tmp, target;
  rtx *label;

  /* We need correct insn addresses.  */

  shorten_branches (get_insns ());

  /* Find all branches that exceed 64KB, and split them.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) != JUMP_INSN)
	continue;

      /* For a PARALLEL with more than two elements, the branch proper
	 is the first element.  */
      pat = PATTERN (insn);
      if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
	pat = XVECEXP (pat, 0, 0);
      if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
	continue;

      /* Locate the label operand: either a direct jump, or one arm of
	 a conditional (IF_THEN_ELSE) jump.  */
      if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
	{
	  label = &SET_SRC (pat);
	}
      else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
	{
	  if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
	    label = &XEXP (SET_SRC (pat), 1);
	  else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
	    label = &XEXP (SET_SRC (pat), 2);
	  else
	    continue;
	}
      else
	continue;

      /* Short branches (4 bytes) are always in range.  */
      if (get_attr_length (insn) <= 4)
	continue;

      /* We are going to use the return register as scratch register,
	 make sure it will be saved/restored by the prologue/epilogue.  */
      cfun_frame_layout.save_return_addr_p = 1;

      if (!flag_pic)
	{
	  /* Non-PIC: load the absolute label address from the literal
	     pool into the scratch register and branch through it.  */
	  new_literal = 1;
	  tmp = force_const_mem (Pmode, *label);
	  tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
	  INSN_ADDRESSES_NEW (tmp, -1);
	  annotate_constant_pool_refs (&PATTERN (tmp));

	  target = temp_reg;
	}
      else
	{
	  /* PIC: load the label's literal-pool-relative offset, then add
	     the pool base (UNSPEC_LTREL_BASE) to form the address.  */
	  new_literal = 1;
	  target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
				   UNSPEC_LTREL_OFFSET);
	  target = gen_rtx_CONST (Pmode, target);
	  target = force_const_mem (Pmode, target);
	  tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
	  INSN_ADDRESSES_NEW (tmp, -1);
	  annotate_constant_pool_refs (&PATTERN (tmp));

	  target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
						     cfun->machine->base_reg),
				   UNSPEC_LTREL_BASE);
	  target = gen_rtx_PLUS (Pmode, temp_reg, target);
	}

      /* Replace the label operand with the computed target.  */
      if (!validate_change (insn, label, target, 0))
	abort ();
    }

  return new_literal;
}
4505
585539a1
UW
4506/* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
4507 Fix up MEMs as required. */
4508
/* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
   Fix up MEMs as required.  */

static void
annotate_constant_pool_refs (rtx *x)
{
  int i, j;
  const char *fmt;

  /* A bare pool symbol outside MEM/SET contexts must not occur here;
     every reference should already be wrapped as handled below.  */
  if (GET_CODE (*x) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (*x))
    abort ();

  /* Literal pool references can only occur inside a MEM ...  */
  if (GET_CODE (*x) == MEM)
    {
      rtx memref = XEXP (*x, 0);

      /* Plain pool symbol as address.  */
      if (GET_CODE (memref) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (memref))
	{
	  rtx base = cfun->machine->base_reg;
	  rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
				     UNSPEC_LTREF);

	  *x = replace_equiv_address (*x, addr);
	  return;
	}

      /* Pool symbol plus constant offset as address.  */
      if (GET_CODE (memref) == CONST
	  && GET_CODE (XEXP (memref, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
	  && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
	{
	  HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
	  rtx sym = XEXP (XEXP (memref, 0), 0);
	  rtx base = cfun->machine->base_reg;
	  rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
				     UNSPEC_LTREF);

	  *x = replace_equiv_address (*x, plus_constant (addr, off));
	  return;
	}
    }

  /* ... or a load-address type pattern.  */
  if (GET_CODE (*x) == SET)
    {
      rtx addrref = SET_SRC (*x);

      /* LA of a plain pool symbol.  */
      if (GET_CODE (addrref) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (addrref))
	{
	  rtx base = cfun->machine->base_reg;
	  rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
				     UNSPEC_LTREF);

	  SET_SRC (*x) = addr;
	  return;
	}

      /* LA of a pool symbol plus constant offset.  */
      if (GET_CODE (addrref) == CONST
	  && GET_CODE (XEXP (addrref, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
	  && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
	{
	  HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
	  rtx sym = XEXP (XEXP (addrref, 0), 0);
	  rtx base = cfun->machine->base_reg;
	  rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
				     UNSPEC_LTREF);

	  SET_SRC (*x) = plus_constant (addr, off);
	  return;
	}
    }

  /* Annotate LTREL_BASE as well: record the base register alongside
     the original operand.  */
  if (GET_CODE (*x) == UNSPEC
      && XINT (*x, 1) == UNSPEC_LTREL_BASE)
    {
      rtx base = cfun->machine->base_reg;
      *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
			   UNSPEC_LTREL_BASE);
      return;
    }

  /* Recurse into all sub-expressions.  */
  fmt = GET_RTX_FORMAT (GET_CODE (*x));
  for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  annotate_constant_pool_refs (&XEXP (*x, i));
	}
      else if (fmt[i] == 'E')
	{
	  for (j = 0; j < XVECLEN (*x, i); j++)
	    annotate_constant_pool_refs (&XVECEXP (*x, i, j));
	}
    }
}
4609
b2ccb744 4610
585539a1
UW
4611/* Find an annotated literal pool symbol referenced in RTX X,
4612 and store it at REF. Will abort if X contains references to
4613 more than one such pool symbol; multiple references to the same
4614 symbol are allowed, however.
b2ccb744 4615
c7453384 4616 The rtx pointed to by REF must be initialized to NULL_RTX
b2ccb744
UW
4617 by the caller before calling this routine. */
4618
/* Find an annotated literal pool symbol referenced in RTX X,
   and store it at REF.  Will abort if X contains references to
   more than one such pool symbol; multiple references to the same
   symbol are allowed, however.

   The rtx pointed to by REF must be initialized to NULL_RTX
   by the caller before calling this routine.  */

static void
find_constant_pool_ref (rtx x, rtx *ref)
{
  int i, j;
  const char *fmt;

  /* Ignore LTREL_BASE references.  */
  if (GET_CODE (x) == UNSPEC
      && XINT (x, 1) == UNSPEC_LTREL_BASE)
    return;
  /* Likewise POOL_ENTRY insns.  */
  if (GET_CODE (x) == UNSPEC_VOLATILE
      && XINT (x, 1) == UNSPECV_POOL_ENTRY)
    return;

  /* A bare (un-annotated) pool symbol is a bug at this point; all
     references must be wrapped in UNSPEC_LTREF by
     annotate_constant_pool_refs.  */
  if (GET_CODE (x) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (x))
    abort ();

  if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
    {
      rtx sym = XVECEXP (x, 0, 0);
      if (GET_CODE (sym) != SYMBOL_REF
	  || !CONSTANT_POOL_ADDRESS_P (sym))
	abort ();

      /* First hit records the symbol; later hits must agree with it.  */
      if (*ref == NULL_RTX)
	*ref = sym;
      else if (*ref != sym)
	abort ();

      return;
    }

  /* Recurse into all sub-expressions.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  find_constant_pool_ref (XEXP (x, i), ref);
	}
      else if (fmt[i] == 'E')
	{
	  for (j = 0; j < XVECLEN (x, i); j++)
	    find_constant_pool_ref (XVECEXP (x, i, j), ref);
	}
    }
}
4667
585539a1
UW
4668/* Replace every reference to the annotated literal pool
4669 symbol REF in X by its base plus OFFSET. */
b2ccb744
UW
4670
/* Replace every reference to the annotated literal pool
   symbol REF in X by its base plus OFFSET.  */

static void
replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
{
  int i, j;
  const char *fmt;

  /* The raw symbol must never appear outside an UNSPEC_LTREF wrapper.  */
  if (*x == ref)
    abort ();

  /* UNSPEC_LTREF (sym, base) -> base + offset.  */
  if (GET_CODE (*x) == UNSPEC
      && XINT (*x, 1) == UNSPEC_LTREF
      && XVECEXP (*x, 0, 0) == ref)
    {
      *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
      return;
    }

  /* UNSPEC_LTREF (sym, base) + const -> base + offset + const.  */
  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 1)) == CONST_INT
      && GET_CODE (XEXP (*x, 0)) == UNSPEC
      && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
      && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
    {
      rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
      *x = plus_constant (addr, INTVAL (XEXP (*x, 1)));
      return;
    }

  /* Recurse into all sub-expressions.  */
  fmt = GET_RTX_FORMAT (GET_CODE (*x));
  for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
	}
      else if (fmt[i] == 'E')
	{
	  for (j = 0; j < XVECLEN (*x, i); j++)
	    replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
	}
    }
}
4713
c7453384 4714/* Check whether X contains an UNSPEC_LTREL_BASE.
fd7643fb 4715 Return its constant pool symbol if found, NULL_RTX otherwise. */
aee4e0db 4716
fd7643fb 4717static rtx
9c808aad 4718find_ltrel_base (rtx x)
aee4e0db 4719{
aee4e0db
UW
4720 int i, j;
4721 const char *fmt;
4722
fd7643fb
UW
4723 if (GET_CODE (x) == UNSPEC
4724 && XINT (x, 1) == UNSPEC_LTREL_BASE)
4725 return XVECEXP (x, 0, 0);
aee4e0db
UW
4726
4727 fmt = GET_RTX_FORMAT (GET_CODE (x));
4728 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4729 {
4730 if (fmt[i] == 'e')
4731 {
fd7643fb
UW
4732 rtx fnd = find_ltrel_base (XEXP (x, i));
4733 if (fnd)
4734 return fnd;
aee4e0db
UW
4735 }
4736 else if (fmt[i] == 'E')
4737 {
4738 for (j = 0; j < XVECLEN (x, i); j++)
fd7643fb
UW
4739 {
4740 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
4741 if (fnd)
4742 return fnd;
4743 }
aee4e0db
UW
4744 }
4745 }
4746
fd7643fb 4747 return NULL_RTX;
aee4e0db
UW
4748}
4749
585539a1 4750/* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
aee4e0db
UW
4751
4752static void
585539a1 4753replace_ltrel_base (rtx *x)
aee4e0db 4754{
fd7643fb 4755 int i, j;
aee4e0db
UW
4756 const char *fmt;
4757
fd7643fb
UW
4758 if (GET_CODE (*x) == UNSPEC
4759 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
aee4e0db 4760 {
585539a1 4761 *x = XVECEXP (*x, 0, 1);
fd7643fb 4762 return;
aee4e0db
UW
4763 }
4764
4765 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4766 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4767 {
4768 if (fmt[i] == 'e')
4769 {
585539a1 4770 replace_ltrel_base (&XEXP (*x, i));
aee4e0db
UW
4771 }
4772 else if (fmt[i] == 'E')
4773 {
4774 for (j = 0; j < XVECLEN (*x, i); j++)
585539a1 4775 replace_ltrel_base (&XVECEXP (*x, i, j));
aee4e0db
UW
4776 }
4777 }
4778}
4779
4780
/* We keep a list of constants which we have to add to internal
   constant tables in the middle of large functions.  */

/* Number of distinct machine modes a pool entry may have; keep in sync
   with the initializer of constant_modes below.  */
#define NR_C_MODES 7
/* Modes in descending alignment-requirement order, so that dumping in
   this order keeps every entry naturally aligned.  */
enum machine_mode constant_modes[NR_C_MODES] =
{
  TImode,
  DFmode, DImode,
  SFmode, SImode,
  HImode,
  QImode
};

/* One entry of a constant pool chain.  */
struct constant
{
  struct constant *next;    /* Next entry in the per-mode chain.  */
  rtx value;                /* The constant itself (or, for execute
			       targets, the execute insn).  */
  rtx label;                /* Label marking this entry's location.  */
};

/* One literal pool (a function may need several chunks).  */
struct constant_pool
{
  struct constant_pool *next;  /* Next pool chunk in the function.  */
  rtx first_insn;              /* First insn covered by this pool.  */
  rtx pool_insn;               /* Placeholder insn marking where the
				  pool itself will be dumped.  */
  bitmap insns;                /* UIDs of insns covered by this pool.  */

  struct constant *constants[NR_C_MODES];  /* Per-mode entry chains.  */
  struct constant *execute;    /* Execute-target template entries.  */
  rtx label;                   /* Pool base label.  */
  int size;                    /* Total pool size in bytes.  */
};
4813
/* Forward declarations for the literal pool machinery below.  */

/* Single-pool ("main pool") strategy.  */
static struct constant_pool * s390_mainpool_start (void);
static void s390_mainpool_finish (struct constant_pool *);
static void s390_mainpool_cancel (struct constant_pool *);

/* Multi-chunk ("chunkify") strategy for large functions.  */
static struct constant_pool * s390_chunkify_start (void);
static void s390_chunkify_finish (struct constant_pool *);
static void s390_chunkify_cancel (struct constant_pool *);

/* Shared pool bookkeeping helpers.  */
static struct constant_pool *s390_start_pool (struct constant_pool **, rtx);
static void s390_end_pool (struct constant_pool *, rtx);
static void s390_add_pool_insn (struct constant_pool *, rtx);
static struct constant_pool *s390_find_pool (struct constant_pool *, rtx);
static void s390_add_constant (struct constant_pool *, rtx, enum machine_mode);
static rtx s390_find_constant (struct constant_pool *, rtx, enum machine_mode);
static void s390_add_execute (struct constant_pool *, rtx);
static rtx s390_find_execute (struct constant_pool *, rtx);
static rtx s390_execute_label (rtx);
static rtx s390_execute_target (rtx);
static void s390_dump_pool (struct constant_pool *, bool);
static void s390_dump_execute (struct constant_pool *);
static struct constant_pool *s390_alloc_pool (void);
static void s390_free_pool (struct constant_pool *);
b2ccb744
UW
4836
4837/* Create new constant pool covering instructions starting at INSN
4838 and chain it to the end of POOL_LIST. */
4839
4840static struct constant_pool *
9c808aad 4841s390_start_pool (struct constant_pool **pool_list, rtx insn)
b2ccb744
UW
4842{
4843 struct constant_pool *pool, **prev;
b2ccb744 4844
5af2f3d3 4845 pool = s390_alloc_pool ();
b2ccb744 4846 pool->first_insn = insn;
aee4e0db 4847
b2ccb744
UW
4848 for (prev = pool_list; *prev; prev = &(*prev)->next)
4849 ;
4850 *prev = pool;
4851
4852 return pool;
4853}
4854
aee4e0db
UW
4855/* End range of instructions covered by POOL at INSN and emit
4856 placeholder insn representing the pool. */
b2ccb744
UW
4857
4858static void
9c808aad 4859s390_end_pool (struct constant_pool *pool, rtx insn)
b2ccb744 4860{
aee4e0db
UW
4861 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
4862
4863 if (!insn)
4864 insn = get_last_insn ();
4865
4866 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
4867 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
4868}
4869
4870/* Add INSN to the list of insns covered by POOL. */
4871
static void
s390_add_pool_insn (struct constant_pool *pool, rtx insn)
{
  /* Pool membership is tracked as a bitmap of insn UIDs; queried by
     s390_find_pool.  */
  bitmap_set_bit (pool->insns, INSN_UID (insn));
}
4877
4878/* Return pool out of POOL_LIST that covers INSN. */
4879
4880static struct constant_pool *
9c808aad 4881s390_find_pool (struct constant_pool *pool_list, rtx insn)
b2ccb744 4882{
b2ccb744
UW
4883 struct constant_pool *pool;
4884
b2ccb744 4885 for (pool = pool_list; pool; pool = pool->next)
aee4e0db 4886 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
b2ccb744
UW
4887 break;
4888
4889 return pool;
4890}
4891
aee4e0db 4892/* Add constant VAL of mode MODE to the constant pool POOL. */
b2ccb744 4893
aee4e0db 4894static void
9c808aad 4895s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
b2ccb744
UW
4896{
4897 struct constant *c;
b2ccb744
UW
4898 int i;
4899
4900 for (i = 0; i < NR_C_MODES; i++)
4901 if (constant_modes[i] == mode)
4902 break;
4903 if (i == NR_C_MODES)
4904 abort ();
4905
4906 for (c = pool->constants[i]; c != NULL; c = c->next)
4907 if (rtx_equal_p (val, c->value))
4908 break;
4909
4910 if (c == NULL)
4911 {
4912 c = (struct constant *) xmalloc (sizeof *c);
4913 c->value = val;
4914 c->label = gen_label_rtx ();
4915 c->next = pool->constants[i];
4916 pool->constants[i] = c;
4917 pool->size += GET_MODE_SIZE (mode);
4918 }
aee4e0db 4919}
b2ccb744 4920
aee4e0db
UW
4921/* Find constant VAL of mode MODE in the constant pool POOL.
4922 Return an RTX describing the distance from the start of
4923 the pool to the location of the new constant. */
c7453384 4924
aee4e0db 4925static rtx
9c808aad
AJ
4926s390_find_constant (struct constant_pool *pool, rtx val,
4927 enum machine_mode mode)
aee4e0db
UW
4928{
4929 struct constant *c;
4930 rtx offset;
4931 int i;
c7453384 4932
aee4e0db
UW
4933 for (i = 0; i < NR_C_MODES; i++)
4934 if (constant_modes[i] == mode)
4935 break;
4936 if (i == NR_C_MODES)
4937 abort ();
c7453384 4938
aee4e0db
UW
4939 for (c = pool->constants[i]; c != NULL; c = c->next)
4940 if (rtx_equal_p (val, c->value))
4941 break;
c7453384 4942
aee4e0db
UW
4943 if (c == NULL)
4944 abort ();
c7453384 4945
aee4e0db
UW
4946 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
4947 gen_rtx_LABEL_REF (Pmode, pool->label));
b2ccb744
UW
4948 offset = gen_rtx_CONST (Pmode, offset);
4949 return offset;
4950}
4951
9bb86f41
UW
4952/* Add execute target for INSN to the constant pool POOL. */
4953
4954static void
4955s390_add_execute (struct constant_pool *pool, rtx insn)
4956{
4957 struct constant *c;
4958
4959 for (c = pool->execute; c != NULL; c = c->next)
4960 if (INSN_UID (insn) == INSN_UID (c->value))
4961 break;
4962
4963 if (c == NULL)
4964 {
4965 rtx label = s390_execute_label (insn);
4966 gcc_assert (label);
4967
4968 c = (struct constant *) xmalloc (sizeof *c);
4969 c->value = insn;
4970 c->label = label == const0_rtx ? gen_label_rtx () : XEXP (label, 0);
4971 c->next = pool->execute;
4972 pool->execute = c;
4973 pool->size += label == const0_rtx ? 6 : 0;
4974 }
4975}
4976
4977/* Find execute target for INSN in the constant pool POOL.
4978 Return an RTX describing the distance from the start of
4979 the pool to the location of the execute target. */
4980
4981static rtx
4982s390_find_execute (struct constant_pool *pool, rtx insn)
4983{
4984 struct constant *c;
4985 rtx offset;
4986
4987 for (c = pool->execute; c != NULL; c = c->next)
4988 if (INSN_UID (insn) == INSN_UID (c->value))
4989 break;
4990
4991 if (c == NULL)
4992 abort ();
4993
4994 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
4995 gen_rtx_LABEL_REF (Pmode, pool->label));
4996 offset = gen_rtx_CONST (Pmode, offset);
4997 return offset;
4998}
4999
5000/* Check whether INSN is an execute. Return the label_ref to its
5001 execute target template if so, NULL_RTX otherwise. */
5002
5003static rtx
5004s390_execute_label (rtx insn)
5005{
5006 if (GET_CODE (insn) == INSN
5007 && GET_CODE (PATTERN (insn)) == PARALLEL
5008 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
5009 && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
5010 return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);
5011
5012 return NULL_RTX;
5013}
5014
5015/* For an execute INSN, extract the execute target template. */
5016
5017static rtx
5018s390_execute_target (rtx insn)
5019{
5020 rtx pattern = PATTERN (insn);
5021 gcc_assert (s390_execute_label (insn));
5022
5023 if (XVECLEN (pattern, 0) == 2)
5024 {
5025 pattern = copy_rtx (XVECEXP (pattern, 0, 1));
5026 }
5027 else
5028 {
5029 rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
5030 int i;
5031
5032 for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
5033 RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));
5034
5035 pattern = gen_rtx_PARALLEL (VOIDmode, vec);
5036 }
5037
5038 return pattern;
5039}
5040
5041/* Indicate that INSN cannot be duplicated. This is the case for
5042 execute insns that carry a unique label. */
5043
5044static bool
5045s390_cannot_copy_insn_p (rtx insn)
5046{
5047 rtx label = s390_execute_label (insn);
5048 return label && label != const0_rtx;
5049}
5050
5af2f3d3
UW
5051/* Dump out the constants in POOL. If REMOTE_LABEL is true,
5052 do not emit the pool base label. */
b2ccb744 5053
/* Dump out the constants in POOL.  If REMOTE_LABEL is true,
   do not emit the pool base label.  */

static void
s390_dump_pool (struct constant_pool *pool, bool remote_label)
{
  struct constant *c;
  rtx insn = pool->pool_insn;
  int i;

  /* Switch to rodata section.  */
  if (TARGET_CPU_ZARCH)
    {
      insn = emit_insn_after (gen_pool_section_start (), insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Ensure minimum pool alignment.  */
  if (TARGET_CPU_ZARCH)
    insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
  else
    insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Emit pool base label.  */
  if (!remote_label)
    {
      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Dump constants in descending alignment requirement order,
     ensuring proper alignment for every constant.  */
  for (i = 0; i < NR_C_MODES; i++)
    for (c = pool->constants[i]; c; c = c->next)
      {
	/* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references.  */
	rtx value = c->value;
	if (GET_CODE (value) == CONST
	    && GET_CODE (XEXP (value, 0)) == UNSPEC
	    && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
	    && XVECLEN (XEXP (value, 0), 0) == 1)
	  {
	    value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
				   gen_rtx_LABEL_REF (VOIDmode, pool->label));
	    value = gen_rtx_CONST (VOIDmode, value);
	  }

	/* Each entry is a label followed by an UNSPECV_POOL_ENTRY insn
	   wrapping the value.  */
	insn = emit_label_after (c->label, insn);
	INSN_ADDRESSES_NEW (insn, -1);

	value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
					 gen_rtvec (1, value),
					 UNSPECV_POOL_ENTRY);
	insn = emit_insn_after (value, insn);
	INSN_ADDRESSES_NEW (insn, -1);
      }

  /* Ensure minimum alignment for instructions.  */
  insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Output in-pool execute template insns (those whose label slot is
     const0_rtx; see s390_add_execute).  */
  for (c = pool->execute; c; c = c->next)
    {
      if (s390_execute_label (c->value) != const0_rtx)
	continue;

      insn = emit_label_after (c->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);

      insn = emit_insn_after (s390_execute_target (c->value), insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Switch back to previous section.  */
  if (TARGET_CPU_ZARCH)
    {
      insn = emit_insn_after (gen_pool_section_end (), insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  insn = emit_barrier_after (insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Remove placeholder insn.  */
  remove_insn (pool->pool_insn);

  /* Output out-of-pool execute template insns.  */
  s390_dump_execute (pool);
}
5142
5143/* Dump out the out-of-pool execute template insns in POOL
5144 at the end of the instruction stream. */
5145
5146static void
5147s390_dump_execute (struct constant_pool *pool)
5148{
5149 struct constant *c;
5150 rtx insn;
5151
5152 for (c = pool->execute; c; c = c->next)
5153 {
5154 if (s390_execute_label (c->value) == const0_rtx)
5155 continue;
5156
5157 insn = emit_label (c->label);
5158 INSN_ADDRESSES_NEW (insn, -1);
5159
5160 insn = emit_insn (s390_execute_target (c->value));
5161 INSN_ADDRESSES_NEW (insn, -1);
5162 }
b2ccb744
UW
5163}
5164
5af2f3d3
UW
5165/* Allocate new constant_pool structure. */
5166
5167static struct constant_pool *
5168s390_alloc_pool (void)
5169{
5170 struct constant_pool *pool;
5171 int i;
5172
5173 pool = (struct constant_pool *) xmalloc (sizeof *pool);
5174 pool->next = NULL;
5175 for (i = 0; i < NR_C_MODES; i++)
5176 pool->constants[i] = NULL;
5177
9bb86f41 5178 pool->execute = NULL;
5af2f3d3
UW
5179 pool->label = gen_label_rtx ();
5180 pool->first_insn = NULL_RTX;
5181 pool->pool_insn = NULL_RTX;
5182 pool->insns = BITMAP_XMALLOC ();
5183 pool->size = 0;
5184
5185 return pool;
5186}
5187
b2ccb744
UW
5188/* Free all memory used by POOL. */
5189
5190static void
9c808aad 5191s390_free_pool (struct constant_pool *pool)
b2ccb744 5192{
9bb86f41 5193 struct constant *c, *next;
b2ccb744
UW
5194 int i;
5195
5196 for (i = 0; i < NR_C_MODES; i++)
9bb86f41
UW
5197 for (c = pool->constants[i]; c; c = next)
5198 {
5199 next = c->next;
5200 free (c);
5201 }
5202
5203 for (c = pool->execute; c; c = next)
b2ccb744 5204 {
9bb86f41
UW
5205 next = c->next;
5206 free (c);
b2ccb744
UW
5207 }
5208
aee4e0db 5209 BITMAP_XFREE (pool->insns);
b2ccb744 5210 free (pool);
c7453384 5211}
b2ccb744 5212
b2ccb744 5213
5af2f3d3
UW
5214/* Collect main literal pool. Return NULL on overflow. */
5215
5216static struct constant_pool *
5217s390_mainpool_start (void)
5218{
5219 struct constant_pool *pool;
5220 rtx insn;
5221
5222 pool = s390_alloc_pool ();
5223
5224 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5225 {
5226 if (GET_CODE (insn) == INSN
585539a1
UW
5227 && GET_CODE (PATTERN (insn)) == SET
5228 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
5229 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
5af2f3d3
UW
5230 {
5231 if (pool->pool_insn)
5232 abort ();
5233 pool->pool_insn = insn;
5234 }
5235
9bb86f41
UW
5236 if (s390_execute_label (insn))
5237 {
5238 s390_add_execute (pool, insn);
5239 }
5240 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5af2f3d3
UW
5241 {
5242 rtx pool_ref = NULL_RTX;
5243 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5244 if (pool_ref)
5245 {
5246 rtx constant = get_pool_constant (pool_ref);
5247 enum machine_mode mode = get_pool_mode (pool_ref);
5248 s390_add_constant (pool, constant, mode);
5249 }
5250 }
5251 }
5252
91086990 5253 if (!pool->pool_insn && pool->size > 0)
5af2f3d3
UW
5254 abort ();
5255
5256 if (pool->size >= 4096)
5257 {
d76e8439
UW
5258 /* We're going to chunkify the pool, so remove the main
5259 pool placeholder insn. */
5260 remove_insn (pool->pool_insn);
5261
5af2f3d3
UW
5262 s390_free_pool (pool);
5263 pool = NULL;
5264 }
5265
5266 return pool;
5267}
5268
5269/* POOL holds the main literal pool as collected by s390_mainpool_start.
5270 Modify the current function to output the pool constants as well as
585539a1 5271 the pool register setup instruction. */
5af2f3d3
UW
5272
5273static void
585539a1 5274s390_mainpool_finish (struct constant_pool *pool)
5af2f3d3 5275{
91086990 5276 rtx base_reg = cfun->machine->base_reg;
5af2f3d3
UW
5277 rtx insn;
5278
5279 /* If the pool is empty, we're done. */
5280 if (pool->size == 0)
5281 {
9bb86f41
UW
5282 /* However, we may have out-of-pool execute templates. */
5283 s390_dump_execute (pool);
5284
91086990
UW
5285 /* We don't actually need a base register after all. */
5286 cfun->machine->base_reg = NULL_RTX;
5287
5288 if (pool->pool_insn)
5289 remove_insn (pool->pool_insn);
5af2f3d3
UW
5290 s390_free_pool (pool);
5291 return;
5292 }
5293
5294 /* We need correct insn addresses. */
5295 shorten_branches (get_insns ());
5296
9e8327e3 5297 /* On zSeries, we use a LARL to load the pool register. The pool is
5af2f3d3 5298 located in the .rodata section, so we emit it after the function. */
9e8327e3 5299 if (TARGET_CPU_ZARCH)
5af2f3d3
UW
5300 {
5301 insn = gen_main_base_64 (base_reg, pool->label);
5302 insn = emit_insn_after (insn, pool->pool_insn);
5303 INSN_ADDRESSES_NEW (insn, -1);
5304 remove_insn (pool->pool_insn);
38899e29
EC
5305
5306 insn = get_last_insn ();
5af2f3d3
UW
5307 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5308 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5309
5310 s390_dump_pool (pool, 0);
5311 }
5312
9e8327e3 5313 /* On S/390, if the total size of the function's code plus literal pool
5af2f3d3
UW
5314 does not exceed 4096 bytes, we use BASR to set up a function base
5315 pointer, and emit the literal pool at the end of the function. */
5316 else if (INSN_ADDRESSES (INSN_UID (get_last_insn ()))
5317 + pool->size + 8 /* alignment slop */ < 4096)
5318 {
5319 insn = gen_main_base_31_small (base_reg, pool->label);
5320 insn = emit_insn_after (insn, pool->pool_insn);
5321 INSN_ADDRESSES_NEW (insn, -1);
5322 remove_insn (pool->pool_insn);
5323
5324 insn = emit_label_after (pool->label, insn);
5325 INSN_ADDRESSES_NEW (insn, -1);
5326
5327 insn = get_last_insn ();
5328 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5329 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5330
5331 s390_dump_pool (pool, 1);
5332 }
5333
5334 /* Otherwise, we emit an inline literal pool and use BASR to branch
5335 over it, setting up the pool register at the same time. */
5336 else
5337 {
5338 rtx pool_end = gen_label_rtx ();
5339
5340 insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
5341 insn = emit_insn_after (insn, pool->pool_insn);
5342 INSN_ADDRESSES_NEW (insn, -1);
5343 remove_insn (pool->pool_insn);
5344
5345 insn = emit_label_after (pool->label, insn);
5346 INSN_ADDRESSES_NEW (insn, -1);
5347
5348 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5349 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5350
5351 insn = emit_label_after (pool_end, pool->pool_insn);
5352 INSN_ADDRESSES_NEW (insn, -1);
5353
5354 s390_dump_pool (pool, 1);
5355 }
5356
5357
5358 /* Replace all literal pool references. */
5359
5360 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5361 {
5362 if (INSN_P (insn))
585539a1 5363 replace_ltrel_base (&PATTERN (insn));
5af2f3d3
UW
5364
5365 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5366 {
5367 rtx addr, pool_ref = NULL_RTX;
5368 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5369 if (pool_ref)
5370 {
9bb86f41
UW
5371 if (s390_execute_label (insn))
5372 addr = s390_find_execute (pool, insn);
5373 else
5374 addr = s390_find_constant (pool, get_pool_constant (pool_ref),
5375 get_pool_mode (pool_ref));
5376
5af2f3d3
UW
5377 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
5378 INSN_CODE (insn) = -1;
5379 }
5380 }
5381 }
5382
5383
5384 /* Free the pool. */
5385 s390_free_pool (pool);
5386}
5387
/* POOL holds the main literal pool as collected by s390_mainpool_start.
   We have decided we cannot use this pool, so revert all changes
   to the current function that were done by s390_mainpool_start.  */

static void
s390_mainpool_cancel (struct constant_pool *pool)
{
  /* Collection did not touch the insn stream, so reverting
     amounts to freeing the pool data structure.  */
  s390_free_pool (pool);
}
5398
5399
585539a1 5400/* Chunkify the literal pool. */
9db1d521 5401
b2ccb744
UW
5402#define S390_POOL_CHUNK_MIN 0xc00
5403#define S390_POOL_CHUNK_MAX 0xe00
5404
c7453384 5405static struct constant_pool *
585539a1 5406s390_chunkify_start (void)
9db1d521 5407{
b2ccb744
UW
5408 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
5409 int extra_size = 0;
5410 bitmap far_labels;
fd7643fb 5411 rtx pending_ltrel = NULL_RTX;
13e58269 5412 rtx insn;
9db1d521 5413
9c808aad 5414 rtx (*gen_reload_base) (rtx, rtx) =
9e8327e3 5415 TARGET_CPU_ZARCH? gen_reload_base_64 : gen_reload_base_31;
aee4e0db
UW
5416
5417
c3cc6b78
UW
5418 /* We need correct insn addresses. */
5419
5420 shorten_branches (get_insns ());
5421
fd7643fb 5422 /* Scan all insns and move literals to pool chunks. */
13e58269 5423
13e58269 5424 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9db1d521 5425 {
fd7643fb
UW
5426 /* Check for pending LTREL_BASE. */
5427 if (INSN_P (insn))
5428 {
5429 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
5430 if (ltrel_base)
5431 {
5432 if (ltrel_base == pending_ltrel)
5433 pending_ltrel = NULL_RTX;
5434 else
5435 abort ();
5436 }
5437 }
5438
9bb86f41
UW
5439 if (s390_execute_label (insn))
5440 {
5441 if (!curr_pool)
5442 curr_pool = s390_start_pool (&pool_list, insn);
5443
5444 s390_add_execute (curr_pool, insn);
5445 s390_add_pool_insn (curr_pool, insn);
5446 }
5447 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
b2ccb744 5448 {
aee4e0db 5449 rtx pool_ref = NULL_RTX;
b2ccb744
UW
5450 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5451 if (pool_ref)
5452 {
fd7643fb
UW
5453 rtx constant = get_pool_constant (pool_ref);
5454 enum machine_mode mode = get_pool_mode (pool_ref);
5455
b2ccb744
UW
5456 if (!curr_pool)
5457 curr_pool = s390_start_pool (&pool_list, insn);
5458
fd7643fb 5459 s390_add_constant (curr_pool, constant, mode);
aee4e0db 5460 s390_add_pool_insn (curr_pool, insn);
aee4e0db 5461
fd7643fb
UW
5462 /* Don't split the pool chunk between a LTREL_OFFSET load
5463 and the corresponding LTREL_BASE. */
5464 if (GET_CODE (constant) == CONST
5465 && GET_CODE (XEXP (constant, 0)) == UNSPEC
5466 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
5467 {
5468 if (pending_ltrel)
5469 abort ();
5470 pending_ltrel = pool_ref;
5471 }
b2ccb744
UW
5472 }
5473 }
5474
aee4e0db 5475 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
fd7643fb
UW
5476 {
5477 if (curr_pool)
5478 s390_add_pool_insn (curr_pool, insn);
5479 /* An LTREL_BASE must follow within the same basic block. */
5480 if (pending_ltrel)
5481 abort ();
5482 }
aee4e0db 5483
c7453384 5484 if (!curr_pool
b2ccb744
UW
5485 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
5486 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
9db1d521 5487 continue;
13e58269 5488
9e8327e3 5489 if (TARGET_CPU_ZARCH)
9db1d521 5490 {
b2ccb744
UW
5491 if (curr_pool->size < S390_POOL_CHUNK_MAX)
5492 continue;
13e58269 5493
aee4e0db 5494 s390_end_pool (curr_pool, NULL_RTX);
b2ccb744
UW
5495 curr_pool = NULL;
5496 }
5497 else
9db1d521 5498 {
b2ccb744 5499 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
9c808aad 5500 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
b2ccb744
UW
5501 + extra_size;
5502
5503 /* We will later have to insert base register reload insns.
5504 Those will have an effect on code size, which we need to
5505 consider here. This calculation makes rather pessimistic
5506 worst-case assumptions. */
aee4e0db 5507 if (GET_CODE (insn) == CODE_LABEL)
b2ccb744 5508 extra_size += 6;
b2ccb744
UW
5509
5510 if (chunk_size < S390_POOL_CHUNK_MIN
5511 && curr_pool->size < S390_POOL_CHUNK_MIN)
5512 continue;
5513
5514 /* Pool chunks can only be inserted after BARRIERs ... */
5515 if (GET_CODE (insn) == BARRIER)
5516 {
5517 s390_end_pool (curr_pool, insn);
5518 curr_pool = NULL;
5519 extra_size = 0;
5520 }
5521
5522 /* ... so if we don't find one in time, create one. */
5523 else if ((chunk_size > S390_POOL_CHUNK_MAX
aee4e0db 5524 || curr_pool->size > S390_POOL_CHUNK_MAX))
b2ccb744 5525 {
b2ccb744
UW
5526 rtx label, jump, barrier;
5527
aee4e0db
UW
5528 /* We can insert the barrier only after a 'real' insn. */
5529 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
5530 continue;
5531 if (get_attr_length (insn) == 0)
5532 continue;
5533
c7453384 5534 /* Don't separate LTREL_BASE from the corresponding
fd7643fb
UW
5535 LTREL_OFFSET load. */
5536 if (pending_ltrel)
aee4e0db
UW
5537 continue;
5538
9c808aad 5539 label = gen_label_rtx ();
b2ccb744
UW
5540 jump = emit_jump_insn_after (gen_jump (label), insn);
5541 barrier = emit_barrier_after (jump);
5542 insn = emit_label_after (label, barrier);
5543 JUMP_LABEL (jump) = label;
5544 LABEL_NUSES (label) = 1;
5545
aee4e0db
UW
5546 INSN_ADDRESSES_NEW (jump, -1);
5547 INSN_ADDRESSES_NEW (barrier, -1);
b2ccb744
UW
5548 INSN_ADDRESSES_NEW (insn, -1);
5549
5550 s390_end_pool (curr_pool, barrier);
5551 curr_pool = NULL;
5552 extra_size = 0;
5553 }
13e58269 5554 }
9db1d521 5555 }
ce50cae8 5556
aee4e0db
UW
5557 if (curr_pool)
5558 s390_end_pool (curr_pool, NULL_RTX);
fd7643fb
UW
5559 if (pending_ltrel)
5560 abort ();
13e58269 5561
b2ccb744 5562
c7453384 5563 /* Find all labels that are branched into
13e58269 5564 from an insn belonging to a different chunk. */
ce50cae8 5565
b2ccb744 5566 far_labels = BITMAP_XMALLOC ();
6bc627b3 5567
13e58269 5568 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9db1d521 5569 {
b2ccb744
UW
5570 /* Labels marked with LABEL_PRESERVE_P can be target
5571 of non-local jumps, so we have to mark them.
5572 The same holds for named labels.
5573
5574 Don't do that, however, if it is the label before
5575 a jump table. */
5576
c7453384 5577 if (GET_CODE (insn) == CODE_LABEL
b2ccb744
UW
5578 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
5579 {
5580 rtx vec_insn = next_real_insn (insn);
c7453384 5581 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
b2ccb744
UW
5582 PATTERN (vec_insn) : NULL_RTX;
5583 if (!vec_pat
5584 || !(GET_CODE (vec_pat) == ADDR_VEC
5585 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
5586 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
5587 }
5588
5589 /* If we have a direct jump (conditional or unconditional)
5590 or a casesi jump, check all potential targets. */
c7453384 5591 else if (GET_CODE (insn) == JUMP_INSN)
13e58269
UW
5592 {
5593 rtx pat = PATTERN (insn);
0a3bdf9d
UW
5594 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
5595 pat = XVECEXP (pat, 0, 0);
5596
c7453384 5597 if (GET_CODE (pat) == SET)
13e58269 5598 {
aee4e0db 5599 rtx label = JUMP_LABEL (insn);
13e58269
UW
5600 if (label)
5601 {
c7453384 5602 if (s390_find_pool (pool_list, label)
b2ccb744
UW
5603 != s390_find_pool (pool_list, insn))
5604 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
13e58269 5605 }
c7453384 5606 }
b2ccb744
UW
5607 else if (GET_CODE (pat) == PARALLEL
5608 && XVECLEN (pat, 0) == 2
5609 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
5610 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
5611 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
5612 {
5613 /* Find the jump table used by this casesi jump. */
5614 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
5615 rtx vec_insn = next_real_insn (vec_label);
c7453384 5616 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
b2ccb744
UW
5617 PATTERN (vec_insn) : NULL_RTX;
5618 if (vec_pat
5619 && (GET_CODE (vec_pat) == ADDR_VEC
5620 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
5621 {
5622 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
13e58269 5623
b2ccb744
UW
5624 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
5625 {
5626 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
13e58269 5627
c7453384 5628 if (s390_find_pool (pool_list, label)
b2ccb744
UW
5629 != s390_find_pool (pool_list, insn))
5630 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
5631 }
5632 }
5633 }
13e58269 5634 }
9db1d521 5635 }
ce50cae8 5636
b2ccb744
UW
5637 /* Insert base register reload insns before every pool. */
5638
5639 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
aee4e0db 5640 {
585539a1
UW
5641 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
5642 curr_pool->label);
aee4e0db
UW
5643 rtx insn = curr_pool->first_insn;
5644 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
5645 }
b2ccb744
UW
5646
5647 /* Insert base register reload insns at every far label. */
13e58269 5648
13e58269 5649 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
c7453384 5650 if (GET_CODE (insn) == CODE_LABEL
b2ccb744
UW
5651 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
5652 {
5653 struct constant_pool *pool = s390_find_pool (pool_list, insn);
5654 if (pool)
5655 {
585539a1
UW
5656 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
5657 pool->label);
aee4e0db 5658 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
b2ccb744
UW
5659 }
5660 }
5661
aee4e0db
UW
5662
5663 BITMAP_XFREE (far_labels);
13e58269 5664
13e58269
UW
5665
5666 /* Recompute insn addresses. */
5667
5668 init_insn_lengths ();
5669 shorten_branches (get_insns ());
9db1d521 5670
aee4e0db
UW
5671 return pool_list;
5672}
9db1d521 5673
aee4e0db 5674/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
c7453384 5675 After we have decided to use this list, finish implementing
585539a1 5676 all changes to the current function as required. */
c7453384 5677
aee4e0db 5678static void
585539a1 5679s390_chunkify_finish (struct constant_pool *pool_list)
aee4e0db 5680{
aee4e0db
UW
5681 struct constant_pool *curr_pool = NULL;
5682 rtx insn;
c7453384
EC
5683
5684
aee4e0db
UW
5685 /* Replace all literal pool references. */
5686
c7453384 5687 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
aee4e0db 5688 {
fd7643fb 5689 if (INSN_P (insn))
585539a1 5690 replace_ltrel_base (&PATTERN (insn));
fd7643fb 5691
aee4e0db
UW
5692 curr_pool = s390_find_pool (pool_list, insn);
5693 if (!curr_pool)
5694 continue;
5695
5696 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5697 {
5698 rtx addr, pool_ref = NULL_RTX;
5699 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5700 if (pool_ref)
5701 {
9bb86f41
UW
5702 if (s390_execute_label (insn))
5703 addr = s390_find_execute (curr_pool, insn);
5704 else
5705 addr = s390_find_constant (curr_pool,
5706 get_pool_constant (pool_ref),
5707 get_pool_mode (pool_ref));
5708
aee4e0db
UW
5709 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
5710 INSN_CODE (insn) = -1;
5711 }
aee4e0db
UW
5712 }
5713 }
5714
5715 /* Dump out all literal pools. */
c7453384 5716
aee4e0db 5717 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5af2f3d3 5718 s390_dump_pool (curr_pool, 0);
c7453384 5719
aee4e0db
UW
5720 /* Free pool list. */
5721
5722 while (pool_list)
5723 {
5724 struct constant_pool *next = pool_list->next;
5725 s390_free_pool (pool_list);
5726 pool_list = next;
5727 }
5728}
5729
5730/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
5731 We have decided we cannot use this list, so revert all changes
5732 to the current function that were done by s390_chunkify_start. */
c7453384 5733
aee4e0db 5734static void
9c808aad 5735s390_chunkify_cancel (struct constant_pool *pool_list)
aee4e0db
UW
5736{
5737 struct constant_pool *curr_pool = NULL;
5738 rtx insn;
5739
5740 /* Remove all pool placeholder insns. */
5741
5742 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5743 {
5744 /* Did we insert an extra barrier? Remove it. */
5745 rtx barrier = PREV_INSN (curr_pool->pool_insn);
5746 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
5747 rtx label = NEXT_INSN (curr_pool->pool_insn);
5748
5749 if (jump && GET_CODE (jump) == JUMP_INSN
5750 && barrier && GET_CODE (barrier) == BARRIER
5751 && label && GET_CODE (label) == CODE_LABEL
5752 && GET_CODE (PATTERN (jump)) == SET
5753 && SET_DEST (PATTERN (jump)) == pc_rtx
5754 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
5755 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
5756 {
5757 remove_insn (jump);
5758 remove_insn (barrier);
5759 remove_insn (label);
b2ccb744 5760 }
9db1d521 5761
aee4e0db
UW
5762 remove_insn (curr_pool->pool_insn);
5763 }
5764
fd7643fb 5765 /* Remove all base register reload insns. */
aee4e0db
UW
5766
5767 for (insn = get_insns (); insn; )
5768 {
5769 rtx next_insn = NEXT_INSN (insn);
5770
5771 if (GET_CODE (insn) == INSN
5772 && GET_CODE (PATTERN (insn)) == SET
5773 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
fd7643fb 5774 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
aee4e0db 5775 remove_insn (insn);
9db1d521 5776
aee4e0db
UW
5777 insn = next_insn;
5778 }
5779
5780 /* Free pool list. */
9db1d521 5781
b2ccb744 5782 while (pool_list)
9db1d521 5783 {
b2ccb744
UW
5784 struct constant_pool *next = pool_list->next;
5785 s390_free_pool (pool_list);
5786 pool_list = next;
9db1d521 5787 }
9db1d521
HP
5788}
5789
b2ccb744 5790
faeb9bb6 5791/* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
416cf582
UW
5792
5793void
faeb9bb6 5794s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
416cf582
UW
5795{
5796 REAL_VALUE_TYPE r;
5797
5798 switch (GET_MODE_CLASS (mode))
5799 {
5800 case MODE_FLOAT:
5801 if (GET_CODE (exp) != CONST_DOUBLE)
5802 abort ();
5803
5804 REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
5805 assemble_real (r, mode, align);
5806 break;
5807
5808 case MODE_INT:
faeb9bb6 5809 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
416cf582
UW
5810 break;
5811
5812 default:
5813 abort ();
5814 }
5815}
5816
5817
adf39f8f 5818/* Rework the prologue/epilogue to avoid saving/restoring
91086990 5819 registers unnecessarily. */
b2ccb744 5820
c3cc6b78 5821static void
91086990 5822s390_optimize_prologue (void)
b2ccb744 5823{
c3cc6b78
UW
5824 rtx insn, new_insn, next_insn;
5825
b767fc11
UW
5826 /* Do a final recompute of the frame-related data. */
5827
91086990 5828 s390_update_frame_layout ();
aee4e0db 5829
c3cc6b78
UW
5830 /* If all special registers are in fact used, there's nothing we
5831 can do, so no point in walking the insn list. */
c3cc6b78 5832
adf39f8f
AK
5833 if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
5834 && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
b767fc11 5835 && (TARGET_CPU_ZARCH
adf39f8f
AK
5836 || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
5837 && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
b767fc11 5838 return;
c3cc6b78 5839
adf39f8f 5840 /* Search for prologue/epilogue insns and replace them. */
c3cc6b78
UW
5841
5842 for (insn = get_insns (); insn; insn = next_insn)
5843 {
5844 int first, last, off;
5845 rtx set, base, offset;
5846
5847 next_insn = NEXT_INSN (insn);
5848
5849 if (GET_CODE (insn) != INSN)
5850 continue;
c3cc6b78 5851
545d16ff
UW
5852 if (GET_CODE (PATTERN (insn)) == PARALLEL
5853 && store_multiple_operation (PATTERN (insn), VOIDmode))
c3cc6b78
UW
5854 {
5855 set = XVECEXP (PATTERN (insn), 0, 0);
5856 first = REGNO (SET_SRC (set));
5857 last = first + XVECLEN (PATTERN (insn), 0) - 1;
5858 offset = const0_rtx;
5859 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
adf39f8f 5860 off = INTVAL (offset);
c3cc6b78
UW
5861
5862 if (GET_CODE (base) != REG || off < 0)
5863 continue;
adf39f8f
AK
5864 if (REGNO (base) != STACK_POINTER_REGNUM
5865 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5866 continue;
490ceeb4 5867 if (first > BASE_REGNUM || last < BASE_REGNUM)
c3cc6b78 5868 continue;
545d16ff 5869
adf39f8f 5870 if (cfun_frame_layout.first_save_gpr != -1)
545d16ff 5871 {
adf39f8f
AK
5872 new_insn = save_gprs (base,
5873 off + (cfun_frame_layout.first_save_gpr
5874 - first) * UNITS_PER_WORD,
5875 cfun_frame_layout.first_save_gpr,
5876 cfun_frame_layout.last_save_gpr);
545d16ff
UW
5877 new_insn = emit_insn_before (new_insn, insn);
5878 INSN_ADDRESSES_NEW (new_insn, -1);
5879 }
5880
5881 remove_insn (insn);
5882 continue;
5883 }
5884
5885 if (GET_CODE (PATTERN (insn)) == SET
5886 && GET_CODE (SET_SRC (PATTERN (insn))) == REG
91086990
UW
5887 && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
5888 || (!TARGET_CPU_ZARCH
5889 && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
545d16ff
UW
5890 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
5891 {
5892 set = PATTERN (insn);
91086990 5893 first = REGNO (SET_SRC (set));
545d16ff
UW
5894 offset = const0_rtx;
5895 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
adf39f8f 5896 off = INTVAL (offset);
545d16ff
UW
5897
5898 if (GET_CODE (base) != REG || off < 0)
c3cc6b78 5899 continue;
adf39f8f
AK
5900 if (REGNO (base) != STACK_POINTER_REGNUM
5901 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5902 continue;
5903 if (cfun_frame_layout.first_save_gpr != -1)
c3cc6b78 5904 {
adf39f8f
AK
5905 new_insn = save_gprs (base,
5906 off + (cfun_frame_layout.first_save_gpr
91086990 5907 - first) * UNITS_PER_WORD,
adf39f8f
AK
5908 cfun_frame_layout.first_save_gpr,
5909 cfun_frame_layout.last_save_gpr);
c3cc6b78
UW
5910 new_insn = emit_insn_before (new_insn, insn);
5911 INSN_ADDRESSES_NEW (new_insn, -1);
5912 }
5913
5914 remove_insn (insn);
545d16ff 5915 continue;
c3cc6b78
UW
5916 }
5917
545d16ff
UW
5918 if (GET_CODE (PATTERN (insn)) == PARALLEL
5919 && load_multiple_operation (PATTERN (insn), VOIDmode))
c3cc6b78
UW
5920 {
5921 set = XVECEXP (PATTERN (insn), 0, 0);
5922 first = REGNO (SET_DEST (set));
5923 last = first + XVECLEN (PATTERN (insn), 0) - 1;
5924 offset = const0_rtx;
5925 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
adf39f8f 5926 off = INTVAL (offset);
c3cc6b78
UW
5927
5928 if (GET_CODE (base) != REG || off < 0)
5929 continue;
adf39f8f
AK
5930 if (REGNO (base) != STACK_POINTER_REGNUM
5931 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5932 continue;
490ceeb4 5933 if (first > BASE_REGNUM || last < BASE_REGNUM)
c3cc6b78 5934 continue;
545d16ff 5935
adf39f8f 5936 if (cfun_frame_layout.first_restore_gpr != -1)
545d16ff 5937 {
adf39f8f
AK
5938 new_insn = restore_gprs (base,
5939 off + (cfun_frame_layout.first_restore_gpr
5940 - first) * UNITS_PER_WORD,
5941 cfun_frame_layout.first_restore_gpr,
5942 cfun_frame_layout.last_restore_gpr);
545d16ff
UW
5943 new_insn = emit_insn_before (new_insn, insn);
5944 INSN_ADDRESSES_NEW (new_insn, -1);
5945 }
5946
5947 remove_insn (insn);
5948 continue;
5949 }
5950
5951 if (GET_CODE (PATTERN (insn)) == SET
5952 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
91086990
UW
5953 && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
5954 || (!TARGET_CPU_ZARCH
5955 && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
545d16ff
UW
5956 && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
5957 {
5958 set = PATTERN (insn);
91086990 5959 first = REGNO (SET_DEST (set));
545d16ff
UW
5960 offset = const0_rtx;
5961 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
adf39f8f 5962 off = INTVAL (offset);
545d16ff
UW
5963
5964 if (GET_CODE (base) != REG || off < 0)
c3cc6b78 5965 continue;
adf39f8f
AK
5966 if (REGNO (base) != STACK_POINTER_REGNUM
5967 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5968 continue;
5969 if (cfun_frame_layout.first_restore_gpr != -1)
c3cc6b78 5970 {
adf39f8f
AK
5971 new_insn = restore_gprs (base,
5972 off + (cfun_frame_layout.first_restore_gpr
91086990 5973 - first) * UNITS_PER_WORD,
adf39f8f
AK
5974 cfun_frame_layout.first_restore_gpr,
5975 cfun_frame_layout.last_restore_gpr);
c3cc6b78
UW
5976 new_insn = emit_insn_before (new_insn, insn);
5977 INSN_ADDRESSES_NEW (new_insn, -1);
5978 }
5979
5980 remove_insn (insn);
545d16ff 5981 continue;
c3cc6b78
UW
5982 }
5983 }
5984}
5985
5986/* Perform machine-dependent processing. */
5987
18dbd950 5988static void
9c808aad 5989s390_reorg (void)
c3cc6b78 5990{
5af2f3d3 5991 bool pool_overflow = false;
c3cc6b78 5992
aee4e0db
UW
5993 /* Make sure all splits have been performed; splits after
5994 machine_dependent_reorg might confuse insn length counts. */
5995 split_all_insns_noflow ();
c3cc6b78 5996
c3cc6b78 5997
5af2f3d3
UW
5998 /* Install the main literal pool and the associated base
5999 register load insns.
6000
38899e29 6001 In addition, there are two problematic situations we need
5af2f3d3 6002 to correct:
c7453384 6003
aee4e0db
UW
6004 - the literal pool might be > 4096 bytes in size, so that
6005 some of its elements cannot be directly accessed
c7453384 6006
aee4e0db
UW
6007 - a branch target might be > 64K away from the branch, so that
6008 it is not possible to use a PC-relative instruction.
c7453384 6009
aee4e0db
UW
6010 To fix those, we split the single literal pool into multiple
6011 pool chunks, reloading the pool base register at various
6012 points throughout the function to ensure it always points to
6013 the pool chunk the following code expects, and / or replace
6014 PC-relative branches by absolute branches.
c7453384 6015
aee4e0db
UW
6016 However, the two problems are interdependent: splitting the
6017 literal pool can move a branch further away from its target,
6018 causing the 64K limit to overflow, and on the other hand,
6019 replacing a PC-relative branch by an absolute branch means
6020 we need to put the branch target address into the literal
6021 pool, possibly causing it to overflow.
c7453384 6022
aee4e0db
UW
6023 So, we loop trying to fix up both problems until we manage
6024 to satisfy both conditions at the same time. Note that the
6025 loop is guaranteed to terminate as every pass of the loop
6026 strictly decreases the total number of PC-relative branches
6027 in the function. (This is not completely true as there
6028 might be branch-over-pool insns introduced by chunkify_start.
6029 Those never need to be split however.) */
c7453384 6030
aee4e0db
UW
6031 for (;;)
6032 {
5af2f3d3
UW
6033 struct constant_pool *pool = NULL;
6034
6035 /* Collect the literal pool. */
6036 if (!pool_overflow)
6037 {
6038 pool = s390_mainpool_start ();
6039 if (!pool)
6040 pool_overflow = true;
6041 }
c7453384 6042
5af2f3d3
UW
6043 /* If literal pool overflowed, start to chunkify it. */
6044 if (pool_overflow)
585539a1 6045 pool = s390_chunkify_start ();
aee4e0db
UW
6046
6047 /* Split out-of-range branches. If this has created new
6048 literal pool entries, cancel current chunk list and
545d16ff
UW
6049 recompute it. zSeries machines have large branch
6050 instructions, so we never need to split a branch. */
6051 if (!TARGET_CPU_ZARCH && s390_split_branches ())
aee4e0db 6052 {
5af2f3d3
UW
6053 if (pool_overflow)
6054 s390_chunkify_cancel (pool);
6055 else
6056 s390_mainpool_cancel (pool);
c7453384 6057
aee4e0db
UW
6058 continue;
6059 }
6060
aee4e0db 6061 /* If we made it up to here, both conditions are satisfied.
5af2f3d3 6062 Finish up literal pool related changes. */
5af2f3d3 6063 if (pool_overflow)
585539a1 6064 s390_chunkify_finish (pool);
5af2f3d3 6065 else
585539a1 6066 s390_mainpool_finish (pool);
c7453384 6067
91086990
UW
6068 /* We're done splitting branches. */
6069 cfun->machine->split_branches_pending_p = false;
aee4e0db
UW
6070 break;
6071 }
c7453384 6072
91086990 6073 s390_optimize_prologue ();
9db1d521
HP
6074}
6075
c3cc6b78 6076
5d4d885c
UW
6077/* Return an RTL expression representing the value of the return address
6078 for the frame COUNT steps up from the current frame. FRAME is the
6079 frame pointer of that frame. */
6080
6081rtx
a38e09bc 6082s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
5d4d885c 6083{
adf39f8f 6084 int offset;
5d4d885c
UW
6085 rtx addr;
6086
590fcf48
UW
6087 /* Without backchain, we fail for all but the current frame. */
6088
adf39f8f 6089 if (!TARGET_BACKCHAIN && !TARGET_KERNEL_BACKCHAIN && count > 0)
590fcf48
UW
6090 return NULL_RTX;
6091
416cf582
UW
6092 /* For the current frame, we need to make sure the initial
6093 value of RETURN_REGNUM is actually saved. */
5d4d885c
UW
6094
6095 if (count == 0)
a38e09bc 6096 {
adf39f8f 6097 cfun_frame_layout.save_return_addr_p = true;
a38e09bc
AK
6098 return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
6099 }
5d4d885c 6100
adf39f8f
AK
6101 if (TARGET_BACKCHAIN)
6102 offset = RETURN_REGNUM * UNITS_PER_WORD;
6103 else
6104 offset = -2 * UNITS_PER_WORD;
6105
6106 addr = plus_constant (frame, offset);
5d4d885c
UW
6107 addr = memory_address (Pmode, addr);
6108 return gen_rtx_MEM (Pmode, addr);
c7453384 6109}
5d4d885c 6110
ef44a6ff
UW
6111/* Return an RTL expression representing the back chain stored in
6112 the current stack frame. */
6113
6114rtx
6115s390_back_chain_rtx (void)
6116{
6117 rtx chain;
6118
6119 gcc_assert (TARGET_BACKCHAIN || TARGET_KERNEL_BACKCHAIN);
6120
6121 if (TARGET_BACKCHAIN)
6122 chain = stack_pointer_rtx;
6123 else
6124 chain = plus_constant (stack_pointer_rtx,
6125 STACK_POINTER_OFFSET - UNITS_PER_WORD);
6126
6127 chain = gen_rtx_MEM (Pmode, chain);
6128 return chain;
6129}
6130
1ae58c30 6131/* Find first call clobbered register unused in a function.
4023fb28
UW
6132 This could be used as base register in a leaf function
6133 or for holding the return address before epilogue. */
9db1d521 6134
4023fb28 6135static int
9c808aad 6136find_unused_clobbered_reg (void)
4023fb28
UW
6137{
6138 int i;
6139 for (i = 0; i < 6; i++)
6140 if (!regs_ever_live[i])
6141 return i;
6142 return 0;
6143}
6144
adf39f8f
AK
/* Determine the frame area which actually has to be accessed
   in the function epilogue.  The values are stored at the
   given pointers AREA_BOTTOM (address of the lowest used stack
   address) and AREA_TOP (address of the first item which does
   not belong to the stack frame).  */

static void
s390_frame_area (int *area_bottom, int *area_top)
{
  int b, t;
  int i;

  /* Start with an empty range; each contribution below widens it.  */
  b = INT_MAX;
  t = INT_MIN;

  /* GPR restore range.  */
  if (cfun_frame_layout.first_restore_gpr != -1)
    {
      b = (cfun_frame_layout.gprs_offset
	   + cfun_frame_layout.first_restore_gpr * UNITS_PER_WORD);
      t = b + (cfun_frame_layout.last_restore_gpr
	       - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_WORD;
    }

  /* High FPRs (f8-f15), saved only under the 64-bit ABI.  */
  if (TARGET_64BIT && cfun_save_high_fprs_p)
    {
      b = MIN (b, cfun_frame_layout.f8_offset);
      t = MAX (t, (cfun_frame_layout.f8_offset
		   + cfun_frame_layout.high_fprs * 8));
    }

  /* Under 31 bit, f4/f6 (fpr bits 2 and 3) are call-saved and each
     occupies an 8-byte slot in the f4 save area.  */
  if (!TARGET_64BIT)
    for (i = 2; i < 4; i++)
      if (cfun_fpr_bit_p (i))
	{
	  b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
	  t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
	}

  *area_bottom = b;
  *area_top = t;
}
6186
91086990
UW
/* Fill cfun->machine with info about register usage of current function.
   Return in LIVE_REGS which GPRs are currently considered live.  */

static void
s390_register_info (int live_regs[])
{
  int i, j;

  /* fprs 8 - 15 are call saved for 64 Bit ABI.  */
  cfun_frame_layout.fpr_bitmap = 0;
  cfun_frame_layout.high_fprs = 0;
  if (TARGET_64BIT)
    for (i = 24; i < 32; i++)
      if (regs_ever_live[i] && !global_regs[i])
	{
	  cfun_set_fpr_bit (i - 16);
	  cfun_frame_layout.high_fprs++;
	}

  /* Find first and last gpr to be saved.  We trust regs_ever_live
     data, except that we don't save and restore global registers.

     Also, all registers with special meaning to the compiler need
     to be handled extra.  */

  for (i = 0; i < 16; i++)
    live_regs[i] = regs_ever_live[i] && !global_regs[i];

  /* The PIC register is live whenever it was ever used at all.  */
  if (flag_pic)
    live_regs[PIC_OFFSET_TABLE_REGNUM]
      = regs_ever_live[PIC_OFFSET_TABLE_REGNUM];

  /* The literal pool base register is live only if it was actually
     allocated as BASE_REGNUM (see s390_init_frame_layout).  */
  live_regs[BASE_REGNUM]
    = cfun->machine->base_reg
      && REGNO (cfun->machine->base_reg) == BASE_REGNUM;

  /* The return register must survive if its initial value is needed,
     or while branch splitting may still introduce uses of it.  */
  live_regs[RETURN_REGNUM]
    = cfun->machine->split_branches_pending_p
      || cfun_frame_layout.save_return_addr_p;

  /* The stack pointer is live for any function that owns a frame.  */
  live_regs[STACK_POINTER_REGNUM]
    = !current_function_is_leaf
      || TARGET_TPF_PROFILING
      || cfun_save_high_fprs_p
      || get_frame_size () > 0
      || current_function_calls_alloca
      || current_function_stdarg;

  /* Compute the save/restore range [i, j] over the call-saved
     gprs 6..15.  */
  for (i = 6; i < 16; i++)
    if (live_regs[i])
      break;
  for (j = 15; j > i; j--)
    if (live_regs[j])
      break;

  if (i == 16)
    {
      /* Nothing to save/restore.  */
      cfun_frame_layout.first_save_gpr = -1;
      cfun_frame_layout.first_restore_gpr = -1;
      cfun_frame_layout.last_save_gpr = -1;
      cfun_frame_layout.last_restore_gpr = -1;
    }
  else
    {
      /* Save / Restore from gpr i to j.  */
      cfun_frame_layout.first_save_gpr = i;
      cfun_frame_layout.first_restore_gpr = i;
      cfun_frame_layout.last_save_gpr = j;
      cfun_frame_layout.last_restore_gpr = j;
    }

  if (current_function_stdarg)
    {
      /* Varargs functions need to save gprs 2 to 6.  */
      if (cfun_frame_layout.first_save_gpr == -1
	  || cfun_frame_layout.first_save_gpr > 2)
	cfun_frame_layout.first_save_gpr = 2;

      if (cfun_frame_layout.last_save_gpr == -1
	  || cfun_frame_layout.last_save_gpr < 6)
	cfun_frame_layout.last_save_gpr = 6;

      /* Mark f0, f2 for 31 bit and f0-f4 for 64 bit to be saved.  */
      if (TARGET_HARD_FLOAT)
	for (i = 0; i < (TARGET_64BIT ? 4 : 2); i++)
	  cfun_set_fpr_bit (i);
    }

  /* Under 31 bit, f4/f6 (hard regs 18/19) are call-saved.  */
  if (!TARGET_64BIT)
    for (i = 2; i < 4; i++)
      if (regs_ever_live[i + 16] && !global_regs[i + 16])
	cfun_set_fpr_bit (i);
}
6281
91086990 6282/* Fill cfun->machine with info about frame of current function. */
adf39f8f
AK
6283
6284static void
91086990 6285s390_frame_info (void)
adf39f8f
AK
6286{
6287 int i;
6288
6289 cfun_frame_layout.frame_size = get_frame_size ();
adf39f8f
AK
6290 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
6291 fatal_error ("Total size of local variables exceeds architecture limit.");
6292
6293 cfun_frame_layout.save_backchain_p = (TARGET_BACKCHAIN
6294 || TARGET_KERNEL_BACKCHAIN);
6295
6296 if (TARGET_BACKCHAIN)
6297 {
6298 cfun_frame_layout.backchain_offset = 0;
6299 cfun_frame_layout.f0_offset = 16 * UNITS_PER_WORD;
6300 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
6301 cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
6302 cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr
6303 * UNITS_PER_WORD);
6304 }
6305 else if (TARGET_KERNEL_BACKCHAIN)
6306 {
6307 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
6308 - UNITS_PER_WORD);
6309 cfun_frame_layout.gprs_offset
6310 = (cfun_frame_layout.backchain_offset
6311 - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr + 1)
6312 * UNITS_PER_WORD);
6313
6314 if (TARGET_64BIT)
6315 {
6316 cfun_frame_layout.f4_offset
6317 = (cfun_frame_layout.gprs_offset
6318 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6319
6320 cfun_frame_layout.f0_offset
6321 = (cfun_frame_layout.f4_offset
6322 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6323 }
6324 else
6325 {
ea506297
AK
6326 /* On 31 bit we have to care about alignment of the
6327 floating point regs to provide fastest access. */
adf39f8f 6328 cfun_frame_layout.f0_offset
ea506297
AK
6329 = ((cfun_frame_layout.gprs_offset
6330 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
adf39f8f
AK
6331 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6332
6333 cfun_frame_layout.f4_offset
6334 = (cfun_frame_layout.f0_offset
6335 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6336 }
6337 }
6338 else /* no backchain */
6339 {
6340 cfun_frame_layout.f4_offset
6341 = (STACK_POINTER_OFFSET
6342 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6343
6344 cfun_frame_layout.f0_offset
6345 = (cfun_frame_layout.f4_offset
6346 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6347
6348 cfun_frame_layout.gprs_offset
6349 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
6350 }
6351
6352 if (current_function_is_leaf
6353 && !TARGET_TPF_PROFILING
6354 && cfun_frame_layout.frame_size == 0
6355 && !cfun_save_high_fprs_p
6356 && !current_function_calls_alloca
6357 && !current_function_stdarg)
6358 return;
6359
6360 if (TARGET_BACKCHAIN)
6361 cfun_frame_layout.frame_size += (STARTING_FRAME_OFFSET
6362 + cfun_frame_layout.high_fprs * 8);
6363 else
6364 {
6365 cfun_frame_layout.frame_size += (cfun_frame_layout.save_backchain_p
6366 * UNITS_PER_WORD);
ea506297
AK
6367
6368 /* No alignment trouble here because f8-f15 are only saved under
6369 64 bit. */
adf39f8f
AK
6370 cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
6371 cfun_frame_layout.f4_offset),
6372 cfun_frame_layout.gprs_offset)
6373 - cfun_frame_layout.high_fprs * 8);
6374
6375 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;
6376
6377 for (i = 0; i < 8; i++)
6378 if (cfun_fpr_bit_p (i))
6379 cfun_frame_layout.frame_size += 8;
6380
6381 cfun_frame_layout.frame_size += cfun_gprs_save_area_size;
ea506297
AK
6382
6383 /* If under 31 bit an odd number of gprs has to be saved we have to adjust
6384 the frame size to sustain 8 byte alignment of stack frames. */
adf39f8f
AK
6385 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
6386 STACK_BOUNDARY / BITS_PER_UNIT - 1)
6387 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));
6388
6389 cfun_frame_layout.frame_size += current_function_outgoing_args_size;
b767fc11 6390 }
4023fb28
UW
6391}
6392
91086990
UW
/* Generate frame layout.  Fills in register and frame data for the current
   function in cfun->machine.  This routine can be called multiple times;
   it will re-do the complete frame layout every time.  */

static void
s390_init_frame_layout (void)
{
  HOST_WIDE_INT frame_size;
  int base_used;
  int live_regs[16];

  /* If return address register is explicitly used, we need to save it.  */
  if (regs_ever_live[RETURN_REGNUM]
      || !current_function_is_leaf
      || TARGET_TPF_PROFILING
      || current_function_stdarg
      || current_function_calls_eh_return)
    cfun_frame_layout.save_return_addr_p = true;

  /* On S/390 machines, we may need to perform branch splitting, which
     will require both base and return address register.  We have no
     choice but to assume we're going to need them until right at the
     end of the machine dependent reorg phase.  */
  if (!TARGET_CPU_ZARCH)
    cfun->machine->split_branches_pending_p = true;

  /* Iterate to a fixed point: the base-register decision depends on
     the frame size, which in turn depends on which registers must be
     saved -- including the base register itself.  */
  do
    {
      frame_size = cfun_frame_layout.frame_size;

      /* Try to predict whether we'll need the base register.  */
      base_used = cfun->machine->split_branches_pending_p
		  || current_function_uses_const_pool
		  || (!DISP_IN_RANGE (-frame_size)
		      && !CONST_OK_FOR_CONSTRAINT_P (-frame_size, 'K', "K"));

      /* Decide which register to use as literal pool base.  In small
	 leaf functions, try to use an unused call-clobbered register
	 as base register to avoid save/restore overhead.  */
      if (!base_used)
	cfun->machine->base_reg = NULL_RTX;
      else if (current_function_is_leaf && !regs_ever_live[5])
	cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
      else
	cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);

      s390_register_info (live_regs);
      s390_frame_info ();
    }
  while (frame_size != cfun_frame_layout.frame_size);
}
6444
91086990
UW
/* Update frame layout.  Recompute actual register save data based on
   current info and update regs_ever_live for the special registers.
   May be called multiple times, but may never cause *more* registers
   to be saved than s390_init_frame_layout allocated room for.  */

static void
s390_update_frame_layout (void)
{
  int live_regs[16];

  s390_register_info (live_regs);

  /* Propagate liveness of the special registers back into
     regs_ever_live so later passes see consistent data.  */
  regs_ever_live[BASE_REGNUM] = live_regs[BASE_REGNUM];
  regs_ever_live[RETURN_REGNUM] = live_regs[RETURN_REGNUM];
  regs_ever_live[STACK_POINTER_REGNUM] = live_regs[STACK_POINTER_REGNUM];

  /* The chosen literal pool base register is always in use.  */
  if (cfun->machine->base_reg)
    regs_ever_live[REGNO (cfun->machine->base_reg)] = 1;
}
6464
6465/* Return true if register FROM can be eliminated via register TO. */
6466
6467bool
6468s390_can_eliminate (int from, int to)
6469{
6470 gcc_assert (to == STACK_POINTER_REGNUM
6471 || to == HARD_FRAME_POINTER_REGNUM);
6472
6473 gcc_assert (from == FRAME_POINTER_REGNUM
6474 || from == ARG_POINTER_REGNUM
6475 || from == RETURN_ADDRESS_POINTER_REGNUM);
6476
6477 /* Make sure we actually saved the return address. */
6478 if (from == RETURN_ADDRESS_POINTER_REGNUM)
6479 if (!current_function_calls_eh_return
6480 && !current_function_stdarg
6481 && !cfun_frame_layout.save_return_addr_p)
6482 return false;
6483
6484 return true;
6485}
6486
/* Return offset between register FROM and TO initially after prolog.  */

HOST_WIDE_INT
s390_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset;
  int index;

  /* ??? Why are we called for non-eliminable pairs?  */
  if (!s390_can_eliminate (from, to))
    return 0;

  switch (from)
    {
    case FRAME_POINTER_REGNUM:
      /* Soft frame pointer coincides with the hard one / stack pointer.  */
      offset = 0;
      break;

    case ARG_POINTER_REGNUM:
      /* Arguments live above the register save area; requires the
	 final frame size, hence the layout recomputation.  */
      s390_init_frame_layout ();
      offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
      break;

    case RETURN_ADDRESS_POINTER_REGNUM:
      /* Point at RETURN_REGNUM's slot within the GPR save area.  */
      s390_init_frame_layout ();
      index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr;
      gcc_assert (index >= 0);
      offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
      offset += index * UNITS_PER_WORD;
      break;

    default:
      gcc_unreachable ();
    }

  return offset;
}
6524
4023fb28 6525/* Emit insn to save fpr REGNUM at offset OFFSET relative
c7453384 6526 to register BASE. Return generated insn. */
994fe660 6527
9db1d521 6528static rtx
9c808aad 6529save_fpr (rtx base, int offset, int regnum)
9db1d521 6530{
4023fb28
UW
6531 rtx addr;
6532 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
6533 set_mem_alias_set (addr, s390_sr_alias_set);
9db1d521 6534
4023fb28
UW
6535 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
6536}
9db1d521 6537
4023fb28 6538/* Emit insn to restore fpr REGNUM from offset OFFSET relative
c7453384 6539 to register BASE. Return generated insn. */
9db1d521 6540
4023fb28 6541static rtx
9c808aad 6542restore_fpr (rtx base, int offset, int regnum)
4023fb28
UW
6543{
6544 rtx addr;
6545 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
6546 set_mem_alias_set (addr, s390_sr_alias_set);
9db1d521 6547
4023fb28 6548 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
9db1d521
HP
6549}
6550
/* Generate insn to save registers FIRST to LAST into
   the register save area located at offset OFFSET
   relative to register BASE.  */

static rtx
save_gprs (rtx base, int offset, int first, int last)
{
  rtx addr, insn, note;
  int i;

  addr = plus_constant (base, offset);
  addr = gen_rtx_MEM (Pmode, addr);
  set_mem_alias_set (addr, s390_sr_alias_set);

  /* Special-case single register.  */
  if (first == last)
    {
      if (TARGET_64BIT)
	insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
      else
	insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));

      RTX_FRAME_RELATED_P (insn) = 1;
      return insn;
    }

  insn = gen_store_multiple (addr,
			     gen_rtx_REG (Pmode, first),
			     GEN_INT (last - first + 1));

  /* We need to set the FRAME_RELATED flag on all SETs
     inside the store-multiple pattern.

     However, we must not emit DWARF records for registers 2..5
     if they are stored for use by variable arguments ...

     ??? Unfortunately, it is not enough to simply not set the
     FRAME_RELATED flags for those SETs, because the first SET
     of the PARALLEL is always treated as if it had the flag
     set, even if it does not.  Therefore we emit a new pattern
     without those registers as REG_FRAME_RELATED_EXPR note.  */

  if (first >= 6)
    {
      /* No argument registers involved: flag every SET directly.  */
      rtx pat = PATTERN (insn);

      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
	  RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;

      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else if (last >= 6)
    {
      /* Build a shadow store-multiple covering only regs 6..LAST and
	 attach it as the DWARF view of this insn.  */
      addr = plus_constant (base, offset + (6 - first) * UNITS_PER_WORD);
      note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
				 gen_rtx_REG (Pmode, 6),
				 GEN_INT (last - 6 + 1));
      note = PATTERN (note);

      REG_NOTES (insn) =
	gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			   note, REG_NOTES (insn));

      for (i = 0; i < XVECLEN (note, 0); i++)
	if (GET_CODE (XVECEXP (note, 0, i)) == SET)
	  RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;

      RTX_FRAME_RELATED_P (insn) = 1;
    }

  return insn;
}
9db1d521 6626
c3cc6b78 6627/* Generate insn to restore registers FIRST to LAST from
c7453384 6628 the register save area located at offset OFFSET
c3cc6b78 6629 relative to register BASE. */
9db1d521 6630
c3cc6b78 6631static rtx
9c808aad 6632restore_gprs (rtx base, int offset, int first, int last)
4023fb28 6633{
c3cc6b78
UW
6634 rtx addr, insn;
6635
adf39f8f 6636 addr = plus_constant (base, offset);
c3cc6b78
UW
6637 addr = gen_rtx_MEM (Pmode, addr);
6638 set_mem_alias_set (addr, s390_sr_alias_set);
6639
6640 /* Special-case single register. */
6641 if (first == last)
6642 {
6643 if (TARGET_64BIT)
6644 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
6645 else
6646 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
6647
6648 return insn;
6649 }
6650
6651 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
6652 addr,
6653 GEN_INT (last - first + 1));
6654 return insn;
4023fb28 6655}
9db1d521 6656
/* Return insn sequence to load the GOT register.  */

/* Cached _GLOBAL_OFFSET_TABLE_ symbol; GTY-marked so the GC keeps it
   alive across functions.  */
static GTY(()) rtx got_symbol;

rtx
s390_load_got (void)
{
  rtx insns;

  if (!got_symbol)
    {
      got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
      SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
    }

  start_sequence ();

  if (TARGET_CPU_ZARCH)
    {
      /* z/Architecture can materialize the GOT address directly.  */
      emit_move_insn (pic_offset_table_rtx, got_symbol);
    }
  else
    {
      /* Older CPUs: load the literal-pool-relative GOT offset from the
	 constant pool, then add the literal pool base.  */
      rtx offset;

      offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
			       UNSPEC_LTREL_OFFSET);
      offset = gen_rtx_CONST (Pmode, offset);
      offset = force_const_mem (Pmode, offset);

      emit_move_insn (pic_offset_table_rtx, offset);

      offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
			       UNSPEC_LTREL_BASE);
      offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);

      emit_move_insn (pic_offset_table_rtx, offset);
    }

  insns = get_insns ();
  end_sequence ();
  return insns;
}
6699
/* Expand the prologue into a bunch of separate insns.  */

void
s390_emit_prologue (void)
{
  rtx insn, addr;
  rtx temp_reg;
  int i;
  int offset;
  int next_fpr = 0;   /* Highest f8-f15 hard regno still to be saved.  */

  /* Complete frame layout.  */

  s390_update_frame_layout ();

  /* Annotate all constant pool references to let the scheduler know
     they implicitly use the base register.  */

  push_topmost_sequence ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      annotate_constant_pool_refs (&PATTERN (insn));

  pop_topmost_sequence ();

  /* Choose best register to use for temp use within prologue.
     See below for why TPF must use the register 1.  */

  if (!current_function_is_leaf && !TARGET_TPF_PROFILING)
    temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
  else
    temp_reg = gen_rtx_REG (Pmode, 1);

  /* Save call saved gprs.  */
  if (cfun_frame_layout.first_save_gpr != -1)
    {
      insn = save_gprs (stack_pointer_rtx,
			cfun_frame_layout.gprs_offset,
			cfun_frame_layout.first_save_gpr,
			cfun_frame_layout.last_save_gpr);
      emit_insn (insn);
    }

  /* Dummy insn to mark literal pool slot.  */

  if (cfun->machine->base_reg)
    emit_insn (gen_main_pool (cfun->machine->base_reg));

  offset = cfun_frame_layout.f0_offset;

  /* Save f0 and f2.  */
  for (i = 0; i < 2; i++)
    {
      if (cfun_fpr_bit_p (i))
	{
	  save_fpr (stack_pointer_rtx, offset, i + 16);
	  offset += 8;
	}
      else if (TARGET_BACKCHAIN)
	/* Fixed-layout save area: skip the unused slot.  */
	offset += 8;
    }

  /* Save f4 and f6.  */
  offset = cfun_frame_layout.f4_offset;
  for (i = 2; i < 4; i++)
    {
      if (cfun_fpr_bit_p (i))
	{
	  insn = save_fpr (stack_pointer_rtx, offset, i + 16);
	  offset += 8;

	  /* If f4 and f6 are call clobbered they are saved due to
	     stdargs and therefore are not frame related.  */
	  if (!call_really_used_regs[i + 16])
	    RTX_FRAME_RELATED_P (insn) = 1;
	}
      else if (TARGET_BACKCHAIN)
	offset += 8;
    }

  /* Without backchain, the high FPRs live inside the main frame;
     save as many as fit above offset 0 here, and remember in
     next_fpr whether the rest must be handled after the stack
     pointer decrement below.  */
  if (!TARGET_BACKCHAIN
      && cfun_save_high_fprs_p
      && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
    {
      offset = (cfun_frame_layout.f8_offset
		+ (cfun_frame_layout.high_fprs - 1) * 8);

      for (i = 15; i > 7 && offset >= 0; i--)
	if (cfun_fpr_bit_p (i))
	  {
	    insn = save_fpr (stack_pointer_rtx, offset, i + 16);

	    RTX_FRAME_RELATED_P (insn) = 1;
	    offset -= 8;
	  }
      if (offset >= cfun_frame_layout.f8_offset)
	next_fpr = i + 16;
    }

  if (TARGET_BACKCHAIN)
    next_fpr = cfun_save_high_fprs_p ? 31 : 0;

  /* Decrement stack pointer.  */

  if (cfun_frame_layout.frame_size > 0)
    {
      rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);

      if (s390_stack_size)
	{
	  HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
					    & ~(s390_stack_guard - 1));
	  rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
			       GEN_INT (stack_check_mask));

	  /* NOTE(review): the gen_cmpdi/gen_cmpsi results are discarded;
	     presumably the cmp expanders record their operands in
	     s390_compare_op0/op1 as a side effect for the conditional
	     trap expander below -- TODO confirm against s390.md.  */
	  if (TARGET_64BIT)
	    gen_cmpdi (t, const0_rtx);
	  else
	    gen_cmpsi (t, const0_rtx);

	  emit_insn (gen_conditional_trap (gen_rtx_EQ (CCmode,
						       gen_rtx_REG (CCmode,
								    CC_REGNUM),
						       const0_rtx),
					   const0_rtx));
	}

      if (s390_warn_framesize > 0
	  && cfun_frame_layout.frame_size >= s390_warn_framesize)
	warning ("frame size of %qs is " HOST_WIDE_INT_PRINT_DEC " bytes",
		 current_function_name (), cfun_frame_layout.frame_size);

      if (s390_warn_dynamicstack_p && cfun->calls_alloca)
	warning ("%qs uses dynamic stack allocation", current_function_name ());

      /* Save incoming stack pointer into temp reg.  */
      if (cfun_frame_layout.save_backchain_p || next_fpr)
	insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));

      /* Subtract frame size from stack pointer.  */

      if (DISP_IN_RANGE (INTVAL (frame_off)))
	{
	  insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			      gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					    frame_off));
	  insn = emit_insn (insn);
	}
      else
	{
	  /* Offset too large for a displacement: use an add, spilling
	     the constant to the literal pool if even 'K' won't fit.  */
	  if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off), 'K', "K"))
	    frame_off = force_const_mem (Pmode, frame_off);

	  insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
	  annotate_constant_pool_refs (&PATTERN (insn));
	}

      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn) =
	gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			   gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			     gen_rtx_PLUS (Pmode, stack_pointer_rtx,
			       GEN_INT (-cfun_frame_layout.frame_size))),
			   REG_NOTES (insn));

      /* Set backchain.  */

      if (cfun_frame_layout.save_backchain_p)
	{
	  if (cfun_frame_layout.backchain_offset)
	    addr = gen_rtx_MEM (Pmode,
				plus_constant (stack_pointer_rtx,
				  cfun_frame_layout.backchain_offset));
	  else
	    addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
	  set_mem_alias_set (addr, s390_sr_alias_set);
	  insn = emit_insn (gen_move_insn (addr, temp_reg));
	}

      /* If we support asynchronous exceptions (e.g. for Java),
	 we need to make sure the backchain pointer is set up
	 before any possibly trapping memory access.  */

      if (cfun_frame_layout.save_backchain_p && flag_non_call_exceptions)
	{
	  addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
	}
    }

  /* Save fprs 8 - 15 (64 bit ABI).  */

  if (cfun_save_high_fprs_p && next_fpr)
    {
      insn = emit_insn (gen_add2_insn (temp_reg,
				       GEN_INT (cfun_frame_layout.f8_offset)));

      offset = 0;

      for (i = 24; i <= next_fpr; i++)
	if (cfun_fpr_bit_p (i - 16))
	  {
	    /* DWARF note addresses the slot via the (already
	       decremented) stack pointer, not temp_reg.  */
	    rtx addr = plus_constant (stack_pointer_rtx,
				      cfun_frame_layout.frame_size
				      + cfun_frame_layout.f8_offset
				      + offset);

	    insn = save_fpr (temp_reg, offset, i);
	    offset += 8;
	    RTX_FRAME_RELATED_P (insn) = 1;
	    REG_NOTES (insn) =
	      gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
				 gen_rtx_SET (VOIDmode,
					      gen_rtx_MEM (DFmode, addr),
					      gen_rtx_REG (DFmode, i)),
				 REG_NOTES (insn));
	  }
    }

  /* Set frame pointer, if needed.  */

  if (frame_pointer_needed)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up got pointer, if needed.  */

  if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
    {
      rtx insns = s390_load_got ();

      for (insn = insns; insn; insn = NEXT_INSN (insn))
	{
	  annotate_constant_pool_refs (&PATTERN (insn));

	  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
						REG_NOTES (insn));
	}

      emit_insn (insns);
    }

  if (TARGET_TPF_PROFILING)
    {
      /* Generate a BAS instruction to serve as a function
	 entry intercept to facilitate the use of tracing
	 algorithms located at the branch target.  */
      emit_insn (gen_prologue_tpf ());

      /* Emit a blockage here so that all code
	 lies between the profiling mechanisms.  */
      emit_insn (gen_blockage ());
    }
}
9db1d521 6957
/* Expand the epilogue into a bunch of separate insns.  SIBCALL is true
   when the epilogue precedes a sibling call, in which case no return
   insn is emitted and the return address need not be reloaded.  */

void
s390_emit_epilogue (bool sibcall)
{
  rtx frame_pointer, return_reg;
  int area_bottom, area_top, offset = 0;
  int next_offset;
  rtvec p;
  int i;

  if (TARGET_TPF_PROFILING)
    {

      /* Generate a BAS instruction to serve as a function
	 entry intercept to facilitate the use of tracing
	 algorithms located at the branch target.  */

      /* Emit a blockage here so that all code
	 lies between the profiling mechanisms.  */
      emit_insn (gen_blockage ());

      emit_insn (gen_epilogue_tpf ());
    }

  /* Check whether to use frame or stack pointer for restore.  */

  frame_pointer = (frame_pointer_needed
		   ? hard_frame_pointer_rtx : stack_pointer_rtx);

  s390_frame_area (&area_bottom, &area_top);

  /* Check whether we can access the register save area.
     If not, increment the frame pointer as required.  */

  if (area_top <= area_bottom)
    {
      /* Nothing to restore.  */
    }
  else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
	   && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
    {
      /* Area is in range.  */
      offset = cfun_frame_layout.frame_size;
    }
  else
    {
      /* Save area out of displacement range: advance FRAME_POINTER
	 toward it so the remaining offsets fit.  */
      rtx insn, frame_off;

      offset = area_bottom < 0 ? -area_bottom : 0;
      frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);

      if (DISP_IN_RANGE (INTVAL (frame_off)))
	{
	  insn = gen_rtx_SET (VOIDmode, frame_pointer,
			      gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
	  insn = emit_insn (insn);
	}
      else
	{
	  if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off), 'K', "K"))
	    frame_off = force_const_mem (Pmode, frame_off);

	  insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
	  annotate_constant_pool_refs (&PATTERN (insn));
	}
    }

  /* Restore call saved fprs.  */

  if (TARGET_64BIT)
    {
      if (cfun_save_high_fprs_p)
	{
	  next_offset = cfun_frame_layout.f8_offset;
	  for (i = 24; i < 32; i++)
	    {
	      if (cfun_fpr_bit_p (i - 16))
		{
		  restore_fpr (frame_pointer,
			       offset + next_offset, i);
		  next_offset += 8;
		}
	    }
	}

    }
  else
    {
      /* 31 bit: only f4/f6 (hard regs 18/19) are call-saved.  */
      next_offset = cfun_frame_layout.f4_offset;
      for (i = 18; i < 20; i++)
	{
	  if (cfun_fpr_bit_p (i - 16))
	    {
	      restore_fpr (frame_pointer,
			   offset + next_offset, i);
	      next_offset += 8;
	    }
	  else if (TARGET_BACKCHAIN)
	    next_offset += 8;
	}

    }

  /* Return register.  */

  return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);

  /* Restore call saved gprs.  */

  if (cfun_frame_layout.first_restore_gpr != -1)
    {
      rtx insn, addr;
      int i;

      /* Check for global register and save them
	 to stack location from where they get restored.  */

      for (i = cfun_frame_layout.first_restore_gpr;
	   i <= cfun_frame_layout.last_restore_gpr;
	   i++)
	{
	  /* These registers are special and need to be
	     restored in any case.  */
	  if (i == STACK_POINTER_REGNUM
	      || i == RETURN_REGNUM
	      || i == BASE_REGNUM
	      || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
	    continue;

	  if (global_regs[i])
	    {
	      /* Store the current (global) value back into the save
		 slot so the load-multiple below restores it unchanged.  */
	      addr = plus_constant (frame_pointer,
				    offset + cfun_frame_layout.gprs_offset
				    + (i - cfun_frame_layout.first_save_gpr)
				    * UNITS_PER_WORD);
	      addr = gen_rtx_MEM (Pmode, addr);
	      set_mem_alias_set (addr, s390_sr_alias_set);
	      emit_move_insn (addr, gen_rtx_REG (Pmode, i));
	    }
	}

      if (! sibcall)
	{
	  /* Fetch return address from stack before load multiple,
	     this will do good for scheduling.  */

	  if (cfun_frame_layout.save_return_addr_p
	      || (cfun_frame_layout.first_restore_gpr < BASE_REGNUM
		  && cfun_frame_layout.last_restore_gpr > RETURN_REGNUM))
	    {
	      /* Load the return address into an unused call-clobbered
		 register (gpr 4 as fallback) ahead of the LM.  */
	      int return_regnum = find_unused_clobbered_reg();
	      if (!return_regnum)
		return_regnum = 4;
	      return_reg = gen_rtx_REG (Pmode, return_regnum);

	      addr = plus_constant (frame_pointer,
				    offset + cfun_frame_layout.gprs_offset
				    + (RETURN_REGNUM
				       - cfun_frame_layout.first_save_gpr)
				    * UNITS_PER_WORD);
	      addr = gen_rtx_MEM (Pmode, addr);
	      set_mem_alias_set (addr, s390_sr_alias_set);
	      emit_move_insn (return_reg, addr);
	    }
	}

      insn = restore_gprs (frame_pointer,
			   offset + cfun_frame_layout.gprs_offset
			   + (cfun_frame_layout.first_restore_gpr
			      - cfun_frame_layout.first_save_gpr)
			   * UNITS_PER_WORD,
			   cfun_frame_layout.first_restore_gpr,
			   cfun_frame_layout.last_restore_gpr);
      emit_insn (insn);
    }

  if (! sibcall)
    {

      /* Return to caller.  */

      p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
7147
9db1d521 7148
c7453384 7149/* Return the size in bytes of a function argument of
994fe660
UW
7150 type TYPE and/or mode MODE. At least one of TYPE or
7151 MODE must be specified. */
9db1d521
HP
7152
7153static int
9c808aad 7154s390_function_arg_size (enum machine_mode mode, tree type)
9db1d521
HP
7155{
7156 if (type)
7157 return int_size_in_bytes (type);
7158
d65f7478 7159 /* No type info available for some library calls ... */
9db1d521
HP
7160 if (mode != BLKmode)
7161 return GET_MODE_SIZE (mode);
7162
7163 /* If we have neither type nor mode, abort */
994fe660 7164 abort ();
9db1d521
HP
7165}
7166
82b1c974
UW
7167/* Return true if a function argument of type TYPE and mode MODE
7168 is to be passed in a floating-point register, if available. */
7169
7170static bool
9c808aad 7171s390_function_arg_float (enum machine_mode mode, tree type)
82b1c974 7172{
8c17530e
UW
7173 int size = s390_function_arg_size (mode, type);
7174 if (size > 8)
7175 return false;
7176
82b1c974
UW
7177 /* Soft-float changes the ABI: no floating-point registers are used. */
7178 if (TARGET_SOFT_FLOAT)
7179 return false;
7180
7181 /* No type info available for some library calls ... */
7182 if (!type)
7183 return mode == SFmode || mode == DFmode;
7184
7185 /* The ABI says that record types with a single member are treated
7186 just like that member would be. */
7187 while (TREE_CODE (type) == RECORD_TYPE)
7188 {
7189 tree field, single = NULL_TREE;
7190
7191 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7192 {
7193 if (TREE_CODE (field) != FIELD_DECL)
7194 continue;
7195
7196 if (single == NULL_TREE)
7197 single = TREE_TYPE (field);
7198 else
7199 return false;
7200 }
7201
7202 if (single == NULL_TREE)
7203 return false;
7204 else
7205 type = single;
7206 }
7207
7208 return TREE_CODE (type) == REAL_TYPE;
7209}
7210
8c17530e
UW
7211/* Return true if a function argument of type TYPE and mode MODE
7212 is to be passed in an integer register, or a pair of integer
7213 registers, if available. */
7214
7215static bool
7216s390_function_arg_integer (enum machine_mode mode, tree type)
7217{
7218 int size = s390_function_arg_size (mode, type);
7219 if (size > 8)
7220 return false;
7221
7222 /* No type info available for some library calls ... */
7223 if (!type)
7224 return GET_MODE_CLASS (mode) == MODE_INT
7225 || (TARGET_SOFT_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT);
7226
7227 /* We accept small integral (and similar) types. */
7228 if (INTEGRAL_TYPE_P (type)
38899e29 7229 || POINTER_TYPE_P (type)
8c17530e
UW
7230 || TREE_CODE (type) == OFFSET_TYPE
7231 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
7232 return true;
7233
7234 /* We also accept structs of size 1, 2, 4, 8 that are not
38899e29 7235 passed in floating-point registers. */
8c17530e
UW
7236 if (AGGREGATE_TYPE_P (type)
7237 && exact_log2 (size) >= 0
7238 && !s390_function_arg_float (mode, type))
7239 return true;
7240
7241 return false;
7242}
7243
994fe660
UW
7244/* Return 1 if a function argument of type TYPE and mode MODE
7245 is to be passed by reference. The ABI specifies that only
7246 structures of size 1, 2, 4, or 8 bytes are passed by value,
7247 all other structures (and complex numbers) are passed by
7248 reference. */
7249
8cd5a4e0
RH
7250static bool
7251s390_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
7252 enum machine_mode mode, tree type,
7253 bool named ATTRIBUTE_UNUSED)
9db1d521
HP
7254{
7255 int size = s390_function_arg_size (mode, type);
8c17530e
UW
7256 if (size > 8)
7257 return true;
9db1d521
HP
7258
7259 if (type)
7260 {
8c17530e 7261 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
9db1d521
HP
7262 return 1;
7263
8c17530e
UW
7264 if (TREE_CODE (type) == COMPLEX_TYPE
7265 || TREE_CODE (type) == VECTOR_TYPE)
9db1d521
HP
7266 return 1;
7267 }
c7453384 7268
9db1d521 7269 return 0;
9db1d521
HP
7270}
7271
7272/* Update the data in CUM to advance over an argument of mode MODE and
7273 data type TYPE. (TYPE is null for libcalls where that information
994fe660
UW
7274 may not be available.). The boolean NAMED specifies whether the
7275 argument is a named argument (as opposed to an unnamed argument
7276 matching an ellipsis). */
9db1d521
HP
7277
7278void
9c808aad
AJ
7279s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
7280 tree type, int named ATTRIBUTE_UNUSED)
9db1d521 7281{
8cd5a4e0 7282 if (s390_function_arg_float (mode, type))
9db1d521 7283 {
82b1c974 7284 cum->fprs += 1;
9db1d521 7285 }
8c17530e 7286 else if (s390_function_arg_integer (mode, type))
9db1d521
HP
7287 {
7288 int size = s390_function_arg_size (mode, type);
7289 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
7290 }
8c17530e
UW
7291 else
7292 abort ();
9db1d521
HP
7293}
7294
994fe660
UW
7295/* Define where to put the arguments to a function.
7296 Value is zero to push the argument on the stack,
7297 or a hard register in which to store the argument.
7298
7299 MODE is the argument's machine mode.
7300 TYPE is the data type of the argument (as a tree).
7301 This is null for libcalls where that information may
7302 not be available.
7303 CUM is a variable of type CUMULATIVE_ARGS which gives info about
7304 the preceding args and about the function being called.
7305 NAMED is nonzero if this argument is a named parameter
c7453384 7306 (otherwise it is an extra parameter matching an ellipsis).
994fe660
UW
7307
7308 On S/390, we use general purpose registers 2 through 6 to
7309 pass integer, pointer, and certain structure arguments, and
7310 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
7311 to pass floating point arguments. All remaining arguments
7312 are pushed to the stack. */
9db1d521
HP
7313
7314rtx
9c808aad
AJ
7315s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
7316 int named ATTRIBUTE_UNUSED)
9db1d521 7317{
82b1c974 7318 if (s390_function_arg_float (mode, type))
9db1d521
HP
7319 {
7320 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
7321 return 0;
7322 else
f1c25d3b 7323 return gen_rtx_REG (mode, cum->fprs + 16);
9db1d521 7324 }
8c17530e 7325 else if (s390_function_arg_integer (mode, type))
9db1d521
HP
7326 {
7327 int size = s390_function_arg_size (mode, type);
7328 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
7329
7330 if (cum->gprs + n_gprs > 5)
7331 return 0;
7332 else
f1c25d3b 7333 return gen_rtx_REG (mode, cum->gprs + 2);
9db1d521 7334 }
8c17530e
UW
7335
7336 /* After the real arguments, expand_call calls us once again
7337 with a void_type_node type. Whatever we return here is
7338 passed as operand 2 to the call expanders.
7339
7340 We don't need this feature ... */
7341 else if (type == void_type_node)
7342 return const0_rtx;
7343
7344 abort ();
7345}
7346
7347/* Return true if return values of type TYPE should be returned
7348 in a memory buffer whose address is passed by the caller as
7349 hidden first argument. */
7350
7351static bool
7352s390_return_in_memory (tree type, tree fundecl ATTRIBUTE_UNUSED)
7353{
7354 /* We accept small integral (and similar) types. */
7355 if (INTEGRAL_TYPE_P (type)
38899e29 7356 || POINTER_TYPE_P (type)
8c17530e
UW
7357 || TREE_CODE (type) == OFFSET_TYPE
7358 || TREE_CODE (type) == REAL_TYPE)
7359 return int_size_in_bytes (type) > 8;
7360
7361 /* Aggregates and similar constructs are always returned
7362 in memory. */
7363 if (AGGREGATE_TYPE_P (type)
7364 || TREE_CODE (type) == COMPLEX_TYPE
7365 || TREE_CODE (type) == VECTOR_TYPE)
7366 return true;
7367
7368 /* ??? We get called on all sorts of random stuff from
7369 aggregate_value_p. We can't abort, but it's not clear
7370 what's safe to return. Pretend it's a struct I guess. */
7371 return true;
7372}
7373
7374/* Define where to return a (scalar) value of type TYPE.
7375 If TYPE is null, define where to return a (scalar)
7376 value of mode MODE from a libcall. */
7377
7378rtx
7379s390_function_value (tree type, enum machine_mode mode)
7380{
7381 if (type)
7382 {
8df83eae 7383 int unsignedp = TYPE_UNSIGNED (type);
8c17530e
UW
7384 mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
7385 }
7386
38899e29 7387 if (GET_MODE_CLASS (mode) != MODE_INT
8c17530e
UW
7388 && GET_MODE_CLASS (mode) != MODE_FLOAT)
7389 abort ();
7390 if (GET_MODE_SIZE (mode) > 8)
7391 abort ();
7392
7393 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
7394 return gen_rtx_REG (mode, 16);
7395 else
7396 return gen_rtx_REG (mode, 2);
9db1d521
HP
7397}
7398
7399
/* Create and return the va_list datatype.

   On S/390, va_list is an array type equivalent to

      typedef struct __va_list_tag
        {
            long __gpr;
            long __fpr;
            void *__overflow_arg_area;
            void *__reg_save_area;
        } va_list[1];

   where __gpr and __fpr hold the number of general purpose
   or floating point arguments used up to now, respectively,
   __overflow_arg_area points to the stack location of the
   next argument passed on the stack, and __reg_save_area
   always points to the start of the register area in the
   call frame of the current function.  The function prologue
   saves all registers used for argument passing into this
   area if the function uses variable arguments.  */

static tree
s390_build_builtin_va_list (void)
{
  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;

  record = lang_hooks.types.make_type (RECORD_TYPE);

  type_decl =
    build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  /* The four fields of __va_list_tag, in declaration order.  */
  f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
		      long_integer_type_node);
  f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
		      long_integer_type_node);
  f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
		      ptr_type_node);
  f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
		      ptr_type_node);

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  /* Chain the fields in layout order.  s390_va_start and
     s390_gimplify_va_arg walk this chain via TYPE_FIELDS /
     TREE_CHAIN, so the order here must not change.  */
  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  TREE_CHAIN (f_gpr) = f_fpr;
  TREE_CHAIN (f_fpr) = f_ovf;
  TREE_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
7457
/* Implement va_start by filling the va_list structure VALIST.
   STDARG_P is always true, and ignored.
   NEXTARG points to the first anonymous stack argument.

   The following global variables are used to initialize
   the va_list structure:

     current_function_args_info:
       holds number of gprs and fprs used for named arguments.
     current_function_arg_offset_rtx:
       holds the offset of the first anonymous stack argument
       (relative to the virtual arg pointer).  */

void
s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT n_gpr, n_fpr;
  int off;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Walk the field chain in the order laid down by
     s390_build_builtin_va_list: __gpr, __fpr, __overflow_arg_area,
     __reg_save_area.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* Build COMPONENT_REFs for each field of *valist.  */
  valist = build_va_arg_indirect_ref (valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);

  /* Count number of gp and fp argument registers used.  */

  n_gpr = current_function_args_info.gprs;
  n_fpr = current_function_args_info.fprs;

  /* __gpr = n_gpr;  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
	     build_int_cst (NULL_TREE, n_gpr));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* __fpr = n_fpr;  */
  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
	     build_int_cst (NULL_TREE, n_fpr));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);

  /* Negative offsets are clamped to zero.  */
  off = INTVAL (current_function_arg_offset_rtx);
  off = off < 0 ? 0 : off;
  if (TARGET_DEBUG_ARG)
    fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
	     (int)n_gpr, (int)n_fpr, off);

  t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_cst (NULL_TREE, off));

  /* __overflow_arg_area = incoming args + off;  */
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area.  The offset below the return
     address pointer depends on the stack layout variant.  */
  t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
  if (TARGET_KERNEL_BACKCHAIN)
    t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	       build_int_cst (NULL_TREE,
			      -(RETURN_REGNUM - 2) * UNITS_PER_WORD
			      - (TARGET_64BIT ? 4 : 2) * 8));
  else
    t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	       build_int_cst (NULL_TREE, -RETURN_REGNUM * UNITS_PER_WORD));

  /* __reg_save_area = computed base;  */
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
7535
/* Implement va_arg by updating the va_list structure
   VALIST as required to retrieve an argument of type
   TYPE, and returning that argument.

   Generates code equivalent to:

   if (integral value) {
     if (size  <= 4 && args.gpr < 5 ||
	 size  > 4 && args.gpr < 4 )
       ret = args.reg_save_area[args.gpr+8]
     else
       ret = *args.overflow_arg_area++;
   } else if (float value) {
     if (args.fgpr < 2)
       ret = args.reg_save_area[args.fpr+64]
     else
       ret = *args.overflow_arg_area++;
   } else if (aggregate value) {
     if (args.gpr < 5)
       ret = *args.reg_save_area[args.gpr]
     else
       ret = **args.overflow_arg_area++;
   } */

tree
s390_gimplify_va_arg (tree valist, tree type, tree *pre_p,
		      tree *post_p ATTRIBUTE_UNUSED)
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
  tree lab_false, lab_over, addr;

  /* Field chain order matches s390_build_builtin_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build_va_arg_indirect_ref (valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);

  size = int_size_in_bytes (type);

  /* Classify the argument: the three branches below set up
     REG (counter field), N_REG (registers consumed), SAV_OFS /
     SAV_SCALE (addressing into the register save area), SIZE,
     MAX_REG (last usable register index), and INDIRECT_P.  */
  if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: aggregate type");
	  debug_tree (type);
	}

      /* Aggregates are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;

      /* TARGET_KERNEL_BACKCHAIN on 31 bit: It is assumed here that no padding
	 will be added by s390_frame_info because for va_args always an even
	 number of gprs has to be saved r15-r2 = 14 regs.  */
      sav_ofs = (TARGET_KERNEL_BACKCHAIN
		 ? (TARGET_64BIT ? 4 : 2) * 8 : 2 * UNITS_PER_WORD);
      sav_scale = UNITS_PER_WORD;
      size = UNITS_PER_WORD;
      max_reg = 4;
    }
  else if (s390_function_arg_float (TYPE_MODE (type), type))
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: float type");
	  debug_tree (type);
	}

      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = TARGET_KERNEL_BACKCHAIN ? 0 : 16 * UNITS_PER_WORD;
      sav_scale = 8;
      /* TARGET_64BIT has up to 4 parameter in fprs */
      max_reg = TARGET_64BIT ? 3 : 1;
    }
  else
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: other type");
	  debug_tree (type);
	}

      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

      /* TARGET_KERNEL_BACKCHAIN on 31 bit: It is assumed here that no padding
	 will be added by s390_frame_info because for va_args always an even
	 number of gprs has to be saved r15-r2 = 14 regs.  */
      sav_ofs = TARGET_KERNEL_BACKCHAIN ?
	(TARGET_64BIT ? 4 : 2) * 8 : 2*UNITS_PER_WORD;

      /* Small values are right-aligned within their slot.  */
      if (size < UNITS_PER_WORD)
	sav_ofs += UNITS_PER_WORD - size;

      sav_scale = UNITS_PER_WORD;
      if (n_reg > 1)
	max_reg = 3;
      else
	max_reg = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = create_artificial_label ();
  lab_over = create_artificial_label ();
  addr = create_tmp_var (ptr_type_node, "addr");

  /* if (reg > max_reg) goto lab_false;  -- register area exhausted.  */
  t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
  t = build2 (GT_EXPR, boolean_type_node, reg, t);
  u = build1 (GOTO_EXPR, void_type_node, lab_false);
  t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
  gimplify_and_add (t, pre_p);

  /* addr = sav + sav_ofs + reg * sav_scale;  */
  t = build2 (PLUS_EXPR, ptr_type_node, sav,
	      fold_convert (ptr_type_node, size_int (sav_ofs)));
  u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
	      fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
  t = build2 (PLUS_EXPR, ptr_type_node, t, fold_convert (ptr_type_node, u));

  t = build2 (MODIFY_EXPR, void_type_node, addr, t);
  gimplify_and_add (t, pre_p);

  t = build1 (GOTO_EXPR, void_type_node, lab_over);
  gimplify_and_add (t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_false);
  append_to_statement_list (t, pre_p);


  /* ... Otherwise out of the overflow area.  */

  t = ovf;
  /* Right-align small values within the overflow slot as well.  */
  if (size < UNITS_PER_WORD)
    t = build2 (PLUS_EXPR, ptr_type_node, t,
		fold_convert (ptr_type_node, size_int (UNITS_PER_WORD - size)));

  gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);

  u = build2 (MODIFY_EXPR, void_type_node, addr, t);
  gimplify_and_add (u, pre_p);

  /* ovf += size;  -- advance past the consumed stack slot.  */
  t = build2 (PLUS_EXPR, ptr_type_node, t,
	      fold_convert (ptr_type_node, size_int (size)));
  t = build2 (MODIFY_EXPR, ptr_type_node, ovf, t);
  gimplify_and_add (t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_over);
  append_to_statement_list (t, pre_p);


  /* Increment register save count.  */

  u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
	      fold_convert (TREE_TYPE (reg), size_int (n_reg)));
  gimplify_and_add (u, pre_p);

  /* For by-reference arguments, ADDR holds the address of a pointer
     to the value; dereference once more.  */
  if (indirect_p)
    {
      t = build_pointer_type (build_pointer_type (type));
      addr = fold_convert (t, addr);
      addr = build_va_arg_indirect_ref (addr);
    }
  else
    {
      t = build_pointer_type (type);
      addr = fold_convert (t, addr);
    }

  return build_va_arg_indirect_ref (addr);
}
7719
994fe660 7720
/* Builtins.  */

enum s390_builtin
{
  S390_BUILTIN_THREAD_POINTER,
  S390_BUILTIN_SET_THREAD_POINTER,

  S390_BUILTIN_max
};

/* Insn codes implementing each builtin in 64-bit mode; indexed by
   enum s390_builtin, so the entries must stay in enum order.  */
static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
  CODE_FOR_get_tp_64,
  CODE_FOR_set_tp_64
};

/* Same table for 31-bit mode.  */
static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
  CODE_FOR_get_tp_31,
  CODE_FOR_set_tp_31
};
7740
7741static void
9c808aad 7742s390_init_builtins (void)
fd3cd001
UW
7743{
7744 tree ftype;
7745
7746 ftype = build_function_type (ptr_type_node, void_list_node);
6e34d3a3
JM
7747 lang_hooks.builtin_function ("__builtin_thread_pointer", ftype,
7748 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
7749 NULL, NULL_TREE);
fd3cd001
UW
7750
7751 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
6e34d3a3
JM
7752 lang_hooks.builtin_function ("__builtin_set_thread_pointer", ftype,
7753 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
7754 NULL, NULL_TREE);
fd3cd001
UW
7755}
7756
7757/* Expand an expression EXP that calls a built-in function,
7758 with result going to TARGET if that's convenient
7759 (and in mode MODE if that's convenient).
7760 SUBTARGET may be used as the target for computing one of EXP's operands.
7761 IGNORE is nonzero if the value is to be ignored. */
7762
7763static rtx
9c808aad
AJ
7764s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
7765 enum machine_mode mode ATTRIBUTE_UNUSED,
7766 int ignore ATTRIBUTE_UNUSED)
fd3cd001
UW
7767{
7768#define MAX_ARGS 2
7769
c7453384 7770 unsigned int const *code_for_builtin =
fd3cd001
UW
7771 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
7772
7773 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7774 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7775 tree arglist = TREE_OPERAND (exp, 1);
7776 enum insn_code icode;
7777 rtx op[MAX_ARGS], pat;
7778 int arity;
7779 bool nonvoid;
7780
7781 if (fcode >= S390_BUILTIN_max)
7782 internal_error ("bad builtin fcode");
7783 icode = code_for_builtin[fcode];
7784 if (icode == 0)
7785 internal_error ("bad builtin fcode");
7786
7787 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
7788
7789 for (arglist = TREE_OPERAND (exp, 1), arity = 0;
7790 arglist;
7791 arglist = TREE_CHAIN (arglist), arity++)
7792 {
7793 const struct insn_operand_data *insn_op;
7794
7795 tree arg = TREE_VALUE (arglist);
7796 if (arg == error_mark_node)
7797 return NULL_RTX;
7798 if (arity > MAX_ARGS)
7799 return NULL_RTX;
7800
7801 insn_op = &insn_data[icode].operand[arity + nonvoid];
7802
7803 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
7804
7805 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
7806 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
7807 }
7808
7809 if (nonvoid)
7810 {
7811 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7812 if (!target
7813 || GET_MODE (target) != tmode
7814 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
7815 target = gen_reg_rtx (tmode);
7816 }
7817
7818 switch (arity)
7819 {
7820 case 0:
7821 pat = GEN_FCN (icode) (target);
7822 break;
7823 case 1:
7824 if (nonvoid)
7825 pat = GEN_FCN (icode) (target, op[0]);
7826 else
7827 pat = GEN_FCN (icode) (op[0]);
7828 break;
7829 case 2:
7830 pat = GEN_FCN (icode) (target, op[0], op[1]);
7831 break;
7832 default:
7833 abort ();
7834 }
7835 if (!pat)
7836 return NULL_RTX;
7837 emit_insn (pat);
7838
7839 if (nonvoid)
7840 return target;
7841 else
7842 return const0_rtx;
7843}
7844
7845
994fe660
UW
7846/* Output assembly code for the trampoline template to
7847 stdio stream FILE.
7848
7849 On S/390, we use gpr 1 internally in the trampoline code;
7850 gpr 0 is used to hold the static chain. */
9db1d521
HP
7851
7852void
9c808aad 7853s390_trampoline_template (FILE *file)
9db1d521 7854{
cadc42db
UW
7855 rtx op[2];
7856 op[0] = gen_rtx_REG (Pmode, 0);
7857 op[1] = gen_rtx_REG (Pmode, 1);
7858
9db1d521
HP
7859 if (TARGET_64BIT)
7860 {
cadc42db
UW
7861 output_asm_insn ("basr\t%1,0", op);
7862 output_asm_insn ("lmg\t%0,%1,14(%1)", op);
7863 output_asm_insn ("br\t%1", op);
7864 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
9db1d521
HP
7865 }
7866 else
7867 {
cadc42db
UW
7868 output_asm_insn ("basr\t%1,0", op);
7869 output_asm_insn ("lm\t%0,%1,6(%1)", op);
7870 output_asm_insn ("br\t%1", op);
7871 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
9db1d521
HP
7872 }
7873}
7874
994fe660
UW
7875/* Emit RTL insns to initialize the variable parts of a trampoline.
7876 FNADDR is an RTX for the address of the function's pure code.
7877 CXT is an RTX for the static chain value for the function. */
7878
9db1d521 7879void
9c808aad 7880s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
9db1d521 7881{
a322288b 7882 emit_move_insn (gen_rtx_MEM (Pmode,
c7453384 7883 memory_address (Pmode,
cadc42db 7884 plus_constant (addr, (TARGET_64BIT ? 16 : 8)))), cxt);
a322288b 7885 emit_move_insn (gen_rtx_MEM (Pmode,
c7453384 7886 memory_address (Pmode,
cadc42db 7887 plus_constant (addr, (TARGET_64BIT ? 24 : 12)))), fnaddr);
9db1d521 7888}
4023fb28
UW
7889
7890/* Return rtx for 64-bit constant formed from the 32-bit subwords
7891 LOW and HIGH, independent of the host word size. */
7892
7893rtx
9c808aad 7894s390_gen_rtx_const_DI (int high, int low)
4023fb28
UW
7895{
7896#if HOST_BITS_PER_WIDE_INT >= 64
7897 HOST_WIDE_INT val;
7898 val = (HOST_WIDE_INT)high;
7899 val <<= 32;
7900 val |= (HOST_WIDE_INT)low;
c7453384 7901
4023fb28
UW
7902 return GEN_INT (val);
7903#else
7904#if HOST_BITS_PER_WIDE_INT >= 32
7905 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
7906#else
7907 abort ();
7908#endif
7909#endif
c7453384 7910}
4023fb28
UW
7911
/* Output assembler code to FILE to increment profiler label # LABELNO
   for profiling a function entry.

   Emits a call to _mcount.  The return-address register is saved to
   and restored from the caller's register save slot around the call,
   and %r1 is used as scratch to address the profiler label.  */

void
s390_function_profiler (FILE *file, int labelno)
{
  rtx op[7];

  char label[128];
  ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);

  fprintf (file, "# function profiler \n");

  /* op[0]: return-address register; op[1]: its save slot on the
     caller's stack frame.  */
  op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
  op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));

  /* op[2]: scratch register; op[3]: the profiler counter label.  */
  op[2] = gen_rtx_REG (Pmode, 1);
  op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
  SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;

  /* op[4]: _mcount, via PLT when generating PIC.  */
  op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
  if (flag_pic)
    {
      op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
      op[4] = gen_rtx_CONST (Pmode, op[4]);
    }

  if (TARGET_64BIT)
    {
      /* 64-bit: PC-relative addressing reaches everything directly.  */
      output_asm_insn ("stg\t%0,%1", op);
      output_asm_insn ("larl\t%2,%3", op);
      output_asm_insn ("brasl\t%0,%4", op);
      output_asm_insn ("lg\t%0,%1", op);
    }
  else if (!flag_pic)
    {
      /* 31-bit non-PIC: load absolute addresses from an inline
	 literal pool placed after the bras.  */
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      output_asm_insn (".long\t%4", op);
      output_asm_insn (".long\t%3", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("l\t%0,0(%2)", op);
      output_asm_insn ("l\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
  else
    {
      /* 31-bit PIC: the literal pool holds label-relative offsets,
	 turned back into addresses by adding the pool base.  */
      op[5] = gen_label_rtx ();
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
      output_asm_insn (".long\t%4-%l5", op);
      output_asm_insn (".long\t%3-%l5", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("lr\t%0,%2", op);
      output_asm_insn ("a\t%0,0(%2)", op);
      output_asm_insn ("a\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
}
7979
fd3cd001 7980/* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
114278e7 7981 into its SYMBOL_REF_FLAGS. */
fb49053f
RH
7982
7983static void
9c808aad 7984s390_encode_section_info (tree decl, rtx rtl, int first)
fb49053f 7985{
c6a2438a 7986 default_encode_section_info (decl, rtl, first);
e23795ea 7987
114278e7
RH
7988 /* If a variable has a forced alignment to < 2 bytes, mark it with
7989 SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand. */
c7453384 7990 if (TREE_CODE (decl) == VAR_DECL
114278e7 7991 && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
c6a2438a 7992 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
fd3cd001
UW
7993}
7994
3062825f 7995/* Output thunk to FILE that implements a C++ virtual function call (with
c7453384 7996 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
3062825f
UW
7997 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
7998 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
7999 relative to the resulting this pointer. */
8000
c590b625 8001static void
9c808aad
AJ
8002s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
8003 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
8004 tree function)
483ab821 8005{
89ce1c8f
JJ
8006 rtx op[10];
8007 int nonlocal = 0;
3062825f
UW
8008
8009 /* Operand 0 is the target function. */
8010 op[0] = XEXP (DECL_RTL (function), 0);
114278e7 8011 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
3062825f 8012 {
89ce1c8f
JJ
8013 nonlocal = 1;
8014 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
fd7643fb 8015 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
3062825f
UW
8016 op[0] = gen_rtx_CONST (Pmode, op[0]);
8017 }
8018
8019 /* Operand 1 is the 'this' pointer. */
61f71b34 8020 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
3062825f
UW
8021 op[1] = gen_rtx_REG (Pmode, 3);
8022 else
8023 op[1] = gen_rtx_REG (Pmode, 2);
8024
8025 /* Operand 2 is the delta. */
8026 op[2] = GEN_INT (delta);
8027
8028 /* Operand 3 is the vcall_offset. */
8029 op[3] = GEN_INT (vcall_offset);
8030
8031 /* Operand 4 is the temporary register. */
8032 op[4] = gen_rtx_REG (Pmode, 1);
8033
8034 /* Operands 5 to 8 can be used as labels. */
8035 op[5] = NULL_RTX;
8036 op[6] = NULL_RTX;
8037 op[7] = NULL_RTX;
8038 op[8] = NULL_RTX;
8039
89ce1c8f
JJ
8040 /* Operand 9 can be used for temporary register. */
8041 op[9] = NULL_RTX;
8042
3062825f
UW
8043 /* Generate code. */
8044 if (TARGET_64BIT)
8045 {
8046 /* Setup literal pool pointer if required. */
c7453384 8047 if ((!DISP_IN_RANGE (delta)
f19a9af7 8048 && !CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
c7453384 8049 || (!DISP_IN_RANGE (vcall_offset)
f19a9af7 8050 && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K")))
3062825f
UW
8051 {
8052 op[5] = gen_label_rtx ();
8053 output_asm_insn ("larl\t%4,%5", op);
8054 }
8055
8056 /* Add DELTA to this pointer. */
8057 if (delta)
8058 {
f19a9af7 8059 if (CONST_OK_FOR_CONSTRAINT_P (delta, 'J', "J"))
3062825f 8060 output_asm_insn ("la\t%1,%2(%1)", op);
d3632d41
UW
8061 else if (DISP_IN_RANGE (delta))
8062 output_asm_insn ("lay\t%1,%2(%1)", op);
f19a9af7 8063 else if (CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
3062825f
UW
8064 output_asm_insn ("aghi\t%1,%2", op);
8065 else
8066 {
8067 op[6] = gen_label_rtx ();
8068 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
8069 }
8070 }
8071
8072 /* Perform vcall adjustment. */
8073 if (vcall_offset)
8074 {
d3632d41 8075 if (DISP_IN_RANGE (vcall_offset))
3062825f
UW
8076 {
8077 output_asm_insn ("lg\t%4,0(%1)", op);
8078 output_asm_insn ("ag\t%1,%3(%4)", op);
8079 }
f19a9af7 8080 else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K"))
3062825f
UW
8081 {
8082 output_asm_insn ("lghi\t%4,%3", op);
8083 output_asm_insn ("ag\t%4,0(%1)", op);
8084 output_asm_insn ("ag\t%1,0(%4)", op);
8085 }
8086 else
8087 {
8088 op[7] = gen_label_rtx ();
8089 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
8090 output_asm_insn ("ag\t%4,0(%1)", op);
8091 output_asm_insn ("ag\t%1,0(%4)", op);
8092 }
8093 }
c7453384 8094
3062825f
UW
8095 /* Jump to target. */
8096 output_asm_insn ("jg\t%0", op);
8097
8098 /* Output literal pool if required. */
8099 if (op[5])
8100 {
8101 output_asm_insn (".align\t4", op);
47798692
UW
8102 targetm.asm_out.internal_label (file, "L",
8103 CODE_LABEL_NUMBER (op[5]));
3062825f
UW
8104 }
8105 if (op[6])
8106 {
47798692
UW
8107 targetm.asm_out.internal_label (file, "L",
8108 CODE_LABEL_NUMBER (op[6]));
3062825f
UW
8109 output_asm_insn (".long\t%2", op);
8110 }
8111 if (op[7])
8112 {
47798692
UW
8113 targetm.asm_out.internal_label (file, "L",
8114 CODE_LABEL_NUMBER (op[7]));
3062825f
UW
8115 output_asm_insn (".long\t%3", op);
8116 }
8117 }
8118 else
8119 {
8120 /* Setup base pointer if required. */
8121 if (!vcall_offset
d3632d41 8122 || (!DISP_IN_RANGE (delta)
f19a9af7 8123 && !CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
d3632d41 8124 || (!DISP_IN_RANGE (delta)
f19a9af7 8125 && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K")))
3062825f
UW
8126 {
8127 op[5] = gen_label_rtx ();
8128 output_asm_insn ("basr\t%4,0", op);
47798692
UW
8129 targetm.asm_out.internal_label (file, "L",
8130 CODE_LABEL_NUMBER (op[5]));
3062825f
UW
8131 }
8132
8133 /* Add DELTA to this pointer. */
8134 if (delta)
8135 {
f19a9af7 8136 if (CONST_OK_FOR_CONSTRAINT_P (delta, 'J', "J"))
3062825f 8137 output_asm_insn ("la\t%1,%2(%1)", op);
d3632d41
UW
8138 else if (DISP_IN_RANGE (delta))
8139 output_asm_insn ("lay\t%1,%2(%1)", op);
f19a9af7 8140 else if (CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
3062825f
UW
8141 output_asm_insn ("ahi\t%1,%2", op);
8142 else
8143 {
8144 op[6] = gen_label_rtx ();
8145 output_asm_insn ("a\t%1,%6-%5(%4)", op);
8146 }
8147 }
8148
8149 /* Perform vcall adjustment. */
8150 if (vcall_offset)
8151 {
f19a9af7 8152 if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'J', "J"))
3062825f
UW
8153 {
8154 output_asm_insn ("lg\t%4,0(%1)", op);
8155 output_asm_insn ("a\t%1,%3(%4)", op);
8156 }
d3632d41
UW
8157 else if (DISP_IN_RANGE (vcall_offset))
8158 {
8159 output_asm_insn ("lg\t%4,0(%1)", op);
8160 output_asm_insn ("ay\t%1,%3(%4)", op);
8161 }
f19a9af7 8162 else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K"))
3062825f
UW
8163 {
8164 output_asm_insn ("lhi\t%4,%3", op);
8165 output_asm_insn ("a\t%4,0(%1)", op);
8166 output_asm_insn ("a\t%1,0(%4)", op);
8167 }
8168 else
8169 {
8170 op[7] = gen_label_rtx ();
8171 output_asm_insn ("l\t%4,%7-%5(%4)", op);
8172 output_asm_insn ("a\t%4,0(%1)", op);
8173 output_asm_insn ("a\t%1,0(%4)", op);
8174 }
8175
8176 /* We had to clobber the base pointer register.
8177 Re-setup the base pointer (with a different base). */
8178 op[5] = gen_label_rtx ();
8179 output_asm_insn ("basr\t%4,0", op);
47798692
UW
8180 targetm.asm_out.internal_label (file, "L",
8181 CODE_LABEL_NUMBER (op[5]));
3062825f
UW
8182 }
8183
8184 /* Jump to target. */
8185 op[8] = gen_label_rtx ();
89ce1c8f 8186
3062825f
UW
8187 if (!flag_pic)
8188 output_asm_insn ("l\t%4,%8-%5(%4)", op);
89ce1c8f 8189 else if (!nonlocal)
3062825f 8190 output_asm_insn ("a\t%4,%8-%5(%4)", op);
89ce1c8f
JJ
8191 /* We cannot call through .plt, since .plt requires %r12 loaded. */
8192 else if (flag_pic == 1)
8193 {
8194 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8195 output_asm_insn ("l\t%4,%0(%4)", op);
8196 }
8197 else if (flag_pic == 2)
8198 {
8199 op[9] = gen_rtx_REG (Pmode, 0);
8200 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
8201 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8202 output_asm_insn ("ar\t%4,%9", op);
8203 output_asm_insn ("l\t%4,0(%4)", op);
8204 }
8205
3062825f
UW
8206 output_asm_insn ("br\t%4", op);
8207
8208 /* Output literal pool. */
8209 output_asm_insn (".align\t4", op);
89ce1c8f
JJ
8210
8211 if (nonlocal && flag_pic == 2)
8212 output_asm_insn (".long\t%0", op);
8213 if (nonlocal)
8214 {
8215 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
8216 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
8217 }
8218
47798692 8219 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
3062825f
UW
8220 if (!flag_pic)
8221 output_asm_insn (".long\t%0", op);
8222 else
8223 output_asm_insn (".long\t%0-%5", op);
8224
8225 if (op[6])
8226 {
47798692
UW
8227 targetm.asm_out.internal_label (file, "L",
8228 CODE_LABEL_NUMBER (op[6]));
3062825f
UW
8229 output_asm_insn (".long\t%2", op);
8230 }
8231 if (op[7])
8232 {
47798692
UW
8233 targetm.asm_out.internal_label (file, "L",
8234 CODE_LABEL_NUMBER (op[7]));
3062825f
UW
8235 output_asm_insn (".long\t%3", op);
8236 }
8237 }
483ab821 8238}
3062825f 8239
c7453384 8240bool
9c808aad 8241s390_valid_pointer_mode (enum machine_mode mode)
c7453384
EC
8242{
8243 return (mode == SImode || (TARGET_64BIT && mode == DImode));
8244}
8245
29742ba4
HP
8246/* How to allocate a 'struct machine_function'. */
8247
8248static struct machine_function *
9c808aad 8249s390_init_machine_status (void)
29742ba4
HP
8250{
8251 return ggc_alloc_cleared (sizeof (struct machine_function));
8252}
8253
/* Checks whether the given ARGUMENT_LIST would use a caller
   saved register.  This is used to decide whether sibling call
   optimization could be performed on the respective function
   call.

   Returns true if any argument is passed (wholly or partly) in a
   call-saved register, or conservatively on ERROR_MARK arguments.  */

static bool
s390_call_saved_register_used (tree argument_list)
{
  CUMULATIVE_ARGS cum;
  tree parameter;
  enum machine_mode mode;
  tree type;
  rtx parm_rtx;
  int reg;

  /* Simulate argument passing from scratch, exactly as the call
     expander would assign registers.  */
  INIT_CUMULATIVE_ARGS (cum, NULL, NULL, 0, 0);

  while (argument_list)
    {
      parameter = TREE_VALUE (argument_list);
      argument_list = TREE_CHAIN (argument_list);

      if (!parameter)
	abort();

      /* For an undeclared variable passed as parameter we will get
	 an ERROR_MARK node here.  Be conservative and refuse the
	 sibcall optimization in that case.  */
      if (TREE_CODE (parameter) == ERROR_MARK)
	return true;

      if (! (type = TREE_TYPE (parameter)))
	abort();

      if (! (mode = TYPE_MODE (TREE_TYPE (parameter))))
	abort();

      /* Arguments passed by invisible reference occupy a pointer slot
	 instead; query the slot's mode/type, not the value's.  */
      if (pass_by_reference (&cum, mode, type, true))
	{
	  mode = Pmode;
	  type = build_pointer_type (type);
	}

       parm_rtx = s390_function_arg (&cum, mode, type, 0);

       /* Advance CUM only after the lookup so the next iteration sees
	  the slot following this argument.  */
       s390_function_arg_advance (&cum, mode, type, 0);

       if (parm_rtx && REG_P (parm_rtx))
	 {
	   /* A single argument may span several hard registers; check
	      each of them.  */
	   for (reg = 0;
		reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
		reg++)
	     if (! call_used_regs[reg + REGNO (parm_rtx)])
	       return true;
	 }
    }
  return false;
}
8311
38899e29
EC
8312/* Return true if the given call expression can be
8313 turned into a sibling call.
ed9676cf
AK
8314 DECL holds the declaration of the function to be called whereas
8315 EXP is the call expression itself. */
38899e29 8316
ed9676cf
AK
8317static bool
8318s390_function_ok_for_sibcall (tree decl, tree exp)
8319{
8320 /* The TPF epilogue uses register 1. */
3839e36a 8321 if (TARGET_TPF_PROFILING)
ed9676cf
AK
8322 return false;
8323
38899e29 8324 /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
ed9676cf
AK
8325 which would have to be restored before the sibcall. */
8326 if (!TARGET_64BIT && flag_pic && decl && TREE_PUBLIC (decl))
8327 return false;
8328
8329 /* Register 6 on s390 is available as an argument register but unfortunately
8330 "caller saved". This makes functions needing this register for arguments
38899e29 8331 not suitable for sibcalls. */
ed9676cf
AK
8332 if (TREE_OPERAND (exp, 1)
8333 && s390_call_saved_register_used (TREE_OPERAND (exp, 1)))
8334 return false;
8335
8336 return true;
8337}
8338
ffdda752
AK
8339/* Return the fixed registers used for condition codes. */
8340
8341static bool
8342s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
8343{
8344 *p1 = CC_REGNUM;
8345 *p2 = INVALID_REGNUM;
8346
8347 return true;
8348}
8349
/* This function is used by the call expanders of the machine description.
   It emits the call insn itself together with the necessary operations
   to adjust the target address and returns the emitted insn.
   ADDR_LOCATION is the target address rtx
   TLS_CALL the location of the thread-local symbol
   RESULT_REG the register where the result of the call should be stored
   RETADDR_REG the register where the return address should be stored
               If this parameter is NULL_RTX the call is considered
               to be a sibling call.  */

rtx
s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
		rtx retaddr_reg)
{
  bool plt_call = false;
  rtx insn;
  rtx call;
  rtx clobber;
  rtvec vec;

  /* Direct function calls need special treatment.  */
  if (GET_CODE (addr_location) == SYMBOL_REF)
    {
      /* When calling a global routine in PIC mode, we must
	 replace the symbol itself with the PLT stub.  */
      if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
	{
	  addr_location = gen_rtx_UNSPEC (Pmode,
					  gen_rtvec (1, addr_location),
					  UNSPEC_PLT);
	  addr_location = gen_rtx_CONST (Pmode, addr_location);
	  plt_call = true;
	}

      /* Unless we can use the bras(l) insn, force the
	 routine address into a register.  */
      if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
	{
	  if (flag_pic)
	    addr_location = legitimize_pic_address (addr_location, 0);
	  else
	    addr_location = force_reg (Pmode, addr_location);
	}
    }

  /* If it is already an indirect call or the code above moved the
     SYMBOL_REF to somewhere else make sure the address can be found in
     register 1.  */
  if (retaddr_reg == NULL_RTX
      && GET_CODE (addr_location) != SYMBOL_REF
      && !plt_call)
    {
      emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
      addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
    }

  /* Wrap the address in a MEM and build the basic CALL rtx.  */
  addr_location = gen_rtx_MEM (QImode, addr_location);
  call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);

  if (result_reg != NULL_RTX)
    call = gen_rtx_SET (VOIDmode, result_reg, call);

  /* A non-sibling call clobbers the return-address register; for TLS
     calls additionally record a USE of the TLS symbol.  */
  if (retaddr_reg != NULL_RTX)
    {
      clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);

      if (tls_call != NULL_RTX)
	vec = gen_rtvec (3, call, clobber,
			 gen_rtx_USE (VOIDmode, tls_call));
      else
	vec = gen_rtvec (2, call, clobber);

      call = gen_rtx_PARALLEL (VOIDmode, vec);
    }

  insn = emit_call_insn (call);

  /* 31-bit PLT stubs and tls calls use the GOT register implicitly.  */
  if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
    {
      /* s390_function_ok_for_sibcall should
	 have denied sibcalls in this case.  */
      if (retaddr_reg == NULL_RTX)
	abort ();

      use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
    }
  return insn;
}
8439
38899e29
EC
8440/* Implement CONDITIONAL_REGISTER_USAGE. */
8441
8442void
8443s390_conditional_register_usage (void)
8444{
8445 int i;
8446
8447 if (flag_pic)
8448 {
8449 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
8450 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
8451 }
8452 if (TARGET_CPU_ZARCH)
8453 {
8454 fixed_regs[RETURN_REGNUM] = 0;
8455 call_used_regs[RETURN_REGNUM] = 0;
8456 }
8457 if (TARGET_64BIT)
8458 {
8459 for (i = 24; i < 32; i++)
8460 call_used_regs[i] = call_really_used_regs[i] = 0;
8461 }
8462 else
8463 {
8464 for (i = 18; i < 20; i++)
8465 call_used_regs[i] = call_really_used_regs[i] = 0;
8466 }
8a512b77
AK
8467
8468 if (TARGET_SOFT_FLOAT)
8469 {
8470 for (i = 16; i < 32; i++)
8471 call_used_regs[i] = fixed_regs[i] = 1;
8472 }
38899e29
EC
8473}
8474
/* Corresponding function to eh_return expander.  Emits a call to the
   TPF runtime helper __tpf_eh_return with TARGET (the EH handler
   address) in %r2, then stores the helper's result into
   EH_RETURN_HANDLER_RTX.  */

/* Cached SYMBOL_REF for __tpf_eh_return; GTY so it survives GC.  */
static GTY(()) rtx s390_tpf_eh_return_symbol;
void
s390_emit_tpf_eh_return (rtx target)
{
  rtx insn, reg;

  /* Create the symbol lazily on first use.  */
  if (!s390_tpf_eh_return_symbol)
    s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");

  /* %r2 serves as both argument and result register for the call.  */
  reg = gen_rtx_REG (Pmode, 2);

  emit_move_insn (reg, target);
  insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
                         gen_rtx_REG (Pmode, RETURN_REGNUM));
  /* Record that %r2 is live into the call.  */
  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);

  emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
}
38899e29 8495
29742ba4 8496#include "gt-s390.h"