/* Subroutines used for code generation on IBM S/390 and zSeries
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
   2007 Free Software Foundation, Inc.
   Contributed by Hartmut Penner (hpenner@de.ibm.com) and
                  Ulrich Weigand (uweigand@de.ibm.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "recog.h"
#include "expr.h"
#include "reload.h"
#include "toplev.h"
#include "basic-block.h"
#include "integrate.h"
#include "ggc.h"
#include "target.h"
#include "target-def.h"
#include "debug.h"
#include "langhooks.h"
#include "optabs.h"
#include "tree-gimple.h"
#include "df.h"

/* Define the specific costs for a given cpu.  */

struct processor_costs
{
  /* multiplication */
  const int m;        /* cost of an M instruction.  */
  const int mghi;     /* cost of an MGHI instruction.  */
  const int mh;       /* cost of an MH instruction.  */
  const int mhi;      /* cost of an MHI instruction.  */
  const int ml;       /* cost of an ML instruction.  */
  const int mr;       /* cost of an MR instruction.  */
  const int ms;       /* cost of an MS instruction.  */
  const int msg;      /* cost of an MSG instruction.  */
  const int msgf;     /* cost of an MSGF instruction.  */
  const int msgfr;    /* cost of an MSGFR instruction.  */
  const int msgr;     /* cost of an MSGR instruction.  */
  const int msr;      /* cost of an MSR instruction.  */
  const int mult_df;  /* cost of multiplication in DFmode.  */
  const int mxbr;     /* cost of an MXBR instruction (TFmode multiply).  */
  /* square root */
  const int sqxbr;    /* cost of square root in TFmode.  */
  const int sqdbr;    /* cost of square root in DFmode.  */
  const int sqebr;    /* cost of square root in SFmode.  */
  /* multiply and add */
  const int madbr;    /* cost of multiply and add in DFmode.  */
  const int maebr;    /* cost of multiply and add in SFmode.  */
  /* division */
  const int dxbr;     /* cost of a DXBR instruction (TFmode divide).  */
  const int ddbr;     /* cost of a DDBR instruction (DFmode divide).  */
  const int debr;     /* cost of a DEBR instruction (SFmode divide).  */
  const int dlgr;     /* cost of a DLGR instruction.  */
  const int dlr;      /* cost of a DLR instruction.  */
  const int dr;       /* cost of a DR instruction.  */
  const int dsgfr;    /* cost of a DSGFR instruction.  */
  const int dsgr;     /* cost of a DSGR instruction.  */
};

const struct processor_costs *s390_cost;

static const
struct processor_costs z900_cost =
{
  COSTS_N_INSNS (5),     /* M     */
  COSTS_N_INSNS (10),    /* MGHI  */
  COSTS_N_INSNS (5),     /* MH    */
  COSTS_N_INSNS (4),     /* MHI   */
  COSTS_N_INSNS (5),     /* ML    */
  COSTS_N_INSNS (5),     /* MR    */
  COSTS_N_INSNS (4),     /* MS    */
  COSTS_N_INSNS (15),    /* MSG   */
  COSTS_N_INSNS (7),     /* MSGF  */
  COSTS_N_INSNS (7),     /* MSGFR */
  COSTS_N_INSNS (10),    /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (7),     /* multiplication in DFmode */
  COSTS_N_INSNS (13),    /* MXBR  */
  COSTS_N_INSNS (136),   /* SQXBR */
  COSTS_N_INSNS (44),    /* SQDBR */
  COSTS_N_INSNS (35),    /* SQEBR */
  COSTS_N_INSNS (18),    /* MADBR */
  COSTS_N_INSNS (13),    /* MAEBR */
  COSTS_N_INSNS (134),   /* DXBR  */
  COSTS_N_INSNS (30),    /* DDBR  */
  COSTS_N_INSNS (27),    /* DEBR  */
  COSTS_N_INSNS (220),   /* DLGR  */
  COSTS_N_INSNS (34),    /* DLR   */
  COSTS_N_INSNS (34),    /* DR    */
  COSTS_N_INSNS (32),    /* DSGFR */
  COSTS_N_INSNS (32),    /* DSGR  */
};

static const
struct processor_costs z990_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR  */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR  */
  COSTS_N_INSNS (40),    /* DDBR  */
  COSTS_N_INSNS (26),    /* DEBR  */
  COSTS_N_INSNS (176),   /* DLGR  */
  COSTS_N_INSNS (31),    /* DLR   */
  COSTS_N_INSNS (31),    /* DR    */
  COSTS_N_INSNS (31),    /* DSGFR */
  COSTS_N_INSNS (31),    /* DSGR  */
};

static const
struct processor_costs z9_109_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR  */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR  */
  COSTS_N_INSNS (40),    /* DDBR  */
  COSTS_N_INSNS (26),    /* DEBR  */
  COSTS_N_INSNS (30),    /* DLGR  */
  COSTS_N_INSNS (23),    /* DLR   */
  COSTS_N_INSNS (23),    /* DR    */
  COSTS_N_INSNS (24),    /* DSGFR */
  COSTS_N_INSNS (24),    /* DSGR  */
};

extern int reload_completed;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx s390_compare_op0, s390_compare_op1;

/* Save the result of a compare_and_swap until the branch or scc is
   emitted.  */
rtx s390_compare_emitted = NULL_RTX;

/* Structure used to hold the components of an S/390 memory
   address.  A legitimate address on S/390 is of the general
   form
          base + index + displacement
   where any of the components is optional.

   base and index are registers of the class ADDR_REGS,
   displacement is an unsigned 12-bit immediate constant.  */

struct s390_address
{
  rtx base;
  rtx indx;
  rtx disp;
  bool pointer;
  bool literal_pool;
};

/* Which cpu are we tuning for.  */
enum processor_type s390_tune = PROCESSOR_max;
enum processor_flags s390_tune_flags;
/* Which instruction set architecture to use.  */
enum processor_type s390_arch;
enum processor_flags s390_arch_flags;

HOST_WIDE_INT s390_warn_framesize = 0;
HOST_WIDE_INT s390_stack_size = 0;
HOST_WIDE_INT s390_stack_guard = 0;

/* The following structure is embedded in the machine
   specific part of struct function.  */

struct s390_frame_layout GTY (())
{
  /* Offset within stack frame.  */
  HOST_WIDE_INT gprs_offset;
  HOST_WIDE_INT f0_offset;
  HOST_WIDE_INT f4_offset;
  HOST_WIDE_INT f8_offset;
  HOST_WIDE_INT backchain_offset;

  /* Number of first and last gpr for which slots in the register
     save area are reserved.  */
  int first_save_gpr_slot;
  int last_save_gpr_slot;

  /* Number of first and last gpr to be saved, restored.  */
  int first_save_gpr;
  int first_restore_gpr;
  int last_save_gpr;
  int last_restore_gpr;

  /* Bits standing for floating point registers.  Set, if the
     respective register has to be saved.  Starting with reg 16 (f0)
     at the rightmost bit.
     Bit 15 -  8  7  6  5  4  3  2  1  0
     fpr 15 -  8  7  5  3  1  6  4  2  0
     reg 31 - 24 23 22 21 20 19 18 17 16  */
  unsigned int fpr_bitmap;

  /* Number of floating point registers f8-f15 which must be saved.  */
  int high_fprs;

  /* Set if return address needs to be saved.
     This flag is set by s390_return_addr_rtx if it could not use
     the initial value of r14 and therefore depends on r14 saved
     to the stack.  */
  bool save_return_addr_p;

  /* Size of stack frame.  */
  HOST_WIDE_INT frame_size;
};

/* Define the structure for the machine field in struct function.  */

struct machine_function GTY(())
{
  struct s390_frame_layout frame_layout;

  /* Literal pool base register.  */
  rtx base_reg;

  /* True if we may need to perform branch splitting.  */
  bool split_branches_pending_p;

  /* True during final stage of literal pool processing.  */
  bool decomposed_literal_pool_addresses_ok_p;

  /* Some local-dynamic TLS symbol name.  */
  const char *some_ld_name;

  /* True if the current function has landing pads.  */
  bool has_landing_pad_p;
};

/* A few accessor macros for struct cfun->machine->s390_frame_layout.  */

#define cfun_frame_layout (cfun->machine->frame_layout)
#define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
#define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr_slot -   \
  cfun_frame_layout.first_save_gpr_slot + 1) * UNITS_PER_WORD)
#define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |= \
  (1 << (BITNUM)))
#define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap & \
  (1 << (BITNUM))))

/* Number of GPRs and FPRs used for argument passing.  */
#define GP_ARG_NUM_REG 5
#define FP_ARG_NUM_REG (TARGET_64BIT ? 4 : 2)

/* A couple of shortcuts.  */
#define CONST_OK_FOR_J(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'J', "J")
#define CONST_OK_FOR_K(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'K', "K")
#define CONST_OK_FOR_Os(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Os")
#define CONST_OK_FOR_Op(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Op")
#define CONST_OK_FOR_On(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "On")

#define REGNO_PAIR_OK(REGNO, MODE)                               \
  (HARD_REGNO_NREGS ((REGNO), (MODE)) == 1 || !((REGNO) & 1))

/* Return the mode to be used for comparison results in libgcc
   support routines.  */

static enum machine_mode
s390_libgcc_cmp_return_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

/* Return the mode to be used for shift counts in libgcc
   support routines.  */

static enum machine_mode
s390_libgcc_shift_count_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

/* Return true if the back end supports mode MODE.  */
static bool
s390_scalar_mode_supported_p (enum machine_mode mode)
{
  if (DECIMAL_FLOAT_MODE_P (mode))
    return true;
  else
    return default_scalar_mode_supported_p (mode);
}

/* Set the has_landing_pad_p flag in struct machine_function to VALUE.  */

void
s390_set_has_landing_pad_p (bool value)
{
  cfun->machine->has_landing_pad_p = value;
}

/* If two condition code modes are compatible, return a condition code
   mode which is compatible with both.  Otherwise, return
   VOIDmode.  */

static enum machine_mode
s390_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
{
  if (m1 == m2)
    return m1;

  switch (m1)
    {
    case CCZmode:
      if (m2 == CCUmode || m2 == CCTmode || m2 == CCZ1mode
          || m2 == CCSmode || m2 == CCSRmode || m2 == CCURmode)
        return m2;
      return VOIDmode;

    case CCSmode:
    case CCUmode:
    case CCTmode:
    case CCSRmode:
    case CCURmode:
    case CCZ1mode:
      if (m2 == CCZmode)
        return m1;

      return VOIDmode;

    default:
      return VOIDmode;
    }
  return VOIDmode;
}

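/* For example, s390_cc_modes_compatible (CCZmode, CCUmode) yields
   CCUmode, since every condition representable in CCZmode (EQ/NE)
   can also be read from a CCUmode value; two distinct modes of
   which neither is CCZmode yield VOIDmode.  */
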
/* Return true if SET either doesn't set the CC register, or else
   the source and destination have matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.  */

static bool
s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
{
  enum machine_mode set_mode;

  gcc_assert (GET_CODE (set) == SET);

  if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
    return true;

  set_mode = GET_MODE (SET_DEST (set));
  switch (set_mode)
    {
    case CCSmode:
    case CCSRmode:
    case CCUmode:
    case CCURmode:
    case CCLmode:
    case CCL1mode:
    case CCL2mode:
    case CCL3mode:
    case CCT1mode:
    case CCT2mode:
    case CCT3mode:
      if (req_mode != set_mode)
        return false;
      break;

    case CCZmode:
      if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
          && req_mode != CCSRmode && req_mode != CCURmode)
        return false;
      break;

    case CCAPmode:
    case CCANmode:
      if (req_mode != CCAmode)
        return false;
      break;

    default:
      gcc_unreachable ();
    }

  return (GET_MODE (SET_SRC (set)) == set_mode);
}

/* Return true if every SET in INSN that sets the CC register
   has source and destination with matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.
   If REQ_MODE is VOIDmode, always return false.  */

bool
s390_match_ccmode (rtx insn, enum machine_mode req_mode)
{
  int i;

  /* s390_tm_ccmode returns VOIDmode to indicate failure.  */
  if (req_mode == VOIDmode)
    return false;

  if (GET_CODE (PATTERN (insn)) == SET)
    return s390_match_ccmode_set (PATTERN (insn), req_mode);

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      {
        rtx set = XVECEXP (PATTERN (insn), 0, i);
        if (GET_CODE (set) == SET)
          if (!s390_match_ccmode_set (set, req_mode))
            return false;
      }

  return true;
}

/* If a test-under-mask instruction can be used to implement
   (compare (and ... OP1) OP2), return the CC mode required
   to do that.  Otherwise, return VOIDmode.
   MIXED is true if the instruction can distinguish between
   CC1 and CC2 for mixed selected bits (TMxx), it is false
   if the instruction cannot (TM).  */

enum machine_mode
s390_tm_ccmode (rtx op1, rtx op2, bool mixed)
{
  int bit0, bit1;

  /* ??? Fixme: should work on CONST_DOUBLE as well.  */
  if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
    return VOIDmode;

  /* Selected bits all zero: CC0.
     e.g.: int a; if ((a & (16 + 128)) == 0) */
  if (INTVAL (op2) == 0)
    return CCTmode;

  /* Selected bits all one: CC3.
     e.g.: int a; if ((a & (16 + 128)) == 16 + 128) */
  if (INTVAL (op2) == INTVAL (op1))
    return CCT3mode;

  /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2.  e.g.:
     int a;
     if ((a & (16 + 128)) == 16)  -> CCT1
     if ((a & (16 + 128)) == 128) -> CCT2  */
  if (mixed)
    {
      bit1 = exact_log2 (INTVAL (op2));
      bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
      if (bit0 != -1 && bit1 != -1)
        return bit0 > bit1 ? CCT1mode : CCT2mode;
    }

  return VOIDmode;
}

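/* A worked example for the mixed case above: with OP1 == 144
   (bits 16 + 128 selected) and OP2 == 16 we get
   bit1 = exact_log2 (16) = 4 and bit0 = exact_log2 (144 ^ 16) = 7,
   so bit0 > bit1 and CCT1mode is returned.  With OP2 == 128 the
   roles of the bits are swapped and the result is CCT2mode.  */
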
/* Given a comparison code OP (EQ, NE, etc.) and the operands
   OP0 and OP1 of a COMPARE, return the mode to be used for the
   comparison.  */

enum machine_mode
s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
{
  switch (code)
    {
    case EQ:
    case NE:
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
        return CCAPmode;
      if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
           || GET_CODE (op1) == NEG)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCLmode;

      if (GET_CODE (op0) == AND)
        {
          /* Check whether we can potentially do it via TM.  */
          enum machine_mode ccmode;
          ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
          if (ccmode != VOIDmode)
            {
              /* Relax CCTmode to CCZmode to allow fall-back to AND
                 if that turns out to be beneficial.  */
              return ccmode == CCTmode ? CCZmode : ccmode;
            }
        }

      if (register_operand (op0, HImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
        return CCT3mode;
      if (register_operand (op0, QImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
        return CCT3mode;

      return CCZmode;

    case LE:
    case LT:
    case GE:
    case GT:
      /* The only overflow condition of NEG and ABS happens when
         -INT_MAX is used as parameter, which stays negative.  So
         we have an overflow from a positive value to a negative.
         Using CCAP mode the resulting cc can be used for comparisons.  */
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;

      /* If constants are involved in an add instruction it is possible to use
         the resulting cc for comparisons with zero.  Knowing the sign of the
         constant the overflow behavior becomes predictable.  e.g.:
           int a, b; if ((b = a + c) > 0)
         with c as a constant value: c < 0 -> CCAN and c >= 0 -> CCAP  */
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
        {
          if (INTVAL (XEXP (op0, 1)) < 0)
            return CCANmode;
          else
            return CCAPmode;
        }
      /* Fall through.  */
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case UNLE:
    case UNLT:
    case UNGE:
    case UNGT:
    case LTGT:
      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCSRmode;
      return CCSmode;

    case LTU:
    case GEU:
      if (GET_CODE (op0) == PLUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL1mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    case LEU:
    case GTU:
      if (GET_CODE (op0) == MINUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL2mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    default:
      gcc_unreachable ();
    }
}

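/* Two examples of the mapping above: an EQ comparison of
   (plus (reg) (const_int 42)) against zero yields CCAPmode, as 42 is
   a small constant accepted by CONST_OK_FOR_K, while an unsigned LTU
   comparison of a PLUS yields CCL1mode so that the carry bit can be
   tested directly.  */
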
/* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
   that we can implement more efficiently.  */

void
s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
{
  /* Convert ZERO_EXTRACT back to AND to enable TM patterns.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == ZERO_EXTRACT
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && GET_CODE (XEXP (*op0, 2)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
      HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
      HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));

      if (len > 0 && len < modesize
          && pos >= 0 && pos + len <= modesize
          && modesize <= HOST_BITS_PER_WIDE_INT)
        {
          unsigned HOST_WIDE_INT block;
          block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
          block <<= modesize - pos - len;

          *op0 = gen_rtx_AND (GET_MODE (inner), inner,
                              gen_int_mode (block, GET_MODE (inner)));
        }
    }

  /* Narrow AND of memory against immediate to enable TM.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == AND
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      rtx mask = XEXP (*op0, 1);

      /* Ignore paradoxical SUBREGs if all extra bits are masked out.  */
      if (GET_CODE (inner) == SUBREG
          && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
          && (GET_MODE_SIZE (GET_MODE (inner))
              >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
          && ((INTVAL (mask)
               & GET_MODE_MASK (GET_MODE (inner))
               & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
              == 0))
        inner = SUBREG_REG (inner);

      /* Do not change volatile MEMs.  */
      if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
        {
          int part = s390_single_part (XEXP (*op0, 1),
                                       GET_MODE (inner), QImode, 0);
          if (part >= 0)
            {
              mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
              inner = adjust_address_nv (inner, QImode, part);
              *op0 = gen_rtx_AND (QImode, inner, mask);
            }
        }
    }

  /* Narrow comparisons against 0xffff to HImode if possible.  */
  if ((*code == EQ || *code == NE)
      && GET_CODE (*op1) == CONST_INT
      && INTVAL (*op1) == 0xffff
      && SCALAR_INT_MODE_P (GET_MODE (*op0))
      && (nonzero_bits (*op0, GET_MODE (*op0))
          & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
    {
      *op0 = gen_lowpart (HImode, *op0);
      *op1 = constm1_rtx;
    }

  /* Remove redundant UNSPEC_CCU_TO_INT conversions if possible.  */
  if (GET_CODE (*op0) == UNSPEC
      && XINT (*op0, 1) == UNSPEC_CCU_TO_INT
      && XVECLEN (*op0, 0) == 1
      && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
      && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
      && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
      && *op1 == const0_rtx)
    {
      enum rtx_code new_code = UNKNOWN;
      switch (*code)
        {
        case EQ: new_code = EQ;  break;
        case NE: new_code = NE;  break;
        case LT: new_code = GTU; break;
        case GT: new_code = LTU; break;
        case LE: new_code = GEU; break;
        case GE: new_code = LEU; break;
        default: break;
        }

      if (new_code != UNKNOWN)
        {
          *op0 = XVECEXP (*op0, 0, 0);
          *code = new_code;
        }
    }

  /* Remove redundant UNSPEC_CCZ_TO_INT conversions if possible.  */
  if (GET_CODE (*op0) == UNSPEC
      && XINT (*op0, 1) == UNSPEC_CCZ_TO_INT
      && XVECLEN (*op0, 0) == 1
      && GET_MODE (XVECEXP (*op0, 0, 0)) == CCZmode
      && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
      && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
      && *op1 == const0_rtx)
    {
      enum rtx_code new_code = UNKNOWN;
      switch (*code)
        {
        case EQ: new_code = EQ;  break;
        case NE: new_code = NE;  break;
        default: break;
        }

      if (new_code != UNKNOWN)
        {
          *op0 = XVECEXP (*op0, 0, 0);
          *code = new_code;
        }
    }

  /* Simplify cascaded EQ, NE with const0_rtx.  */
  if ((*code == NE || *code == EQ)
      && (GET_CODE (*op0) == EQ || GET_CODE (*op0) == NE)
      && GET_MODE (*op0) == SImode
      && GET_MODE (XEXP (*op0, 0)) == CCZ1mode
      && REG_P (XEXP (*op0, 0))
      && XEXP (*op0, 1) == const0_rtx
      && *op1 == const0_rtx)
    {
      if ((*code == EQ && GET_CODE (*op0) == NE)
          || (*code == NE && GET_CODE (*op0) == EQ))
        *code = EQ;
      else
        *code = NE;
      *op0 = XEXP (*op0, 0);
    }

  /* Prefer register over memory as first operand.  */
  if (MEM_P (*op0) && REG_P (*op1))
    {
      rtx tem = *op0; *op0 = *op1; *op1 = tem;
      *code = swap_condition (*code);
    }
}

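/* As an example of the AND narrowing above: the comparison
   (eq (and (mem:SI X) (const_int 0x00ff0000)) (const_int 0))
   touches only the second byte of X, so it is rewritten into
   (eq (and (mem:QI X+1) (const_int 0xff)) (const_int 0)),
   which can be implemented as a single TM instruction.  */
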
/* Emit a compare instruction suitable to implement the comparison
   OP0 CODE OP1.  Return the correct condition RTL to be placed in
   the IF_THEN_ELSE of the conditional branch testing the result.  */

rtx
s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
{
  enum machine_mode mode = s390_select_ccmode (code, op0, op1);
  rtx ret = NULL_RTX;

  /* Do not output a redundant compare instruction if a compare_and_swap
     pattern already computed the result and the machine modes are
     compatible.  */
  if (s390_compare_emitted
      && (s390_cc_modes_compatible (GET_MODE (s390_compare_emitted), mode)
          == GET_MODE (s390_compare_emitted)))
    ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx);
  else
    {
      rtx cc = gen_rtx_REG (mode, CC_REGNUM);

      emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
      ret = gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
    }
  s390_compare_emitted = NULL_RTX;
  return ret;
}

/* Emit a SImode compare-and-swap instruction: set MEM to NEW if the
   current contents of MEM equal CMP, and latch the previous contents
   of MEM in OLD.  Return the correct condition RTL to be placed in
   the IF_THEN_ELSE of the conditional branch testing the result.  */

static rtx
s390_emit_compare_and_swap (enum rtx_code code, rtx old, rtx mem, rtx cmp, rtx new)
{
  rtx ret;

  emit_insn (gen_sync_compare_and_swap_ccsi (old, mem, cmp, new));
  ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx);

  s390_compare_emitted = NULL_RTX;

  return ret;
}

/* Emit a jump instruction to TARGET.  If COND is NULL_RTX, emit an
   unconditional jump, else a conditional jump under condition COND.  */

void
s390_emit_jump (rtx target, rtx cond)
{
  rtx insn;

  target = gen_rtx_LABEL_REF (VOIDmode, target);
  if (cond)
    target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);

  insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
  emit_jump_insn (insn);
}

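/* A typical way to combine the two emitters above (sketch only;
   OP0, OP1 and LABEL stand for caller-provided rtxen):

     rtx cond = s390_emit_compare (GT, op0, op1);
     s390_emit_jump (label, cond);

   emits a branch to LABEL taken iff OP0 > OP1.  Passing NULL_RTX
   as COND instead emits an unconditional jump.  */
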
/* Return branch condition mask to implement a branch
   specified by CODE.  Return -1 for invalid comparisons.  */

int
s390_branch_condition_mask (rtx code)
{
  const int CC0 = 1 << 3;
  const int CC1 = 1 << 2;
  const int CC2 = 1 << 1;
  const int CC3 = 1 << 0;

  gcc_assert (GET_CODE (XEXP (code, 0)) == REG);
  gcc_assert (REGNO (XEXP (code, 0)) == CC_REGNUM);
  gcc_assert (XEXP (code, 1) == const0_rtx);

  switch (GET_MODE (XEXP (code, 0)))
    {
    case CCZmode:
    case CCZ1mode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC1 | CC2 | CC3;
        default:  return -1;
        }
      break;

    case CCT1mode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC1;
        case NE:  return CC0 | CC2 | CC3;
        default:  return -1;
        }
      break;

    case CCT2mode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC2;
        case NE:  return CC0 | CC1 | CC3;
        default:  return -1;
        }
      break;

    case CCT3mode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC3;
        case NE:  return CC0 | CC1 | CC2;
        default:  return -1;
        }
      break;

    case CCLmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0 | CC2;
        case NE:  return CC1 | CC3;
        default:  return -1;
        }
      break;

    case CCL1mode:
      switch (GET_CODE (code))
        {
        case LTU: return CC2 | CC3;  /* carry */
        case GEU: return CC0 | CC1;  /* no carry */
        default:  return -1;
        }
      break;

    case CCL2mode:
      switch (GET_CODE (code))
        {
        case GTU: return CC0 | CC1;  /* borrow */
        case LEU: return CC2 | CC3;  /* no borrow */
        default:  return -1;
        }
      break;

    case CCL3mode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0 | CC2;
        case NE:  return CC1 | CC3;
        case LTU: return CC1;
        case GTU: return CC3;
        case LEU: return CC1 | CC2;
        case GEU: return CC2 | CC3;
        default:  return -1;
        }

    case CCUmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC1 | CC2 | CC3;
        case LTU: return CC1;
        case GTU: return CC2;
        case LEU: return CC0 | CC1;
        case GEU: return CC0 | CC2;
        default:  return -1;
        }
      break;

    case CCURmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC2 | CC1 | CC3;
        case LTU: return CC2;
        case GTU: return CC1;
        case LEU: return CC0 | CC2;
        case GEU: return CC0 | CC1;
        default:  return -1;
        }
      break;

    case CCAPmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC1 | CC2 | CC3;
        case LT:  return CC1 | CC3;
        case GT:  return CC2;
        case LE:  return CC0 | CC1 | CC3;
        case GE:  return CC0 | CC2;
        default:  return -1;
        }
      break;

    case CCANmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC1 | CC2 | CC3;
        case LT:  return CC1;
        case GT:  return CC2 | CC3;
        case LE:  return CC0 | CC1;
        case GE:  return CC0 | CC2 | CC3;
        default:  return -1;
        }
      break;

    case CCSmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC1 | CC2 | CC3;
        case LT:        return CC1;
        case GT:        return CC2;
        case LE:        return CC0 | CC1;
        case GE:        return CC0 | CC2;
        case UNORDERED: return CC3;
        case ORDERED:   return CC0 | CC1 | CC2;
        case UNEQ:      return CC0 | CC3;
        case UNLT:      return CC1 | CC3;
        case UNGT:      return CC2 | CC3;
        case UNLE:      return CC0 | CC1 | CC3;
        case UNGE:      return CC0 | CC2 | CC3;
        case LTGT:      return CC1 | CC2;
        default:        return -1;
        }
      break;

    case CCSRmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC2 | CC1 | CC3;
        case LT:        return CC2;
        case GT:        return CC1;
        case LE:        return CC0 | CC2;
        case GE:        return CC0 | CC1;
        case UNORDERED: return CC3;
        case ORDERED:   return CC0 | CC2 | CC1;
        case UNEQ:      return CC0 | CC3;
        case UNLT:      return CC2 | CC3;
        case UNGT:      return CC1 | CC3;
        case UNLE:      return CC0 | CC2 | CC3;
        case UNGE:      return CC0 | CC1 | CC3;
        case LTGT:      return CC2 | CC1;
        default:        return -1;
        }
      break;

    default:
      return -1;
    }
}

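/* For instance, a signed "less than" test on CCSmode yields CC1,
   i.e. the mask 1 << 2 == 4, and NE yields CC1 | CC2 | CC3 == 7.
   A result of -1 tells the caller that the comparison cannot be
   implemented as a single branch condition.  */
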
/* If INV is false, return the assembler mnemonic string to implement
   a branch specified by CODE.  If INV is true, return the mnemonic
   for the corresponding inverted branch.  */

static const char *
s390_branch_condition_mnemonic (rtx code, int inv)
{
  static const char *const mnemonic[16] =
    {
      NULL, "o", "h", "nle",
      "l", "nhe", "lh", "ne",
      "e", "nlh", "he", "nl",
      "le", "nh", "no", NULL
    };

  int mask = s390_branch_condition_mask (code);
  gcc_assert (mask >= 0);

  if (inv)
    mask ^= 15;

  gcc_assert (mask >= 1 && mask <= 14);

  return mnemonic[mask];
}

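/* Continuing the example above: mask 4 (CC1) selects the mnemonic
   "l" (branch on low); with INV set the mask becomes 4 ^ 15 == 11,
   which selects the inverted mnemonic "nl".  */
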
/* Return the part of OP which has a value different from DEF.
   The size of the part is determined by MODE.
   Use this function only if you already know that OP really
   contains such a part.  */

unsigned HOST_WIDE_INT
s390_extract_part (rtx op, enum machine_mode mode, int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
  int part_bits = GET_MODE_BITSIZE (mode);
  unsigned HOST_WIDE_INT part_mask
    = ((unsigned HOST_WIDE_INT)1 << part_bits) - 1;
  int i;

  for (i = 0; i < max_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= part_bits;

      if ((value & part_mask) != (def & part_mask))
        return value & part_mask;
    }

  gcc_unreachable ();
}

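/* Example: for OP == (const_int 0x12340000), MODE == HImode and
   DEF == 0, the lowest HImode part is all zero, so scanning upwards
   finds 0x1234 as the part that differs from DEF.  */
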
/* If OP is an integer constant of mode MODE with exactly one
   part of mode PART_MODE unequal to DEF, return the number of that
   part.  Otherwise, return -1.  */

int
s390_single_part (rtx op,
                  enum machine_mode mode,
                  enum machine_mode part_mode,
                  int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
  unsigned HOST_WIDE_INT part_mask
    = ((unsigned HOST_WIDE_INT)1 << GET_MODE_BITSIZE (part_mode)) - 1;
  int i, part = -1;

  if (GET_CODE (op) != CONST_INT)
    return -1;

  for (i = 0; i < n_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= GET_MODE_BITSIZE (part_mode);

      if ((value & part_mask) != (def & part_mask))
        {
          if (part != -1)
            return -1;
          else
            part = i;
        }
    }
  return part == -1 ? -1 : n_parts - 1 - part;
}

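/* Example: for OP == (const_int 0x12340000) in SImode with
   PART_MODE == HImode and DEF == 0, exactly one part differs from
   DEF; as parts are numbered starting from the most significant one,
   the function returns 0.  For 0x12340056 it returns -1, because two
   parts differ from DEF.  */
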
/* Check whether we can (and want to) split a double-word
   move in mode MODE from SRC to DST into two single-word
   moves, moving the subword FIRST_SUBWORD first.  */

bool
s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
{
  /* Floating point registers cannot be split.  */
  if (FP_REG_P (src) || FP_REG_P (dst))
    return false;

  /* We don't need to split if operands are directly accessible.  */
  if (s_operand (src, mode) || s_operand (dst, mode))
    return false;

  /* Non-offsettable memory references cannot be split.  */
  if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
      || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
    return false;

  /* Moving the first subword must not clobber a register
     needed to move the second subword.  */
  if (register_operand (dst, mode))
    {
      rtx subreg = operand_subword (dst, first_subword, 0, mode);
      if (reg_overlap_mentioned_p (subreg, src))
        return false;
    }

  return true;
}

/* Return true if it can be proven that [MEM1, MEM1 + SIZE]
   and [MEM2, MEM2 + SIZE] do overlap and false
   otherwise.  */

bool
s390_overlap_p (rtx mem1, rtx mem2, HOST_WIDE_INT size)
{
  rtx addr1, addr2, addr_delta;
  HOST_WIDE_INT delta;

  if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
    return true;

  if (size == 0)
    return false;

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);

  /* This overlapping check is used by peepholes merging memory block
     operations.  Overlapping operations would otherwise be recognized by
     the S/390 hardware and would fall back to a slower implementation.
     Allowing overlapping operations would lead to slow code but not to
     wrong code.  Therefore we are somewhat optimistic if we cannot prove
     that the memory blocks are overlapping.
     That's why we return false here although this may accept operations on
     overlapping memory areas.  */
  if (!addr_delta || GET_CODE (addr_delta) != CONST_INT)
    return false;

  delta = INTVAL (addr_delta);

  if (delta == 0
      || (delta > 0 && delta < size)
      || (delta < 0 && -delta < size))
    return true;

  return false;
}

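/* Example: two MEMs whose addresses differ by a compile-time delta
   of 3 overlap for SIZE == 8 (0 < 3 < 8), so true is returned.  If
   the address difference does not fold to a CONST_INT -- e.g. for
   two unrelated pointer registers -- the function optimistically
   returns false, as explained in the comment above.  */
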
/* Check whether the address of memory reference MEM2 equals exactly
   the address of memory reference MEM1 plus DELTA.  Return true if
   we can prove this to be the case, false otherwise.  */

bool
s390_offset_p (rtx mem1, rtx mem2, rtx delta)
{
  rtx addr1, addr2, addr_delta;

  if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
    return false;

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
  if (!addr_delta || !rtx_equal_p (addr_delta, delta))
    return false;

  return true;
}

/* Expand logical operator CODE in mode MODE with operands OPERANDS.  */

void
s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
                              rtx *operands)
{
  enum machine_mode wmode = mode;
  rtx dst = operands[0];
  rtx src1 = operands[1];
  rtx src2 = operands[2];
  rtx op, clob, tem;

  /* If we cannot handle the operation directly, use a temp register.  */
  if (!s390_logical_operator_ok_p (operands))
    dst = gen_reg_rtx (mode);

  /* QImode and HImode patterns make sense only if we have a destination
     in memory.  Otherwise perform the operation in SImode.  */
  if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
    wmode = SImode;

  /* Widen operands if required.  */
  if (mode != wmode)
    {
      if (GET_CODE (dst) == SUBREG
          && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
        dst = tem;
      else if (REG_P (dst))
        dst = gen_rtx_SUBREG (wmode, dst, 0);
      else
        dst = gen_reg_rtx (wmode);

      if (GET_CODE (src1) == SUBREG
          && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
        src1 = tem;
      else if (GET_MODE (src1) != VOIDmode)
        src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);

      if (GET_CODE (src2) == SUBREG
          && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
        src2 = tem;
      else if (GET_MODE (src2) != VOIDmode)
        src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
    }

  /* Emit the instruction.  */
  op = gen_rtx_SET (VOIDmode, dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
  clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
  emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));

  /* Fix up the destination if needed.  */
  if (dst != operands[0])
    emit_move_insn (operands[0], gen_lowpart (mode, dst));
}

/* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR).  */

bool
s390_logical_operator_ok_p (rtx *operands)
{
  /* If the destination operand is in memory, it needs to coincide
     with one of the source operands.  After reload, it has to be
     the first source operand.  */
  if (GET_CODE (operands[0]) == MEM)
    return rtx_equal_p (operands[0], operands[1])
           || (!reload_completed && rtx_equal_p (operands[0], operands[2]));

  return true;
}

/* Narrow logical operation CODE of memory operand MEMOP with immediate
   operand IMMOP to switch from SS to SI type instructions.  */

void
s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
{
  int def = code == AND ? -1 : 0;
  HOST_WIDE_INT mask;
  int part;

  gcc_assert (GET_CODE (*memop) == MEM);
  gcc_assert (!MEM_VOLATILE_P (*memop));

  mask = s390_extract_part (*immop, QImode, def);
  part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
  gcc_assert (part >= 0);

  *memop = adjust_address (*memop, QImode, part);
  *immop = gen_int_mode (mask, QImode);
}

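/* Example: an SImode AND of a memory operand with 0xffffffef (DEF is
   -1 for AND) has exactly one QImode part that differs from all-ones:
   the lowest byte.  The operands are rewritten to the QImode
   reference at byte offset 3 and the immediate 0xef, turning a
   4-byte SS-type operation into a single byte-sized SI-type
   instruction (NI).  */
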
/* How to allocate a 'struct machine_function'.  */

static struct machine_function *
s390_init_machine_status (void)
{
  return ggc_alloc_cleared (sizeof (struct machine_function));
}

/* Change optimizations to be performed, depending on the
   optimization level.

   LEVEL is the optimization level specified; 2 if `-O2' is
   specified, 1 if `-O' is specified, and 0 if neither is specified.

   SIZE is nonzero if `-Os' is specified and zero otherwise.  */

void
optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
{
  /* ??? There are apparently still problems with -fcaller-saves.  */
  flag_caller_saves = 0;

  /* By default, always emit DWARF-2 unwind info.  This allows debugging
     without maintaining a stack frame back-chain.  */
  flag_asynchronous_unwind_tables = 1;

  /* Use MVCLE instructions to decrease code size if requested.  */
  if (size != 0)
    target_flags |= MASK_MVCLE;
}

/* Return true if ARG is the name of a processor.  Set *TYPE and *FLAGS
   to the associated processor_type and processor_flags if so.  */

static bool
s390_handle_arch_option (const char *arg,
                         enum processor_type *type,
                         enum processor_flags *flags)
{
  static struct pta
    {
      const char *const name;   /* processor name or nickname.  */
      const enum processor_type processor;
      const enum processor_flags flags;
    }
  const processor_alias_table[] =
    {
      {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
      {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
      {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
      {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
                                    | PF_LONG_DISPLACEMENT},
      {"z9-109", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
                                        | PF_LONG_DISPLACEMENT | PF_EXTIMM},
      {"z9-ec", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
                                       | PF_LONG_DISPLACEMENT | PF_EXTIMM
                                       | PF_DFP},
    };
  size_t i;

  for (i = 0; i < ARRAY_SIZE (processor_alias_table); i++)
    if (strcmp (arg, processor_alias_table[i].name) == 0)
      {
        *type = processor_alias_table[i].processor;
        *flags = processor_alias_table[i].flags;
        return true;
      }
  return false;
}

/* Implement TARGET_HANDLE_OPTION.  */

static bool
s390_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case OPT_march_:
      return s390_handle_arch_option (arg, &s390_arch, &s390_arch_flags);

    case OPT_mstack_guard_:
      if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_guard) != 1)
        return false;
      if (exact_log2 (s390_stack_guard) == -1)
        error ("stack guard value must be an exact power of 2");
      return true;

    case OPT_mstack_size_:
      if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_size) != 1)
        return false;
      if (exact_log2 (s390_stack_size) == -1)
        error ("stack size must be an exact power of 2");
      return true;

    case OPT_mtune_:
      return s390_handle_arch_option (arg, &s390_tune, &s390_tune_flags);

    case OPT_mwarn_framesize_:
      return sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_warn_framesize) == 1;

    default:
      return true;
    }
}

void
override_options (void)
{
  /* Set up function hooks.  */
  init_machine_status = s390_init_machine_status;

  /* Architecture mode defaults according to ABI.  */
  if (!(target_flags_explicit & MASK_ZARCH))
    {
      if (TARGET_64BIT)
        target_flags |= MASK_ZARCH;
      else
        target_flags &= ~MASK_ZARCH;
    }

  /* Determine processor architectural level.  */
  if (!s390_arch_string)
    {
      s390_arch_string = TARGET_ZARCH ? "z900" : "g5";
      s390_handle_arch_option (s390_arch_string, &s390_arch, &s390_arch_flags);
    }

  /* Determine processor to tune for.  */
  if (s390_tune == PROCESSOR_max)
    {
      s390_tune = s390_arch;
      s390_tune_flags = s390_arch_flags;
    }

  /* Sanity checks.  */
  if (TARGET_ZARCH && !TARGET_CPU_ZARCH)
    error ("z/Architecture mode not supported on %s", s390_arch_string);
  if (TARGET_64BIT && !TARGET_ZARCH)
    error ("64-bit ABI not supported in ESA/390 mode");

  if (TARGET_HARD_DFP && (!TARGET_CPU_DFP || !TARGET_ZARCH))
    {
      if (target_flags_explicit & MASK_SOFT_DFP)
        {
          if (!TARGET_CPU_DFP)
            error ("hardware decimal floating point instructions"
                   " not available on %s", s390_arch_string);
          if (!TARGET_ZARCH)
            error ("hardware decimal floating point instructions"
                   " not available in ESA/390 mode");
        }
      else
        target_flags |= MASK_SOFT_DFP;
    }

  if ((target_flags_explicit & MASK_SOFT_FLOAT) && TARGET_SOFT_FLOAT)
    {
      if ((target_flags_explicit & MASK_SOFT_DFP) && TARGET_HARD_DFP)
        error ("-mhard-dfp can't be used in conjunction with -msoft-float");

      target_flags |= MASK_SOFT_DFP;
    }

  /* Set processor cost function.  */
  if (s390_tune == PROCESSOR_2094_Z9_109)
    s390_cost = &z9_109_cost;
  else if (s390_tune == PROCESSOR_2084_Z990)
    s390_cost = &z990_cost;
  else
    s390_cost = &z900_cost;

  if (TARGET_BACKCHAIN && TARGET_PACKED_STACK && TARGET_HARD_FLOAT)
    error ("-mbackchain -mpacked-stack -mhard-float are not supported "
           "in combination");

  if (s390_stack_size)
    {
      if (s390_stack_guard >= s390_stack_size)
        error ("stack size must be greater than the stack guard value");
      else if (s390_stack_size > 1 << 16)
        error ("stack size must not be greater than 64k");
    }
  else if (s390_stack_guard)
    error ("-mstack-guard implies use of -mstack-size");

#ifdef TARGET_DEFAULT_LONG_DOUBLE_128
  if (!(target_flags_explicit & MASK_LONG_DOUBLE_128))
    target_flags |= MASK_LONG_DOUBLE_128;
#endif
}

/* Map for the smallest class containing reg REGNO.  */

const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  FP_REGS, FP_REGS, FP_REGS, FP_REGS,
  FP_REGS, FP_REGS, FP_REGS, FP_REGS,
  FP_REGS, FP_REGS, FP_REGS, FP_REGS,
  FP_REGS, FP_REGS, FP_REGS, FP_REGS,
  ADDR_REGS, CC_REGS, ADDR_REGS, ADDR_REGS,
  ACCESS_REGS, ACCESS_REGS
};

/* Return the attribute type of INSN.  */

static enum attr_type
s390_safe_attr_type (rtx insn)
{
  if (recog_memoized (insn) >= 0)
    return get_attr_type (insn);
  else
    return TYPE_NONE;
}

/* Return true if DISP is a valid short displacement.  */

static bool
s390_short_displacement (rtx disp)
{
  /* No displacement is OK.  */
  if (!disp)
    return true;

  /* Integer displacement in range.  */
  if (GET_CODE (disp) == CONST_INT)
    return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;

  /* GOT offset is not OK, the GOT can be large.  */
  if (GET_CODE (disp) == CONST
      && GET_CODE (XEXP (disp, 0)) == UNSPEC
      && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
          || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
    return false;

  /* All other symbolic constants are literal pool references,
     which are OK as the literal pool must be small.  */
  if (GET_CODE (disp) == CONST)
    return true;

  return false;
}

ab96de7e
AS
1557/* Decompose a RTL expression ADDR for a memory address into
1558 its components, returned in OUT.
ccfc6cc8 1559
3ed99cc9 1560 Returns false if ADDR is not a valid memory address, true
ab96de7e
AS
1561 otherwise. If OUT is NULL, don't return the components,
1562 but check for validity only.
ccfc6cc8 1563
ab96de7e
AS
1564 Note: Only addresses in canonical form are recognized.
1565 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1566 canonical form so that they will be recognized. */
f19a9af7 1567
ab96de7e 1568static int
5d81b82b 1569s390_decompose_address (rtx addr, struct s390_address *out)
ab96de7e
AS
1570{
1571 HOST_WIDE_INT offset = 0;
1572 rtx base = NULL_RTX;
1573 rtx indx = NULL_RTX;
1574 rtx disp = NULL_RTX;
1575 rtx orig_disp;
3ed99cc9
AS
1576 bool pointer = false;
1577 bool base_ptr = false;
1578 bool indx_ptr = false;
f01cf809
UW
1579 bool literal_pool = false;
1580
1581 /* We may need to substitute the literal pool base register into the address
1582 below. However, at this point we do not know which register is going to
1583 be used as base, so we substitute the arg pointer register. This is going
1584 to be treated as holding a pointer below -- it shouldn't be used for any
1585 other purpose. */
1586 rtx fake_pool_base = gen_rtx_REG (Pmode, ARG_POINTER_REGNUM);
0dfa6c5e 1587
ab96de7e 1588 /* Decompose address into base + index + displacement. */
0dfa6c5e 1589
ab96de7e
AS
1590 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
1591 base = addr;
0dfa6c5e 1592
ab96de7e 1593 else if (GET_CODE (addr) == PLUS)
e221ef54 1594 {
ab96de7e
AS
1595 rtx op0 = XEXP (addr, 0);
1596 rtx op1 = XEXP (addr, 1);
1597 enum rtx_code code0 = GET_CODE (op0);
1598 enum rtx_code code1 = GET_CODE (op1);
e221ef54 1599
ab96de7e
AS
1600 if (code0 == REG || code0 == UNSPEC)
1601 {
1602 if (code1 == REG || code1 == UNSPEC)
1603 {
1604 indx = op0; /* index + base */
1605 base = op1;
1606 }
e221ef54 1607
ab96de7e
AS
1608 else
1609 {
1610 base = op0; /* base + displacement */
1611 disp = op1;
1612 }
1613 }
ccfc6cc8 1614
ab96de7e 1615 else if (code0 == PLUS)
d3632d41 1616 {
ab96de7e
AS
1617 indx = XEXP (op0, 0); /* index + base + disp */
1618 base = XEXP (op0, 1);
1619 disp = op1;
d3632d41 1620 }
d3632d41 1621
ab96de7e 1622 else
d3632d41 1623 {
3ed99cc9 1624 return false;
d3632d41 1625 }
ab96de7e 1626 }
d3632d41 1627
ab96de7e
AS
1628 else
1629 disp = addr; /* displacement */
d3632d41 1630
ab96de7e
AS
1631 /* Extract integer part of displacement. */
1632 orig_disp = disp;
1633 if (disp)
1634 {
1635 if (GET_CODE (disp) == CONST_INT)
d3632d41 1636 {
ab96de7e
AS
1637 offset = INTVAL (disp);
1638 disp = NULL_RTX;
d3632d41 1639 }
ab96de7e
AS
1640 else if (GET_CODE (disp) == CONST
1641 && GET_CODE (XEXP (disp, 0)) == PLUS
1642 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
1643 {
1644 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
1645 disp = XEXP (XEXP (disp, 0), 0);
1646 }
1647 }
d3632d41 1648
ab96de7e
AS
1649 /* Strip off CONST here to avoid special case tests later. */
1650 if (disp && GET_CODE (disp) == CONST)
1651 disp = XEXP (disp, 0);
ac32b25e 1652
ab96de7e
AS
1653 /* We can convert literal pool addresses to
1654 displacements by basing them off the base register. */
1655 if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
1656 {
1657 /* Either base or index must be free to hold the base register. */
1658 if (!base)
f01cf809 1659 base = fake_pool_base, literal_pool = true;
ab96de7e 1660 else if (!indx)
f01cf809 1661 indx = fake_pool_base, literal_pool = true;
ab96de7e 1662 else
3ed99cc9 1663 return false;
ab96de7e
AS
1664
1665 /* Mark up the displacement. */
1666 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
1667 UNSPEC_LTREL_OFFSET);
d3632d41 1668 }
ccfc6cc8 1669
ab96de7e
AS
1670 /* Validate base register. */
1671 if (base)
1672 {
1673 if (GET_CODE (base) == UNSPEC)
1674 switch (XINT (base, 1))
1675 {
1676 case UNSPEC_LTREF:
1677 if (!disp)
1678 disp = gen_rtx_UNSPEC (Pmode,
1679 gen_rtvec (1, XVECEXP (base, 0, 0)),
1680 UNSPEC_LTREL_OFFSET);
1681 else
3ed99cc9 1682 return false;
ccfc6cc8 1683
f01cf809 1684 base = XVECEXP (base, 0, 1);
ab96de7e 1685 break;
f19a9af7 1686
ab96de7e 1687 case UNSPEC_LTREL_BASE:
f01cf809
UW
1688 if (XVECLEN (base, 0) == 1)
1689 base = fake_pool_base, literal_pool = true;
1690 else
1691 base = XVECEXP (base, 0, 1);
ab96de7e 1692 break;
f19a9af7 1693
ab96de7e 1694 default:
3ed99cc9 1695 return false;
ab96de7e 1696 }
f19a9af7 1697
93fa8428
AK
1698 if (!REG_P (base)
1699 || (GET_MODE (base) != SImode
1700 && GET_MODE (base) != Pmode))
3ed99cc9 1701 return false;
ab96de7e 1702
f01cf809 1703 if (REGNO (base) == STACK_POINTER_REGNUM
ab96de7e
AS
1704 || REGNO (base) == FRAME_POINTER_REGNUM
1705 || ((reload_completed || reload_in_progress)
1706 && frame_pointer_needed
1707 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
1708 || REGNO (base) == ARG_POINTER_REGNUM
1709 || (flag_pic
1710 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
3ed99cc9 1711 pointer = base_ptr = true;
f01cf809
UW
1712
1713 if ((reload_completed || reload_in_progress)
1714 && base == cfun->machine->base_reg)
1715 pointer = base_ptr = literal_pool = true;
ab96de7e
AS
1716 }
1717
1718 /* Validate index register. */
1719 if (indx)
f19a9af7 1720 {
ab96de7e
AS
1721 if (GET_CODE (indx) == UNSPEC)
1722 switch (XINT (indx, 1))
1723 {
1724 case UNSPEC_LTREF:
1725 if (!disp)
1726 disp = gen_rtx_UNSPEC (Pmode,
1727 gen_rtvec (1, XVECEXP (indx, 0, 0)),
1728 UNSPEC_LTREL_OFFSET);
1729 else
3ed99cc9 1730 return false;
f19a9af7 1731
f01cf809 1732 indx = XVECEXP (indx, 0, 1);
ab96de7e 1733 break;
f19a9af7 1734
ab96de7e 1735 case UNSPEC_LTREL_BASE:
f01cf809
UW
1736 if (XVECLEN (indx, 0) == 1)
1737 indx = fake_pool_base, literal_pool = true;
1738 else
1739 indx = XVECEXP (indx, 0, 1);
ab96de7e 1740 break;
f19a9af7 1741
ab96de7e 1742 default:
3ed99cc9 1743 return false;
ab96de7e 1744 }
f19a9af7 1745
93fa8428
AK
1746 if (!REG_P (indx)
1747 || (GET_MODE (indx) != SImode
1748 && GET_MODE (indx) != Pmode))
3ed99cc9 1749 return false;
f19a9af7 1750
f01cf809 1751 if (REGNO (indx) == STACK_POINTER_REGNUM
ab96de7e
AS
1752 || REGNO (indx) == FRAME_POINTER_REGNUM
1753 || ((reload_completed || reload_in_progress)
1754 && frame_pointer_needed
1755 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
1756 || REGNO (indx) == ARG_POINTER_REGNUM
1757 || (flag_pic
1758 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
3ed99cc9 1759 pointer = indx_ptr = true;
f01cf809
UW
1760
1761 if ((reload_completed || reload_in_progress)
1762 && indx == cfun->machine->base_reg)
1763 pointer = indx_ptr = literal_pool = true;
ab96de7e 1764 }
38899e29 1765
ab96de7e
AS
1766 /* Prefer to use pointer as base, not index. */
1767 if (base && indx && !base_ptr
1768 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
1769 {
1770 rtx tmp = base;
1771 base = indx;
1772 indx = tmp;
1773 }
f19a9af7 1774
ab96de7e
AS
1775 /* Validate displacement. */
1776 if (!disp)
1777 {
63296cb1
AK
1778 /* If virtual registers are involved, the displacement will change later
1779 anyway as the virtual registers get eliminated. This could make a
1780 valid displacement invalid, but it is more likely to make an invalid
1781 displacement valid, because we sometimes access the register save area
1782 via negative offsets to one of those registers.
ab96de7e
AS
1783 Thus we don't check the displacement for validity here. If after
1784 elimination the displacement turns out to be invalid after all,
1785 this is fixed up by reload in any case. */
1786 if (base != arg_pointer_rtx
1787 && indx != arg_pointer_rtx
1788 && base != return_address_pointer_rtx
63296cb1
AK
1789 && indx != return_address_pointer_rtx
1790 && base != frame_pointer_rtx
1791 && indx != frame_pointer_rtx
1792 && base != virtual_stack_vars_rtx
1793 && indx != virtual_stack_vars_rtx)
ab96de7e 1794 if (!DISP_IN_RANGE (offset))
3ed99cc9 1795 return false;
ab96de7e
AS
1796 }
1797 else
1798 {
1799 /* All the special cases are pointers. */
3ed99cc9 1800 pointer = true;
f19a9af7 1801
ab96de7e
AS
1802 /* In the small-PIC case, the linker converts @GOT
1803 and @GOTNTPOFF offsets to possible displacements. */
1804 if (GET_CODE (disp) == UNSPEC
1805 && (XINT (disp, 1) == UNSPEC_GOT
1806 || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
ab96de7e
AS
1807 && flag_pic == 1)
1808 {
1809 ;
1810 }
f19a9af7 1811
ab96de7e 1812 /* Accept chunkified literal pool symbol references. */
3cd045d1
UW
1813 else if (cfun && cfun->machine
1814 && cfun->machine->decomposed_literal_pool_addresses_ok_p
1815 && GET_CODE (disp) == MINUS
ab96de7e
AS
1816 && GET_CODE (XEXP (disp, 0)) == LABEL_REF
1817 && GET_CODE (XEXP (disp, 1)) == LABEL_REF)
1818 {
1819 ;
1820 }
f19a9af7 1821
ab96de7e
AS
1822 /* Accept literal pool references. */
1823 else if (GET_CODE (disp) == UNSPEC
1824 && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
1825 {
1826 orig_disp = gen_rtx_CONST (Pmode, disp);
1827 if (offset)
1828 {
1829 /* If we have an offset, make sure it does not
1830 exceed the size of the constant pool entry. */
1831 rtx sym = XVECEXP (disp, 0, 0);
1832 if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
3ed99cc9 1833 return false;
f19a9af7 1834
ab96de7e
AS
1835 orig_disp = plus_constant (orig_disp, offset);
1836 }
1837 }
1838
1839 else
3ed99cc9 1840 return false;
f19a9af7
AK
1841 }
1842
ab96de7e 1843 if (!base && !indx)
3ed99cc9 1844 pointer = true;
ab96de7e
AS
1845
1846 if (out)
1847 {
1848 out->base = base;
1849 out->indx = indx;
1850 out->disp = orig_disp;
1851 out->pointer = pointer;
f01cf809 1852 out->literal_pool = literal_pool;
ab96de7e
AS
1853 }
1854
3ed99cc9 1855 return true;
f19a9af7
AK
1856}
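
/* Editor's sketch, not part of the original s390.c: what a decomposed
 s390 D(X,B) operand computes, in plain C. The displacement bounds
 below are an assumption mirroring DISP_IN_RANGE (unsigned 12 bits in
 the short form, signed 20 bits with TARGET_LONG_DISPLACEMENT).
 Guarded with #if 0 so it stays out of any build; extract to run. */
#if 0
#include <assert.h>
#include <stdint.h>

static int disp_short_ok (int64_t d) { return d >= 0 && d <= 4095; }
static int disp_long_ok (int64_t d) { return d >= -524288 && d <= 524287; }

/* The address a D(X,B) operand denotes: base + index + displacement. */
static uint64_t effective_address (uint64_t base, uint64_t index, int64_t d)
{
  return base + index + (uint64_t) d;
}

int main (void)
{
  assert (disp_short_ok (4095) && !disp_short_ok (4096));
  assert (disp_long_ok (-524288) && !disp_long_ok (524288));
  assert (effective_address (0x1000, 0x20, 8) == 0x1028);
  return 0;
}
#endif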
1857
d98ad410
UW
1858/* Decompose an RTL expression OP for a shift count into its components,
1859 and return the base register in BASE and the offset in OFFSET.
1860
d98ad410
UW
1861 Return true if OP is a valid shift count, false if not. */
1862
1863bool
4989e88a 1864s390_decompose_shift_count (rtx op, rtx *base, HOST_WIDE_INT *offset)
d98ad410
UW
1865{
1866 HOST_WIDE_INT off = 0;
1867
d98ad410
UW
1868 /* We can have an integer constant, an address register,
1869 or a sum of the two. */
1870 if (GET_CODE (op) == CONST_INT)
1871 {
1872 off = INTVAL (op);
1873 op = NULL_RTX;
1874 }
1875 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
1876 {
1877 off = INTVAL (XEXP (op, 1));
1878 op = XEXP (op, 0);
1879 }
1880 while (op && GET_CODE (op) == SUBREG)
1881 op = SUBREG_REG (op);
1882
1883 if (op && GET_CODE (op) != REG)
1884 return false;
1885
1886 if (offset)
1887 *offset = off;
1888 if (base)
1889 *base = op;
1890
1891 return true;
1892}
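
/* Editor's sketch, not part of the original s390.c: the same
 decomposition on a hypothetical toy expression type instead of rtx,
 showing the three accepted shapes: constant, register, and
 register + constant (SUBREG stripping is omitted). A base of -1
 stands for the NULL_RTX case. Extract to compile and run. */
#if 0
#include <assert.h>
#include <stddef.h>

enum toy_code { TOY_REG, TOY_CONST, TOY_PLUS };
struct toy { enum toy_code code; int regno; long ival;
             struct toy *op0, *op1; };

static int toy_decompose_shift_count (struct toy *op, int *base, long *off)
{
  long o = 0;
  if (op && op->code == TOY_CONST)              /* plain constant */
    { o = op->ival; op = NULL; }
  else if (op && op->code == TOY_PLUS
           && op->op1->code == TOY_CONST)       /* reg + constant */
    { o = op->op1->ival; op = op->op0; }
  if (op && op->code != TOY_REG)
    return 0;                                   /* anything else: invalid */
  *off = o;
  *base = op ? op->regno : -1;
  return 1;
}

int main (void)
{
  struct toy r = { TOY_REG, 5, 0, NULL, NULL };
  struct toy c = { TOY_CONST, 0, 3, NULL, NULL };
  struct toy p = { TOY_PLUS, 0, 0, &r, &c };
  int base; long off;
  assert (toy_decompose_shift_count (&p, &base, &off)
          && base == 5 && off == 3);
  return 0;
}
#endif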
1893
1894
ab96de7e 1895/* Return true if OP is a valid address without index. */
3c50106f 1896
ab96de7e
AS
1897bool
1898s390_legitimate_address_without_index_p (rtx op)
1899{
1900 struct s390_address addr;
1901
1902 if (!s390_decompose_address (XEXP (op, 0), &addr))
1903 return false;
1904 if (addr.indx)
1905 return false;
1906
1907 return true;
1908}
1909
cd8dc1f9
WG
1910
1911/* Evaluates constraint strings described by the regular expression
 1912 ((A|B)(Q|R|S|T))|U|W and returns 1 if OP is a valid operand for the
 1913 constraint given in STR, and 0 otherwise. */
ab96de7e
AS
1914
1915int
cd8dc1f9 1916s390_mem_constraint (const char *str, rtx op)
ab96de7e
AS
1917{
1918 struct s390_address addr;
cd8dc1f9 1919 char c = str[0];
ab96de7e
AS
1920
1921 /* Check for offsettable variants of memory constraints. */
1922 if (c == 'A')
1923 {
1924 /* Only accept non-volatile MEMs. */
1925 if (!MEM_P (op) || MEM_VOLATILE_P (op))
1926 return 0;
1927
1928 if ((reload_completed || reload_in_progress)
cd8dc1f9 1929 ? !offsettable_memref_p (op) : !offsettable_nonstrict_memref_p (op))
ab96de7e
AS
1930 return 0;
1931
1932 c = str[1];
1933 }
1934
1935 /* Check for non-literal-pool variants of memory constraints. */
1936 else if (c == 'B')
1937 {
1938 if (GET_CODE (op) != MEM)
1939 return 0;
1940 if (!s390_decompose_address (XEXP (op, 0), &addr))
1941 return 0;
f01cf809 1942 if (addr.literal_pool)
ab96de7e
AS
1943 return 0;
1944
1945 c = str[1];
1946 }
1947
1948 switch (c)
1949 {
1950 case 'Q':
1951 if (GET_CODE (op) != MEM)
1952 return 0;
1953 if (!s390_decompose_address (XEXP (op, 0), &addr))
1954 return 0;
1955 if (addr.indx)
1956 return 0;
1957
1958 if (TARGET_LONG_DISPLACEMENT)
1959 {
1960 if (!s390_short_displacement (addr.disp))
1961 return 0;
1962 }
1963 break;
1964
1965 case 'R':
1966 if (GET_CODE (op) != MEM)
1967 return 0;
1968
1969 if (TARGET_LONG_DISPLACEMENT)
1970 {
1971 if (!s390_decompose_address (XEXP (op, 0), &addr))
1972 return 0;
1973 if (!s390_short_displacement (addr.disp))
1974 return 0;
1975 }
1976 break;
1977
1978 case 'S':
1979 if (!TARGET_LONG_DISPLACEMENT)
1980 return 0;
1981 if (GET_CODE (op) != MEM)
1982 return 0;
1983 if (!s390_decompose_address (XEXP (op, 0), &addr))
1984 return 0;
1985 if (addr.indx)
1986 return 0;
1987 if (s390_short_displacement (addr.disp))
1988 return 0;
1989 break;
1990
1991 case 'T':
1992 if (!TARGET_LONG_DISPLACEMENT)
1993 return 0;
1994 if (GET_CODE (op) != MEM)
1995 return 0;
1996 /* Any invalid address here will be fixed up by reload,
1997 so accept it for the most generic constraint. */
1998 if (s390_decompose_address (XEXP (op, 0), &addr)
1999 && s390_short_displacement (addr.disp))
2000 return 0;
2001 break;
2002
2003 case 'U':
2004 if (TARGET_LONG_DISPLACEMENT)
2005 {
2006 if (!s390_decompose_address (op, &addr))
2007 return 0;
2008 if (!s390_short_displacement (addr.disp))
2009 return 0;
2010 }
2011 break;
2012
2013 case 'W':
2014 if (!TARGET_LONG_DISPLACEMENT)
2015 return 0;
2016 /* Any invalid address here will be fixed up by reload,
2017 so accept it for the most generic constraint. */
2018 if (s390_decompose_address (op, &addr)
2019 && s390_short_displacement (addr.disp))
2020 return 0;
2021 break;
2022
2023 case 'Y':
d98ad410
UW
2024 /* Simply check for the basic form of a shift count. Reload will
2025 take care of making sure we have a proper base register. */
4989e88a 2026 if (!s390_decompose_shift_count (op, NULL, NULL))
d98ad410
UW
2027 return 0;
2028 break;
ab96de7e
AS
2029
2030 default:
2031 return 0;
2032 }
2033
2034 return 1;
2035}
2036
cd8dc1f9
WG
2037
2038
2039/* Evaluates constraint strings starting with letter O. Input
 2040 parameter C is the letter following the "O" in the constraint
2041 string. Returns 1 if VALUE meets the respective constraint and 0
2042 otherwise. */
ab96de7e 2043
d096725d 2044int
cd8dc1f9 2045s390_O_constraint_str (const char c, HOST_WIDE_INT value)
d096725d 2046{
cd8dc1f9
WG
2047 if (!TARGET_EXTIMM)
2048 return 0;
d096725d 2049
cd8dc1f9 2050 switch (c)
d096725d 2051 {
cd8dc1f9
WG
2052 case 's':
2053 return trunc_int_for_mode (value, SImode) == value;
2054
2055 case 'p':
2056 return value == 0
2057 || s390_single_part (GEN_INT (value), DImode, SImode, 0) == 1;
2058
2059 case 'n':
2060 return value == -1
2061 || s390_single_part (GEN_INT (value), DImode, SImode, -1) == 1;
2062
d096725d 2063 default:
cd8dc1f9 2064 gcc_unreachable ();
d096725d
AS
2065 }
2066}
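
/* Editor's note: for a 64-bit HOST_WIDE_INT the 'Os' test above,
 trunc_int_for_mode (value, SImode) == value, is simply a signed
 32-bit range check. Standalone equivalent (an illustration, not
 the GCC implementation); extract to compile and run. */
#if 0
#include <assert.h>
#include <stdint.h>

static int fits_simm32 (int64_t value)  /* mirrors the 'Os' case */
{
  return (int64_t) (int32_t) value == value;
}

int main (void)
{
  assert (fits_simm32 (-1) && fits_simm32 (INT32_MIN));
  assert (!fits_simm32 ((int64_t) INT32_MAX + 1));
  return 0;
}
#endif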
2067
cd8dc1f9
WG
2068
2069/* Evaluates constraint strings starting with letter N. Parameter STR
 2070 contains the letters following the "N" in the constraint string.
2071 Returns true if VALUE matches the constraint. */
d096725d 2072
ab96de7e 2073int
cd8dc1f9 2074s390_N_constraint_str (const char *str, HOST_WIDE_INT value)
ab96de7e
AS
2075{
2076 enum machine_mode mode, part_mode;
2077 int def;
2078 int part, part_goal;
2079
ab96de7e 2080
cd8dc1f9
WG
2081 if (str[0] == 'x')
2082 part_goal = -1;
2083 else
2084 part_goal = str[0] - '0';
ab96de7e 2085
cd8dc1f9
WG
2086 switch (str[1])
2087 {
2088 case 'Q':
2089 part_mode = QImode;
ab96de7e 2090 break;
cd8dc1f9
WG
2091 case 'H':
2092 part_mode = HImode;
ec24698e 2093 break;
cd8dc1f9
WG
2094 case 'S':
2095 part_mode = SImode;
2096 break;
2097 default:
2098 return 0;
2099 }
ec24698e 2100
cd8dc1f9
WG
2101 switch (str[2])
2102 {
2103 case 'H':
2104 mode = HImode;
2105 break;
2106 case 'S':
2107 mode = SImode;
2108 break;
2109 case 'D':
2110 mode = DImode;
2111 break;
2112 default:
2113 return 0;
2114 }
11598938 2115
cd8dc1f9
WG
2116 switch (str[3])
2117 {
2118 case '0':
2119 def = 0;
2120 break;
2121 case 'F':
2122 def = -1;
2123 break;
ab96de7e
AS
2124 default:
2125 return 0;
2126 }
2127
cd8dc1f9
WG
2128 if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
2129 return 0;
2130
2131 part = s390_single_part (GEN_INT (value), mode, part_mode, def);
2132 if (part < 0)
2133 return 0;
2134 if (part_goal != -1 && part_goal != part)
2135 return 0;
2136
ab96de7e
AS
2137 return 1;
2138}
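
/* Editor's sketch of the "single part" idea used above: exactly one
 PART_MODE-sized chunk of VALUE may differ from the default DEF.
 This is a simplified model, not the real s390_single_part; the
 chunk numbering from the least significant end is an assumption
 made for illustration only. Extract to compile and run. */
#if 0
#include <assert.h>
#include <stdint.h>

/* Return the number of the single 16-bit chunk of VALUE differing
   from DEF_CHUNK, or -1 if zero or several chunks differ.  */
static int single_hi_part (uint64_t value, uint16_t def_chunk)
{
  int i, found = -1;
  for (i = 0; i < 4; i++)
    {
      uint16_t chunk = (uint16_t) (value >> (16 * i));
      if (chunk != def_chunk)
        {
          if (found >= 0)
            return -1;          /* more than one chunk differs */
          found = i;
        }
    }
  return found;
}

int main (void)
{
  assert (single_hi_part (0x0000123400000000ull, 0) == 2);
  assert (single_hi_part (0x0001000100000000ull, 0) == -1);
  return 0;
}
#endif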
2139
cd8dc1f9
WG
2140
 2141/* Returns true if the input parameter VALUE is a floating-point zero. */
2142
2143int
2144s390_float_const_zero_p (rtx value)
2145{
2146 return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
2147 && value == CONST0_RTX (GET_MODE (value)));
2148}
2149
2150
ab96de7e
AS
2151/* Compute a (partial) cost for rtx X. Return true if the complete
2152 cost has been computed, and false if subexpressions should be
2153 scanned. In either case, *TOTAL contains the cost result.
2154 CODE contains GET_CODE (x), OUTER_CODE contains the code
2155 of the superexpression of x. */
2156
2157static bool
2158s390_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f
RH
2159{
2160 switch (code)
2161 {
2162 case CONST:
3c50106f 2163 case CONST_INT:
3c50106f
RH
2164 case LABEL_REF:
2165 case SYMBOL_REF:
2166 case CONST_DOUBLE:
6fa5b390 2167 case MEM:
3c50106f
RH
2168 *total = 0;
2169 return true;
2170
2171 case ASHIFT:
2172 case ASHIFTRT:
2173 case LSHIFTRT:
017e0eb9
MD
2174 case ROTATE:
2175 case ROTATERT:
3c50106f
RH
2176 case AND:
2177 case IOR:
2178 case XOR:
3c50106f
RH
2179 case NEG:
2180 case NOT:
2181 *total = COSTS_N_INSNS (1);
017e0eb9 2182 return false;
3c50106f 2183
2742a1ed
MD
2184 case PLUS:
2185 case MINUS:
2186 /* Check for multiply and add. */
b75d6bab 2187 if ((GET_MODE (x) == DFmode || GET_MODE (x) == SFmode)
2742a1ed 2188 && GET_CODE (XEXP (x, 0)) == MULT
142cd70f 2189 && TARGET_HARD_FLOAT && TARGET_FUSED_MADD)
2742a1ed
MD
2190 {
2191 /* This is the multiply and add case. */
b75d6bab
MD
2192 if (GET_MODE (x) == DFmode)
2193 *total = s390_cost->madbr;
2194 else
2195 *total = s390_cost->maebr;
2196 *total += rtx_cost (XEXP (XEXP (x, 0), 0), MULT)
2742a1ed
MD
2197 + rtx_cost (XEXP (XEXP (x, 0), 1), MULT)
2198 + rtx_cost (XEXP (x, 1), code);
2199 return true; /* Do not do an additional recursive descent. */
2200 }
2201 *total = COSTS_N_INSNS (1);
2202 return false;
2203
017e0eb9
MD
2204 case MULT:
2205 switch (GET_MODE (x))
2206 {
2207 case SImode:
2742a1ed 2208 {
017e0eb9
MD
2209 rtx left = XEXP (x, 0);
2210 rtx right = XEXP (x, 1);
2211 if (GET_CODE (right) == CONST_INT
b5c67a49 2212 && CONST_OK_FOR_K (INTVAL (right)))
017e0eb9
MD
2213 *total = s390_cost->mhi;
2214 else if (GET_CODE (left) == SIGN_EXTEND)
2215 *total = s390_cost->mh;
2216 else
2217 *total = s390_cost->ms; /* msr, ms, msy */
2218 break;
2219 }
2220 case DImode:
2221 {
2222 rtx left = XEXP (x, 0);
2223 rtx right = XEXP (x, 1);
2224 if (TARGET_64BIT)
2225 {
2226 if (GET_CODE (right) == CONST_INT
b5c67a49 2227 && CONST_OK_FOR_K (INTVAL (right)))
017e0eb9
MD
2228 *total = s390_cost->mghi;
2229 else if (GET_CODE (left) == SIGN_EXTEND)
2230 *total = s390_cost->msgf;
2231 else
2232 *total = s390_cost->msg; /* msgr, msg */
2233 }
2234 else /* TARGET_31BIT */
2235 {
2236 if (GET_CODE (left) == SIGN_EXTEND
2237 && GET_CODE (right) == SIGN_EXTEND)
2238 /* mulsidi case: mr, m */
2239 *total = s390_cost->m;
2742a1ed
MD
2240 else if (GET_CODE (left) == ZERO_EXTEND
2241 && GET_CODE (right) == ZERO_EXTEND
2242 && TARGET_CPU_ZARCH)
2243 /* umulsidi case: ml, mlr */
2244 *total = s390_cost->ml;
017e0eb9
MD
2245 else
2246 /* Complex calculation is required. */
2247 *total = COSTS_N_INSNS (40);
2248 }
2249 break;
2250 }
2251 case SFmode:
2252 case DFmode:
2253 *total = s390_cost->mult_df;
2254 break;
f61a2c7d
AK
2255 case TFmode:
2256 *total = s390_cost->mxbr;
2257 break;
017e0eb9
MD
2258 default:
2259 return false;
2260 }
2261 return false;
3c50106f 2262
6fa5b390
MD
2263 case UDIV:
2264 case UMOD:
2265 if (GET_MODE (x) == TImode) /* 128 bit division */
2266 *total = s390_cost->dlgr;
2267 else if (GET_MODE (x) == DImode)
2268 {
2269 rtx right = XEXP (x, 1);
2270 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2271 *total = s390_cost->dlr;
2272 else /* 64 by 64 bit division */
2273 *total = s390_cost->dlgr;
2274 }
2275 else if (GET_MODE (x) == SImode) /* 32 bit division */
2276 *total = s390_cost->dlr;
2277 return false;
2278
3c50106f 2279 case DIV:
6fa5b390
MD
2280 case MOD:
2281 if (GET_MODE (x) == DImode)
2282 {
2283 rtx right = XEXP (x, 1);
2284 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2285 if (TARGET_64BIT)
2286 *total = s390_cost->dsgfr;
2287 else
2288 *total = s390_cost->dr;
2289 else /* 64 by 64 bit division */
2290 *total = s390_cost->dsgr;
2291 }
2292 else if (GET_MODE (x) == SImode) /* 32 bit division */
2293 *total = s390_cost->dlr;
2294 else if (GET_MODE (x) == SFmode)
98fd0d70 2295 {
142cd70f 2296 *total = s390_cost->debr;
98fd0d70
MD
2297 }
2298 else if (GET_MODE (x) == DFmode)
2299 {
142cd70f 2300 *total = s390_cost->ddbr;
98fd0d70 2301 }
f61a2c7d
AK
2302 else if (GET_MODE (x) == TFmode)
2303 {
142cd70f 2304 *total = s390_cost->dxbr;
f61a2c7d 2305 }
017e0eb9
MD
2306 return false;
2307
2742a1ed
MD
2308 case SQRT:
2309 if (GET_MODE (x) == SFmode)
2310 *total = s390_cost->sqebr;
f61a2c7d 2311 else if (GET_MODE (x) == DFmode)
2742a1ed 2312 *total = s390_cost->sqdbr;
f61a2c7d
AK
2313 else /* TFmode */
2314 *total = s390_cost->sqxbr;
2742a1ed
MD
2315 return false;
2316
017e0eb9 2317 case SIGN_EXTEND:
2742a1ed 2318 case ZERO_EXTEND:
6fa5b390
MD
2319 if (outer_code == MULT || outer_code == DIV || outer_code == MOD
2320 || outer_code == PLUS || outer_code == MINUS
2321 || outer_code == COMPARE)
017e0eb9
MD
2322 *total = 0;
2323 return false;
3c50106f 2324
6fa5b390
MD
2325 case COMPARE:
2326 *total = COSTS_N_INSNS (1);
2327 if (GET_CODE (XEXP (x, 0)) == AND
2328 && GET_CODE (XEXP (x, 1)) == CONST_INT
2329 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2330 {
2331 rtx op0 = XEXP (XEXP (x, 0), 0);
2332 rtx op1 = XEXP (XEXP (x, 0), 1);
2333 rtx op2 = XEXP (x, 1);
2334
2335 if (memory_operand (op0, GET_MODE (op0))
2336 && s390_tm_ccmode (op1, op2, 0) != VOIDmode)
2337 return true;
2338 if (register_operand (op0, GET_MODE (op0))
2339 && s390_tm_ccmode (op1, op2, 1) != VOIDmode)
2340 return true;
2341 }
2342 return false;
2343
3c50106f
RH
2344 default:
2345 return false;
2346 }
2347}
2348
dea09b1b
UW
2349/* Return the cost of an address rtx ADDR. */
2350
dcefdf67 2351static int
9c808aad 2352s390_address_cost (rtx addr)
dea09b1b
UW
2353{
2354 struct s390_address ad;
2355 if (!s390_decompose_address (addr, &ad))
2356 return 1000;
2357
2358 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
2359}
2360
fd3cd001
UW
2361/* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
2362 otherwise return 0. */
2363
2364int
5d81b82b 2365tls_symbolic_operand (rtx op)
fd3cd001 2366{
fd3cd001
UW
2367 if (GET_CODE (op) != SYMBOL_REF)
2368 return 0;
114278e7 2369 return SYMBOL_REF_TLS_MODEL (op);
fd3cd001 2370}
9db1d521 2371\f
c5aa1d12
UW
2372/* Split DImode access register reference REG (on 64-bit) into its constituent
2373 low and high parts, and store them into LO and HI. Note that gen_lowpart/
2374 gen_highpart cannot be used as they assume all registers are word-sized,
2375 while our access registers have only half that size. */
2376
2377void
2378s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
2379{
2380 gcc_assert (TARGET_64BIT);
2381 gcc_assert (ACCESS_REG_P (reg));
2382 gcc_assert (GET_MODE (reg) == DImode);
2383 gcc_assert (!(REGNO (reg) & 1));
2384
2385 *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
2386 *hi = gen_rtx_REG (SImode, REGNO (reg));
2387}
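
/* Editor's sketch: the value-level equivalent of the split above.
 On this big-endian target the even register (REGNO) holds the high
 word and the odd register (REGNO + 1) the low word. Illustration
 only; extract to compile and run. */
#if 0
#include <assert.h>
#include <stdint.h>

static void split_di (uint64_t val, uint32_t *lo, uint32_t *hi)
{
  *hi = (uint32_t) (val >> 32);   /* even register: high part */
  *lo = (uint32_t) val;           /* odd register: low part */
}

int main (void)
{
  uint32_t lo, hi;
  split_di (0x1122334455667788ull, &lo, &hi);
  assert (hi == 0x11223344 && lo == 0x55667788);
  return 0;
}
#endif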
9db1d521 2388
994fe660 2389/* Return true if OP contains a symbol reference. */
9db1d521 2390
3ed99cc9 2391bool
9c808aad 2392symbolic_reference_mentioned_p (rtx op)
9db1d521 2393{
5d81b82b
AS
2394 const char *fmt;
2395 int i;
9db1d521
HP
2396
2397 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
2398 return 1;
2399
2400 fmt = GET_RTX_FORMAT (GET_CODE (op));
2401 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2402 {
2403 if (fmt[i] == 'E')
2404 {
5d81b82b 2405 int j;
9db1d521
HP
2406
2407 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2408 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2409 return 1;
2410 }
2411
2412 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
2413 return 1;
2414 }
2415
2416 return 0;
2417}
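
/* Editor's sketch of the traversal pattern used here and in
 tls_symbolic_reference_mentioned_p below: a node's format string
 says which slots are sub-expressions ('e') and which are vectors of
 sub-expressions ('E'); everything else is a leaf field. The toy
 node type is hypothetical. Extract to compile and run. */
#if 0
#include <assert.h>
#include <stddef.h>

struct node { const char *fmt; int is_symbol;
              struct node *ops[2];            /* 'e' slots */
              struct node **vec; int veclen;  /* one 'E' slot */ };

static int mentions_symbol (struct node *n)
{
  int i, j, e = 0;
  if (n->is_symbol)
    return 1;
  for (i = 0; n->fmt[i]; i++)
    if (n->fmt[i] == 'e')
      { if (mentions_symbol (n->ops[e++])) return 1; }
    else if (n->fmt[i] == 'E')
      for (j = 0; j < n->veclen; j++)
        if (mentions_symbol (n->vec[j]))
          return 1;
  return 0;
}

int main (void)
{
  struct node sym = { "", 1, { NULL, NULL }, NULL, 0 };
  struct node reg = { "", 0, { NULL, NULL }, NULL, 0 };
  struct node plus = { "ee", 0, { &reg, &sym }, NULL, 0 };
  assert (mentions_symbol (&plus) && !mentions_symbol (&reg));
  return 0;
}
#endif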
2418
fd3cd001
UW
2419/* Return true if OP contains a reference to a thread-local symbol. */
2420
3ed99cc9 2421bool
9c808aad 2422tls_symbolic_reference_mentioned_p (rtx op)
fd3cd001 2423{
5d81b82b
AS
2424 const char *fmt;
2425 int i;
fd3cd001
UW
2426
2427 if (GET_CODE (op) == SYMBOL_REF)
2428 return tls_symbolic_operand (op);
2429
2430 fmt = GET_RTX_FORMAT (GET_CODE (op));
2431 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2432 {
2433 if (fmt[i] == 'E')
2434 {
5d81b82b 2435 int j;
fd3cd001
UW
2436
2437 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2438 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
3ed99cc9 2439 return true;
fd3cd001
UW
2440 }
2441
2442 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
3ed99cc9 2443 return true;
fd3cd001
UW
2444 }
2445
3ed99cc9 2446 return false;
fd3cd001
UW
2447}
2448
9db1d521 2449
c7453384
EC
2450/* Return true if OP is a legitimate general operand when
2451 generating PIC code. It is given that flag_pic is on
994fe660
UW
2452 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2453
9db1d521 2454int
5d81b82b 2455legitimate_pic_operand_p (rtx op)
9db1d521 2456{
4023fb28 2457 /* Accept all non-symbolic constants. */
9db1d521
HP
2458 if (!SYMBOLIC_CONST (op))
2459 return 1;
2460
c7453384 2461 /* Reject everything else; must be handled
fd3cd001 2462 via emit_symbolic_move. */
9db1d521
HP
2463 return 0;
2464}
2465
994fe660
UW
2466/* Returns true if the constant value OP is a legitimate general operand.
2467 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2468
9db1d521 2469int
5d81b82b 2470legitimate_constant_p (rtx op)
9db1d521 2471{
4023fb28 2472 /* Accept all non-symbolic constants. */
9db1d521
HP
2473 if (!SYMBOLIC_CONST (op))
2474 return 1;
2475
fd3cd001 2476 /* Accept immediate LARL operands. */
9e8327e3 2477 if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
fd3cd001
UW
2478 return 1;
2479
2480 /* Thread-local symbols are never legal constants. This is
2481 so that emit_call knows that computing such addresses
2482 might require a function call. */
2483 if (TLS_SYMBOLIC_CONST (op))
2484 return 0;
2485
9db1d521
HP
2486 /* In the PIC case, symbolic constants must *not* be
2487 forced into the literal pool. We accept them here,
fd3cd001 2488 so that they will be handled by emit_symbolic_move. */
9db1d521
HP
2489 if (flag_pic)
2490 return 1;
2491
9db1d521
HP
2492 /* All remaining non-PIC symbolic constants are
2493 forced into the literal pool. */
2494 return 0;
2495}
2496
fd3cd001
UW
2497/* Determine if it's legal to put X into the constant pool. This
2498 is not possible if X contains the address of a symbol that is
2499 not constant (TLS) or not known at final link time (PIC). */
2500
2501static bool
9c808aad 2502s390_cannot_force_const_mem (rtx x)
fd3cd001
UW
2503{
2504 switch (GET_CODE (x))
2505 {
2506 case CONST_INT:
2507 case CONST_DOUBLE:
2508 /* Accept all non-symbolic constants. */
2509 return false;
2510
2511 case LABEL_REF:
2512 /* Labels are OK iff we are non-PIC. */
2513 return flag_pic != 0;
2514
2515 case SYMBOL_REF:
2516 /* 'Naked' TLS symbol references are never OK,
2517 non-TLS symbols are OK iff we are non-PIC. */
2518 if (tls_symbolic_operand (x))
2519 return true;
2520 else
2521 return flag_pic != 0;
2522
2523 case CONST:
2524 return s390_cannot_force_const_mem (XEXP (x, 0));
2525 case PLUS:
2526 case MINUS:
2527 return s390_cannot_force_const_mem (XEXP (x, 0))
2528 || s390_cannot_force_const_mem (XEXP (x, 1));
2529
2530 case UNSPEC:
2531 switch (XINT (x, 1))
2532 {
2533 /* Only lt-relative or GOT-relative UNSPECs are OK. */
fd7643fb
UW
2534 case UNSPEC_LTREL_OFFSET:
2535 case UNSPEC_GOT:
2536 case UNSPEC_GOTOFF:
2537 case UNSPEC_PLTOFF:
fd3cd001
UW
2538 case UNSPEC_TLSGD:
2539 case UNSPEC_TLSLDM:
2540 case UNSPEC_NTPOFF:
2541 case UNSPEC_DTPOFF:
2542 case UNSPEC_GOTNTPOFF:
2543 case UNSPEC_INDNTPOFF:
2544 return false;
2545
9bb86f41
UW
 2546 /* If the literal pool shares the code section, execute template
 2547 placeholders may be put into the pool as well. */
2548 case UNSPEC_INSN:
2549 return TARGET_CPU_ZARCH;
2550
fd3cd001
UW
2551 default:
2552 return true;
2553 }
2554 break;
2555
2556 default:
8d933e31 2557 gcc_unreachable ();
fd3cd001
UW
2558 }
2559}
2560
4023fb28 2561/* Returns true if the constant value OP is a legitimate general
c7453384 2562 operand during and after reload. The difference to
4023fb28
UW
2563 legitimate_constant_p is that this function will not accept
2564 a constant that would need to be forced to the literal pool
2565 before it can be used as operand. */
2566
3ed99cc9 2567bool
5d81b82b 2568legitimate_reload_constant_p (rtx op)
4023fb28 2569{
d3632d41 2570 /* Accept la(y) operands. */
c7453384 2571 if (GET_CODE (op) == CONST_INT
d3632d41 2572 && DISP_IN_RANGE (INTVAL (op)))
3ed99cc9 2573 return true;
d3632d41 2574
ec24698e 2575 /* Accept l(g)hi/l(g)fi operands. */
4023fb28 2576 if (GET_CODE (op) == CONST_INT
ec24698e 2577 && (CONST_OK_FOR_K (INTVAL (op)) || CONST_OK_FOR_Os (INTVAL (op))))
3ed99cc9 2578 return true;
4023fb28
UW
2579
2580 /* Accept lliXX operands. */
9e8327e3 2581 if (TARGET_ZARCH
11598938
UW
2582 && GET_CODE (op) == CONST_INT
2583 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2584 && s390_single_part (op, word_mode, HImode, 0) >= 0)
3ed99cc9 2585 return true;
4023fb28 2586
ec24698e
UW
2587 if (TARGET_EXTIMM
2588 && GET_CODE (op) == CONST_INT
2589 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2590 && s390_single_part (op, word_mode, SImode, 0) >= 0)
2591 return true;
2592
4023fb28 2593 /* Accept larl operands. */
9e8327e3 2594 if (TARGET_CPU_ZARCH
4023fb28 2595 && larl_operand (op, VOIDmode))
3ed99cc9 2596 return true;
4023fb28 2597
d096725d
AS
2598 /* Accept lzXX operands. */
2599 if (GET_CODE (op) == CONST_DOUBLE
2600 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', "G"))
2601 return true;
2602
11598938
UW
2603 /* Accept double-word operands that can be split. */
2604 if (GET_CODE (op) == CONST_INT
2605 && trunc_int_for_mode (INTVAL (op), word_mode) != INTVAL (op))
2606 {
2607 enum machine_mode dword_mode = word_mode == SImode ? DImode : TImode;
2608 rtx hi = operand_subword (op, 0, 0, dword_mode);
2609 rtx lo = operand_subword (op, 1, 0, dword_mode);
2610 return legitimate_reload_constant_p (hi)
2611 && legitimate_reload_constant_p (lo);
2612 }
2613
4023fb28 2614 /* Everything else cannot be handled without reload. */
3ed99cc9 2615 return false;
4023fb28
UW
2616}
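
/* Editor's sketch: the double-word case above boils down to testing
 both word-sized halves independently. The per-word predicate here
 is a hypothetical stand-in for the real constant checks; for a
 31-bit target word_mode is SImode. Extract to compile and run. */
#if 0
#include <assert.h>
#include <stdint.h>

static int word_ok_example (int32_t w)  /* stand-in predicate */
{
  return w >= -4096 && w < 4096;
}

static int dword_reload_ok (int64_t v, int (*word_ok) (int32_t))
{
  int32_t hi = (int32_t) (v >> 32);     /* operand_subword 0 */
  int32_t lo = (int32_t) v;             /* operand_subword 1 */
  return word_ok (hi) && word_ok (lo);
}

int main (void)
{
  assert (dword_reload_ok (0x0000001000000020ll, word_ok_example));
  assert (!dword_reload_ok (0x7fffffff00000000ll, word_ok_example));
  return 0;
}
#endif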
2617
2618/* Given an rtx OP being reloaded into a reg required to be in class CLASS,
2619 return the class of reg to actually use. */
2620
2621enum reg_class
9c808aad 2622s390_preferred_reload_class (rtx op, enum reg_class class)
4023fb28 2623{
4023fb28
UW
2624 switch (GET_CODE (op))
2625 {
2626 /* Constants we cannot reload must be forced into the
0796c16a
UW
2627 literal pool. */
2628
4023fb28
UW
2629 case CONST_DOUBLE:
2630 case CONST_INT:
0796c16a 2631 if (legitimate_reload_constant_p (op))
4023fb28 2632 return class;
0796c16a
UW
2633 else
2634 return NO_REGS;
4023fb28
UW
2635
2636 /* If a symbolic constant or a PLUS is reloaded,
14b3e8ef
UW
2637 it is most likely being used as an address, so
2638 prefer ADDR_REGS. If 'class' is not a superset
2639 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
4023fb28
UW
2640 case PLUS:
2641 case LABEL_REF:
2642 case SYMBOL_REF:
2643 case CONST:
14b3e8ef
UW
2644 if (reg_class_subset_p (ADDR_REGS, class))
2645 return ADDR_REGS;
2646 else
2647 return NO_REGS;
4023fb28
UW
2648
2649 default:
2650 break;
2651 }
2652
2653 return class;
2654}
9db1d521 2655
833cd70a
AK
2656/* Inform reload about cases where moving X with a mode MODE to a register in
2657 CLASS requires an extra scratch or immediate register. Return the class
2658 needed for the immediate register. */
f61a2c7d 2659
833cd70a
AK
2660static enum reg_class
2661s390_secondary_reload (bool in_p, rtx x, enum reg_class class,
2662 enum machine_mode mode, secondary_reload_info *sri)
2663{
2664 /* Intermediate register needed. */
02887425 2665 if (reg_classes_intersect_p (CC_REGS, class))
9dc62c00
AK
2666 return GENERAL_REGS;
2667
833cd70a
AK
2668 /* We need a scratch register when loading a PLUS expression which
2669 is not a legitimate operand of the LOAD ADDRESS instruction. */
2670 if (in_p && s390_plus_operand (x, mode))
2671 sri->icode = (TARGET_64BIT ?
2672 CODE_FOR_reloaddi_plus : CODE_FOR_reloadsi_plus);
2673
7fa7289d 2674 /* When performing a multiword move from or to memory, we have to make sure the
833cd70a
AK
2675 second chunk in memory is addressable without causing a displacement
 2676 overflow. If that would be the case, we calculate the address in
2677 a scratch register. */
2678 if (MEM_P (x)
2679 && GET_CODE (XEXP (x, 0)) == PLUS
2680 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2681 && !DISP_IN_RANGE (INTVAL (XEXP (XEXP (x, 0), 1))
0ca89db7 2682 + GET_MODE_SIZE (mode) - 1))
833cd70a 2683 {
7fa7289d 2684 /* For GENERAL_REGS a displacement overflow is no problem if occurring
833cd70a
AK
 2685 in an s_operand address, since we may fall back to lm/stm. So we only
2686 have to care about overflows in the b+i+d case. */
2687 if ((reg_classes_intersect_p (GENERAL_REGS, class)
2688 && s390_class_max_nregs (GENERAL_REGS, mode) > 1
2689 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS)
2690 /* For FP_REGS no lm/stm is available so this check is triggered
2691 for displacement overflows in b+i+d and b+d like addresses. */
2692 || (reg_classes_intersect_p (FP_REGS, class)
2693 && s390_class_max_nregs (FP_REGS, mode) > 1))
2694 {
2695 if (in_p)
2696 sri->icode = (TARGET_64BIT ?
2697 CODE_FOR_reloaddi_nonoffmem_in :
2698 CODE_FOR_reloadsi_nonoffmem_in);
2699 else
2700 sri->icode = (TARGET_64BIT ?
2701 CODE_FOR_reloaddi_nonoffmem_out :
2702 CODE_FOR_reloadsi_nonoffmem_out);
2703 }
2704 }
9dc62c00 2705
833cd70a 2706 /* Either scratch or no register needed. */
dc65c307
UW
2707 return NO_REGS;
2708}
2709
f3e9edff
UW
2710/* Generate code to load SRC, which is PLUS that is not a
2711 legitimate operand for the LA instruction, into TARGET.
2712 SCRATCH may be used as scratch register. */
2713
2714void
5d81b82b
AS
2715s390_expand_plus_operand (rtx target, rtx src,
2716 rtx scratch)
f3e9edff 2717{
7974fe63 2718 rtx sum1, sum2;
b808c04c 2719 struct s390_address ad;
6a4e49c1 2720
6a4e49c1 2721 /* src must be a PLUS; get its two operands. */
8d933e31
AS
2722 gcc_assert (GET_CODE (src) == PLUS);
2723 gcc_assert (GET_MODE (src) == Pmode);
f3e9edff 2724
7c82a1ed
UW
2725 /* Check if any of the two operands is already scheduled
2726 for replacement by reload. This can happen e.g. when
2727 float registers occur in an address. */
2728 sum1 = find_replacement (&XEXP (src, 0));
2729 sum2 = find_replacement (&XEXP (src, 1));
ccfc6cc8 2730 src = gen_rtx_PLUS (Pmode, sum1, sum2);
ccfc6cc8 2731
7974fe63
UW
2732 /* If the address is already strictly valid, there's nothing to do. */
2733 if (!s390_decompose_address (src, &ad)
93fa8428
AK
2734 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
2735 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
f3e9edff 2736 {
7974fe63
UW
2737 /* Otherwise, one of the operands cannot be an address register;
2738 we reload its value into the scratch register. */
2739 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
2740 {
2741 emit_move_insn (scratch, sum1);
2742 sum1 = scratch;
2743 }
2744 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
2745 {
2746 emit_move_insn (scratch, sum2);
2747 sum2 = scratch;
2748 }
f3e9edff 2749
7974fe63
UW
2750 /* According to the way these invalid addresses are generated
2751 in reload.c, it should never happen (at least on s390) that
2752 *neither* of the PLUS components, after find_replacements
2753 was applied, is an address register. */
2754 if (sum1 == scratch && sum2 == scratch)
2755 {
2756 debug_rtx (src);
8d933e31 2757 gcc_unreachable ();
7974fe63 2758 }
f3e9edff 2759
7974fe63 2760 src = gen_rtx_PLUS (Pmode, sum1, sum2);
f3e9edff
UW
2761 }
2762
2763 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
2764 is only ever performed on addresses, so we can mark the
2765 sum as legitimate for LA in any case. */
a41c6c53 2766 s390_load_address (target, src);
f3e9edff
UW
2767}
2768
2769
3ed99cc9 2770/* Return true if ADDR is a valid memory address.
ab96de7e 2771 STRICT specifies whether strict register checking applies. */
9db1d521 2772
3ed99cc9 2773bool
ab96de7e 2774legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
5d81b82b 2775 rtx addr, int strict)
9db1d521 2776{
ab96de7e
AS
2777 struct s390_address ad;
2778 if (!s390_decompose_address (addr, &ad))
3ed99cc9 2779 return false;
b808c04c
UW
2780
2781 if (strict)
2782 {
93fa8428 2783 if (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
3ed99cc9 2784 return false;
93fa8428
AK
2785
2786 if (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx)))
3ed99cc9 2787 return false;
b808c04c
UW
2788 }
2789 else
2790 {
93fa8428
AK
2791 if (ad.base
2792 && !(REGNO (ad.base) >= FIRST_PSEUDO_REGISTER
2793 || REGNO_REG_CLASS (REGNO (ad.base)) == ADDR_REGS))
3ed99cc9 2794 return false;
93fa8428
AK
2795
2796 if (ad.indx
2797 && !(REGNO (ad.indx) >= FIRST_PSEUDO_REGISTER
2798 || REGNO_REG_CLASS (REGNO (ad.indx)) == ADDR_REGS))
2799 return false;
b808c04c 2800 }
3ed99cc9 2801 return true;
9db1d521
HP
2802}
2803
3ed99cc9 2804/* Return true if OP is a valid operand for the LA instruction.
ba956982
UW
2805 In 31-bit, we need to prove that the result is used as an
2806 address, as LA performs only a 31-bit addition. */
2807
3ed99cc9 2808bool
5d81b82b 2809legitimate_la_operand_p (rtx op)
ba956982
UW
2810{
2811 struct s390_address addr;
b808c04c 2812 if (!s390_decompose_address (op, &addr))
3ed99cc9 2813 return false;
ba956982 2814
3ed99cc9 2815 return (TARGET_64BIT || addr.pointer);
f3e9edff 2816}
ba956982 2817
3ed99cc9 2818/* Return true if it is valid *and* preferable to use LA to
e1d5ee28 2819 compute the sum of OP1 and OP2. */
c7453384 2820
3ed99cc9 2821bool
e1d5ee28 2822preferred_la_operand_p (rtx op1, rtx op2)
100c7420
UW
2823{
2824 struct s390_address addr;
e1d5ee28
UW
2825
2826 if (op2 != const0_rtx)
2827 op1 = gen_rtx_PLUS (Pmode, op1, op2);
2828
2829 if (!s390_decompose_address (op1, &addr))
3ed99cc9 2830 return false;
93fa8428 2831 if (addr.base && !REGNO_OK_FOR_BASE_P (REGNO (addr.base)))
3ed99cc9 2832 return false;
93fa8428 2833 if (addr.indx && !REGNO_OK_FOR_INDEX_P (REGNO (addr.indx)))
3ed99cc9 2834 return false;
100c7420
UW
2835
2836 if (!TARGET_64BIT && !addr.pointer)
3ed99cc9 2837 return false;
100c7420
UW
2838
2839 if (addr.pointer)
3ed99cc9 2840 return true;
100c7420 2841
4888ec5d
UW
2842 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2843 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
3ed99cc9 2844 return true;
100c7420 2845
3ed99cc9 2846 return false;
100c7420
UW
2847}
2848
a41c6c53
UW
2849/* Emit a forced load-address operation to load SRC into DST.
2850 This will use the LOAD ADDRESS instruction even in situations
2851 where legitimate_la_operand_p (SRC) returns false. */
ba956982 2852
a41c6c53 2853void
9c808aad 2854s390_load_address (rtx dst, rtx src)
f3e9edff 2855{
a41c6c53
UW
2856 if (TARGET_64BIT)
2857 emit_move_insn (dst, src);
2858 else
2859 emit_insn (gen_force_la_31 (dst, src));
ba956982
UW
2860}
2861
9db1d521
HP
2862/* Return a legitimate reference for ORIG (an address) using the
2863 register REG. If REG is 0, a new pseudo is generated.
2864
2865 There are two types of references that must be handled:
2866
2867 1. Global data references must load the address from the GOT, via
2868 the PIC reg. An insn is emitted to do this load, and the reg is
2869 returned.
2870
2871 2. Static data references, constant pool addresses, and code labels
2872 compute the address as an offset from the GOT, whose base is in
114278e7 2873 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
9db1d521
HP
2874 differentiate them from global data objects. The returned
2875 address is the PIC reg + an unspec constant.
2876
2877 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2878 reg also appears in the address. */
2879
2880rtx
9c808aad 2881legitimize_pic_address (rtx orig, rtx reg)
9db1d521
HP
2882{
2883 rtx addr = orig;
2884 rtx new = orig;
2885 rtx base;
2886
cf9d7618
ANM
2887 gcc_assert (!TLS_SYMBOLIC_CONST (addr));
2888
9db1d521 2889 if (GET_CODE (addr) == LABEL_REF
114278e7 2890 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
9db1d521
HP
2891 {
2892 /* This is a local symbol. */
9e8327e3 2893 if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
9db1d521 2894 {
c7453384
EC
2895 /* Access local symbols PC-relative via LARL.
2896 This is the same as in the non-PIC case, so it is
d65f7478 2897 handled automatically ... */
9db1d521
HP
2898 }
2899 else
2900 {
fd7643fb 2901 /* Access local symbols relative to the GOT. */
9db1d521
HP
2902
2903 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2904
fd7643fb 2905 if (reload_in_progress || reload_completed)
6fb5fa3c 2906 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
fd7643fb
UW
2907
2908 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
e23795ea
UW
2909 addr = gen_rtx_CONST (Pmode, addr);
2910 addr = force_const_mem (Pmode, addr);
9db1d521
HP
2911 emit_move_insn (temp, addr);
2912
fd7643fb 2913 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
9db1d521
HP
2914 if (reg != 0)
2915 {
6aea2f6d 2916 s390_load_address (reg, new);
9db1d521
HP
2917 new = reg;
2918 }
2919 }
2920 }
2921 else if (GET_CODE (addr) == SYMBOL_REF)
2922 {
2923 if (reg == 0)
2924 reg = gen_reg_rtx (Pmode);
2925
2926 if (flag_pic == 1)
2927 {
2928 /* Assume GOT offset < 4k. This is handled the same way
fd7643fb 2929 in both 31- and 64-bit code (@GOT). */
9db1d521 2930
c3cc6b78 2931 if (reload_in_progress || reload_completed)
6fb5fa3c 2932 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
9db1d521 2933
fd7643fb 2934 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
9db1d521
HP
2935 new = gen_rtx_CONST (Pmode, new);
2936 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
542a8afa 2937 new = gen_const_mem (Pmode, new);
9db1d521
HP
2938 emit_move_insn (reg, new);
2939 new = reg;
2940 }
9e8327e3 2941 else if (TARGET_CPU_ZARCH)
9db1d521
HP
2942 {
2943 /* If the GOT offset might be >= 4k, we determine the position
2944 of the GOT entry via a PC-relative LARL (@GOTENT). */
2945
2946 rtx temp = gen_reg_rtx (Pmode);
2947
fd7643fb 2948 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
9db1d521
HP
2949 new = gen_rtx_CONST (Pmode, new);
2950 emit_move_insn (temp, new);
2951
542a8afa 2952 new = gen_const_mem (Pmode, temp);
9db1d521
HP
2953 emit_move_insn (reg, new);
2954 new = reg;
2955 }
2956 else
2957 {
c7453384 2958 /* If the GOT offset might be >= 4k, we have to load it
9db1d521
HP
2959 from the literal pool (@GOT). */
2960
2961 rtx temp = gen_reg_rtx (Pmode);
2962
c3cc6b78 2963 if (reload_in_progress || reload_completed)
6fb5fa3c 2964 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
9db1d521 2965
fd7643fb 2966 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
e23795ea
UW
2967 addr = gen_rtx_CONST (Pmode, addr);
2968 addr = force_const_mem (Pmode, addr);
9db1d521
HP
2969 emit_move_insn (temp, addr);
2970
2971 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
542a8afa 2972 new = gen_const_mem (Pmode, new);
9db1d521
HP
2973 emit_move_insn (reg, new);
2974 new = reg;
2975 }
c7453384 2976 }
9db1d521
HP
2977 else
2978 {
2979 if (GET_CODE (addr) == CONST)
2980 {
2981 addr = XEXP (addr, 0);
2982 if (GET_CODE (addr) == UNSPEC)
2983 {
8d933e31 2984 gcc_assert (XVECLEN (addr, 0) == 1);
9db1d521
HP
2985 switch (XINT (addr, 1))
2986 {
fd7643fb 2987 /* If someone moved a GOT-relative UNSPEC
9db1d521 2988 out of the literal pool, force them back in. */
fd7643fb
UW
2989 case UNSPEC_GOTOFF:
2990 case UNSPEC_PLTOFF:
e23795ea 2991 new = force_const_mem (Pmode, orig);
9db1d521
HP
2992 break;
2993
fd7643fb
UW
2994 /* @GOT is OK as is if small. */
2995 case UNSPEC_GOT:
2996 if (flag_pic == 2)
2997 new = force_const_mem (Pmode, orig);
2998 break;
2999
9db1d521 3000 /* @GOTENT is OK as is. */
fd7643fb 3001 case UNSPEC_GOTENT:
9db1d521
HP
3002 break;
3003
3004 /* @PLT is OK as is on 64-bit, must be converted to
fd7643fb
UW
3005 GOT-relative @PLTOFF on 31-bit. */
3006 case UNSPEC_PLT:
9e8327e3 3007 if (!TARGET_CPU_ZARCH)
9db1d521
HP
3008 {
3009 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3010
fd7643fb 3011 if (reload_in_progress || reload_completed)
6fb5fa3c 3012 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
fd7643fb 3013
9db1d521 3014 addr = XVECEXP (addr, 0, 0);
c7453384 3015 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
fd7643fb 3016 UNSPEC_PLTOFF);
e23795ea
UW
3017 addr = gen_rtx_CONST (Pmode, addr);
3018 addr = force_const_mem (Pmode, addr);
9db1d521
HP
3019 emit_move_insn (temp, addr);
3020
fd7643fb 3021 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
9db1d521
HP
3022 if (reg != 0)
3023 {
6aea2f6d 3024 s390_load_address (reg, new);
9db1d521
HP
3025 new = reg;
3026 }
3027 }
3028 break;
3029
3030 /* Everything else cannot happen. */
3031 default:
8d933e31 3032 gcc_unreachable ();
9db1d521
HP
3033 }
3034 }
8d933e31
AS
3035 else
3036 gcc_assert (GET_CODE (addr) == PLUS);
9db1d521
HP
3037 }
3038 if (GET_CODE (addr) == PLUS)
3039 {
3040 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
cf9d7618
ANM
3041
3042 gcc_assert (!TLS_SYMBOLIC_CONST (op0));
3043 gcc_assert (!TLS_SYMBOLIC_CONST (op1));
3044
c7453384 3045 /* Check first to see if this is a constant offset
9db1d521
HP
3046 from a local symbol reference. */
3047 if ((GET_CODE (op0) == LABEL_REF
114278e7 3048 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
9db1d521
HP
3049 && GET_CODE (op1) == CONST_INT)
3050 {
e064939e
MM
3051 if (TARGET_CPU_ZARCH
3052 && larl_operand (op0, VOIDmode)
3053 && INTVAL (op1) < (HOST_WIDE_INT)1 << 31
3054 && INTVAL (op1) >= -((HOST_WIDE_INT)1 << 31))
9db1d521
HP
3055 {
3056 if (INTVAL (op1) & 1)
3057 {
c7453384 3058 /* LARL can't handle odd offsets, so emit a
9db1d521
HP
3059 pair of LARL and LA. */
3060 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3061
d3632d41 3062 if (!DISP_IN_RANGE (INTVAL (op1)))
9db1d521 3063 {
e064939e 3064 HOST_WIDE_INT even = INTVAL (op1) - 1;
9db1d521 3065 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
b30d2115 3066 op0 = gen_rtx_CONST (Pmode, op0);
a556fd39 3067 op1 = const1_rtx;
9db1d521
HP
3068 }
3069
3070 emit_move_insn (temp, op0);
3071 new = gen_rtx_PLUS (Pmode, temp, op1);
3072
3073 if (reg != 0)
3074 {
6aea2f6d 3075 s390_load_address (reg, new);
9db1d521
HP
3076 new = reg;
3077 }
3078 }
3079 else
3080 {
3081 /* If the offset is even, we can just use LARL.
3082 This will happen automatically. */
3083 }
3084 }
3085 else
3086 {
fd7643fb 3087 /* Access local symbols relative to the GOT. */
9db1d521
HP
3088
3089 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3090
fd7643fb 3091 if (reload_in_progress || reload_completed)
6fb5fa3c 3092 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
fd7643fb 3093
c7453384 3094 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
fd7643fb 3095 UNSPEC_GOTOFF);
e23795ea
UW
3096 addr = gen_rtx_PLUS (Pmode, addr, op1);
3097 addr = gen_rtx_CONST (Pmode, addr);
3098 addr = force_const_mem (Pmode, addr);
cfbab41c 3099 emit_move_insn (temp, addr);
9db1d521 3100
fd7643fb 3101 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
9db1d521
HP
3102 if (reg != 0)
3103 {
6aea2f6d 3104 s390_load_address (reg, new);
9db1d521
HP
3105 new = reg;
3106 }
3107 }
3108 }
3109
fd7643fb 3110 /* Now, check whether it is a GOT relative symbol plus offset
9db1d521
HP
3111 that was pulled out of the literal pool. Force it back in. */
3112
3113 else if (GET_CODE (op0) == UNSPEC
cfbab41c
JJ
3114 && GET_CODE (op1) == CONST_INT
3115 && XINT (op0, 1) == UNSPEC_GOTOFF)
9db1d521 3116 {
8d933e31 3117 gcc_assert (XVECLEN (op0, 0) == 1);
9db1d521 3118
e23795ea 3119 new = force_const_mem (Pmode, orig);
9db1d521
HP
3120 }
3121
3122 /* Otherwise, compute the sum. */
3123 else
3124 {
3125 base = legitimize_pic_address (XEXP (addr, 0), reg);
3126 new = legitimize_pic_address (XEXP (addr, 1),
3127 base == reg ? NULL_RTX : reg);
3128 if (GET_CODE (new) == CONST_INT)
3129 new = plus_constant (base, INTVAL (new));
3130 else
3131 {
3132 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
3133 {
3134 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
3135 new = XEXP (new, 1);
3136 }
3137 new = gen_rtx_PLUS (Pmode, base, new);
3138 }
3139
3140 if (GET_CODE (new) == CONST)
3141 new = XEXP (new, 0);
3142 new = force_operand (new, 0);
3143 }
3144 }
3145 }
3146 return new;
3147}
3148
fd3cd001
UW
3149/* Load the thread pointer into a register. */
3150
7b8acc34
AK
3151rtx
3152s390_get_thread_pointer (void)
fd3cd001 3153{
c5aa1d12 3154 rtx tp = gen_reg_rtx (Pmode);
fd3cd001 3155
c5aa1d12 3156 emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
fd3cd001
UW
3157 mark_reg_pointer (tp, BITS_PER_WORD);
3158
3159 return tp;
3160}
3161
ed9676cf
AK
 3162/* Emit a TLS call insn. The call target is the SYMBOL_REF stored
 3163 in s390_tls_symbol, which always refers to __tls_get_offset.
 3164 The returned offset is written to RESULT_REG and a USE rtx is
 3165 generated for TLS_CALL. */
fd3cd001
UW
3166
3167static GTY(()) rtx s390_tls_symbol;
ed9676cf
AK
3168
3169static void
3170s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
fd3cd001 3171{
ed9676cf 3172 rtx insn;
38899e29 3173
8d933e31 3174 gcc_assert (flag_pic);
ed9676cf 3175
fd3cd001
UW
3176 if (!s390_tls_symbol)
3177 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
3178
38899e29
EC
3179 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
3180 gen_rtx_REG (Pmode, RETURN_REGNUM));
ed9676cf
AK
3181
3182 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
3183 CONST_OR_PURE_CALL_P (insn) = 1;
fd3cd001
UW
3184}
3185
3186/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3187 this (thread-local) address. REG may be used as temporary. */
3188
3189static rtx
9c808aad 3190legitimize_tls_address (rtx addr, rtx reg)
fd3cd001
UW
3191{
3192 rtx new, tls_call, temp, base, r2, insn;
3193
3194 if (GET_CODE (addr) == SYMBOL_REF)
3195 switch (tls_symbolic_operand (addr))
3196 {
3197 case TLS_MODEL_GLOBAL_DYNAMIC:
3198 start_sequence ();
3199 r2 = gen_rtx_REG (Pmode, 2);
3200 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
3201 new = gen_rtx_CONST (Pmode, tls_call);
3202 new = force_const_mem (Pmode, new);
3203 emit_move_insn (r2, new);
ed9676cf 3204 s390_emit_tls_call_insn (r2, tls_call);
fd3cd001
UW
3205 insn = get_insns ();
3206 end_sequence ();
3207
3208 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3209 temp = gen_reg_rtx (Pmode);
3210 emit_libcall_block (insn, temp, r2, new);
3211
7b8acc34 3212 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
fd3cd001
UW
3213 if (reg != 0)
3214 {
3215 s390_load_address (reg, new);
3216 new = reg;
3217 }
3218 break;
3219
3220 case TLS_MODEL_LOCAL_DYNAMIC:
3221 start_sequence ();
3222 r2 = gen_rtx_REG (Pmode, 2);
3223 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
3224 new = gen_rtx_CONST (Pmode, tls_call);
3225 new = force_const_mem (Pmode, new);
3226 emit_move_insn (r2, new);
ed9676cf 3227 s390_emit_tls_call_insn (r2, tls_call);
fd3cd001
UW
3228 insn = get_insns ();
3229 end_sequence ();
3230
3231 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
3232 temp = gen_reg_rtx (Pmode);
3233 emit_libcall_block (insn, temp, r2, new);
3234
7b8acc34 3235 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
fd3cd001
UW
3236 base = gen_reg_rtx (Pmode);
3237 s390_load_address (base, new);
3238
3239 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
3240 new = gen_rtx_CONST (Pmode, new);
3241 new = force_const_mem (Pmode, new);
3242 temp = gen_reg_rtx (Pmode);
3243 emit_move_insn (temp, new);
3244
3245 new = gen_rtx_PLUS (Pmode, base, temp);
3246 if (reg != 0)
3247 {
3248 s390_load_address (reg, new);
3249 new = reg;
3250 }
3251 break;
3252
3253 case TLS_MODEL_INITIAL_EXEC:
3254 if (flag_pic == 1)
3255 {
3256 /* Assume GOT offset < 4k. This is handled the same way
3257 in both 31- and 64-bit code. */
3258
3259 if (reload_in_progress || reload_completed)
6fb5fa3c 3260 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
fd3cd001
UW
3261
3262 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3263 new = gen_rtx_CONST (Pmode, new);
3264 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
542a8afa 3265 new = gen_const_mem (Pmode, new);
fd3cd001
UW
3266 temp = gen_reg_rtx (Pmode);
3267 emit_move_insn (temp, new);
3268 }
9e8327e3 3269 else if (TARGET_CPU_ZARCH)
fd3cd001
UW
3270 {
3271 /* If the GOT offset might be >= 4k, we determine the position
3272 of the GOT entry via a PC-relative LARL. */
3273
3274 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3275 new = gen_rtx_CONST (Pmode, new);
3276 temp = gen_reg_rtx (Pmode);
3277 emit_move_insn (temp, new);
3278
542a8afa 3279 new = gen_const_mem (Pmode, temp);
fd3cd001
UW
3280 temp = gen_reg_rtx (Pmode);
3281 emit_move_insn (temp, new);
3282 }
3283 else if (flag_pic)
3284 {
c7453384 3285 /* If the GOT offset might be >= 4k, we have to load it
fd3cd001
UW
3286 from the literal pool. */
3287
3288 if (reload_in_progress || reload_completed)
6fb5fa3c 3289 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
fd3cd001
UW
3290
3291 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3292 new = gen_rtx_CONST (Pmode, new);
3293 new = force_const_mem (Pmode, new);
3294 temp = gen_reg_rtx (Pmode);
3295 emit_move_insn (temp, new);
3296
3297 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
542a8afa 3298 new = gen_const_mem (Pmode, new);
fd3cd001
UW
3299
3300 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3301 temp = gen_reg_rtx (Pmode);
3302 emit_insn (gen_rtx_SET (Pmode, temp, new));
3303 }
3304 else
3305 {
3306 /* In position-dependent code, load the absolute address of
3307 the GOT entry from the literal pool. */
3308
3309 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3310 new = gen_rtx_CONST (Pmode, new);
3311 new = force_const_mem (Pmode, new);
3312 temp = gen_reg_rtx (Pmode);
3313 emit_move_insn (temp, new);
3314
3315 new = temp;
542a8afa 3316 new = gen_const_mem (Pmode, new);
fd3cd001
UW
3317 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3318 temp = gen_reg_rtx (Pmode);
3319 emit_insn (gen_rtx_SET (Pmode, temp, new));
3320 }
3321
7b8acc34 3322 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
fd3cd001
UW
3323 if (reg != 0)
3324 {
3325 s390_load_address (reg, new);
3326 new = reg;
3327 }
3328 break;
3329
3330 case TLS_MODEL_LOCAL_EXEC:
3331 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3332 new = gen_rtx_CONST (Pmode, new);
3333 new = force_const_mem (Pmode, new);
3334 temp = gen_reg_rtx (Pmode);
3335 emit_move_insn (temp, new);
3336
7b8acc34 3337 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
fd3cd001
UW
3338 if (reg != 0)
3339 {
3340 s390_load_address (reg, new);
3341 new = reg;
3342 }
3343 break;
3344
3345 default:
8d933e31 3346 gcc_unreachable ();
fd3cd001
UW
3347 }
3348
3349 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
3350 {
3351 switch (XINT (XEXP (addr, 0), 1))
3352 {
3353 case UNSPEC_INDNTPOFF:
8d933e31
AS
3354 gcc_assert (TARGET_CPU_ZARCH);
3355 new = addr;
fd3cd001
UW
3356 break;
3357
3358 default:
8d933e31 3359 gcc_unreachable ();
fd3cd001
UW
3360 }
3361 }
3362
578d1468
UW
3363 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3364 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3365 {
3366 new = XEXP (XEXP (addr, 0), 0);
3367 if (GET_CODE (new) != SYMBOL_REF)
3368 new = gen_rtx_CONST (Pmode, new);
3369
3370 new = legitimize_tls_address (new, reg);
3371 new = plus_constant (new, INTVAL (XEXP (XEXP (addr, 0), 1)));
3372 new = force_operand (new, 0);
3373 }
3374
fd3cd001 3375 else
8d933e31 3376 gcc_unreachable (); /* for now ... */
fd3cd001
UW
3377
3378 return new;
3379}
3380
9db1d521
HP
3381/* Emit insns to move operands[1] into operands[0]. */
3382
3383void
9c808aad 3384emit_symbolic_move (rtx *operands)
9db1d521 3385{
b3a13419 3386 rtx temp = !can_create_pseudo_p () ? operands[0] : gen_reg_rtx (Pmode);
9db1d521 3387
fd3cd001 3388 if (GET_CODE (operands[0]) == MEM)
9db1d521 3389 operands[1] = force_reg (Pmode, operands[1]);
fd3cd001
UW
3390 else if (TLS_SYMBOLIC_CONST (operands[1]))
3391 operands[1] = legitimize_tls_address (operands[1], temp);
3392 else if (flag_pic)
9db1d521
HP
3393 operands[1] = legitimize_pic_address (operands[1], temp);
3394}
3395
994fe660 3396/* Try machine-dependent ways of modifying an illegitimate address X
9db1d521 3397 to be legitimate. If we find one, return the new, valid address.
9db1d521
HP
3398
3399 OLDX is the address as it was before break_out_memory_refs was called.
3400 In some cases it is useful to look at this to decide what needs to be done.
3401
994fe660 3402 MODE is the mode of the operand pointed to by X.
9db1d521
HP
3403
3404 When -fpic is used, special handling is needed for symbolic references.
3405 See comments by legitimize_pic_address for details. */
3406
3407rtx
5d81b82b 3408legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
9c808aad 3409 enum machine_mode mode ATTRIBUTE_UNUSED)
9db1d521 3410{
ba956982 3411 rtx constant_term = const0_rtx;
9db1d521 3412
fd3cd001
UW
3413 if (TLS_SYMBOLIC_CONST (x))
3414 {
3415 x = legitimize_tls_address (x, 0);
3416
3417 if (legitimate_address_p (mode, x, FALSE))
3418 return x;
3419 }
cf9d7618
ANM
3420 else if (GET_CODE (x) == PLUS
3421 && (TLS_SYMBOLIC_CONST (XEXP (x, 0))
3422 || TLS_SYMBOLIC_CONST (XEXP (x, 1))))
3423 {
3424 return x;
3425 }
fd3cd001 3426 else if (flag_pic)
9db1d521 3427 {
ba956982 3428 if (SYMBOLIC_CONST (x)
c7453384
EC
3429 || (GET_CODE (x) == PLUS
3430 && (SYMBOLIC_CONST (XEXP (x, 0))
ba956982
UW
3431 || SYMBOLIC_CONST (XEXP (x, 1)))))
3432 x = legitimize_pic_address (x, 0);
3433
3434 if (legitimate_address_p (mode, x, FALSE))
3435 return x;
9db1d521 3436 }
9db1d521 3437
ba956982 3438 x = eliminate_constant_term (x, &constant_term);
994fe660 3439
61f02ff5
UW
3440 /* Optimize loading of large displacements by splitting them
3441 into the multiple of 4K and the rest; this allows the
c7453384 3442 former to be CSE'd if possible.
61f02ff5
UW
3443
3444 Don't do this if the displacement is added to a register
3445 pointing into the stack frame, as the offsets will
3446 change later anyway. */
3447
3448 if (GET_CODE (constant_term) == CONST_INT
d3632d41
UW
3449 && !TARGET_LONG_DISPLACEMENT
3450 && !DISP_IN_RANGE (INTVAL (constant_term))
61f02ff5
UW
3451 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
3452 {
3453 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
3454 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
3455
3456 rtx temp = gen_reg_rtx (Pmode);
3457 rtx val = force_operand (GEN_INT (upper), temp);
3458 if (val != temp)
3459 emit_move_insn (temp, val);
3460
3461 x = gen_rtx_PLUS (Pmode, x, temp);
3462 constant_term = GEN_INT (lower);
3463 }
3464
ba956982 3465 if (GET_CODE (x) == PLUS)
9db1d521 3466 {
ba956982
UW
3467 if (GET_CODE (XEXP (x, 0)) == REG)
3468 {
5d81b82b
AS
3469 rtx temp = gen_reg_rtx (Pmode);
3470 rtx val = force_operand (XEXP (x, 1), temp);
ba956982
UW
3471 if (val != temp)
3472 emit_move_insn (temp, val);
3473
3474 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
3475 }
3476
3477 else if (GET_CODE (XEXP (x, 1)) == REG)
3478 {
5d81b82b
AS
3479 rtx temp = gen_reg_rtx (Pmode);
3480 rtx val = force_operand (XEXP (x, 0), temp);
ba956982
UW
3481 if (val != temp)
3482 emit_move_insn (temp, val);
3483
3484 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
3485 }
9db1d521 3486 }
ba956982
UW
3487
3488 if (constant_term != const0_rtx)
3489 x = gen_rtx_PLUS (Pmode, x, constant_term);
3490
3491 return x;
9db1d521
HP
3492}
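
/* Editor's note: the &/^ pair used above splits a displacement into a
 4K-aligned upper part and a 12-bit lower part; because LOWER is
 exactly the low 12 bits, UPPER + LOWER always recovers the original
 value, negative displacements included. Standalone check below;
 extract to compile and run. */
#if 0
#include <assert.h>

static void split_disp (long long disp, long long *upper, long long *lower)
{
  *lower = disp & 0xfff;   /* low 12 bits, always 0..4095 */
  *upper = disp ^ *lower;  /* disp with the low 12 bits cleared */
}

int main (void)
{
  long long u, l;
  split_disp (0x12345, &u, &l);
  assert (u == 0x12000 && l == 0x345);
  split_disp (-8189, &u, &l);           /* negative displacement */
  assert (u == -8192 && l == 3 && u + l == -8189);
  return 0;
}
#endif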
3493
0b540f12
UW
3494/* Try a machine-dependent way of reloading an illegitimate address AD
 3495 operand. If we find one, push the reload and return the new address.
3496
3497 MODE is the mode of the enclosing MEM. OPNUM is the operand number
3498 and TYPE is the reload type of the current reload. */
3499
3500rtx
3501legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
3502 int opnum, int type)
3503{
3504 if (!optimize || TARGET_LONG_DISPLACEMENT)
3505 return NULL_RTX;
3506
3507 if (GET_CODE (ad) == PLUS)
3508 {
3509 rtx tem = simplify_binary_operation (PLUS, Pmode,
3510 XEXP (ad, 0), XEXP (ad, 1));
3511 if (tem)
3512 ad = tem;
3513 }
3514
3515 if (GET_CODE (ad) == PLUS
3516 && GET_CODE (XEXP (ad, 0)) == REG
3517 && GET_CODE (XEXP (ad, 1)) == CONST_INT
3518 && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
3519 {
3520 HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
3521 HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
3522 rtx cst, tem, new;
3523
3524 cst = GEN_INT (upper);
3525 if (!legitimate_reload_constant_p (cst))
3526 cst = force_const_mem (Pmode, cst);
3527
3528 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
3529 new = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));
3530
3531 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
3532 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3533 opnum, (enum reload_type) type);
3534 return new;
3535 }
3536
3537 return NULL_RTX;
3538}
3539
a41c6c53
UW
 3540/* Emit code to move LEN bytes from SRC to DST. */
3541
3542void
70128ad9 3543s390_expand_movmem (rtx dst, rtx src, rtx len)
a41c6c53 3544{
a41c6c53
UW
3545 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3546 {
3547 if (INTVAL (len) > 0)
70128ad9 3548 emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
a41c6c53
UW
3549 }
3550
3551 else if (TARGET_MVCLE)
3552 {
70128ad9 3553 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
a41c6c53
UW
3554 }
3555
3556 else
3557 {
3558 rtx dst_addr, src_addr, count, blocks, temp;
70315fcd 3559 rtx loop_start_label = gen_label_rtx ();
6de9cd9a 3560 rtx loop_end_label = gen_label_rtx ();
a41c6c53
UW
3561 rtx end_label = gen_label_rtx ();
3562 enum machine_mode mode;
a41c6c53
UW
3563
3564 mode = GET_MODE (len);
3565 if (mode == VOIDmode)
b9404c99 3566 mode = Pmode;
a41c6c53 3567
a41c6c53
UW
3568 dst_addr = gen_reg_rtx (Pmode);
3569 src_addr = gen_reg_rtx (Pmode);
3570 count = gen_reg_rtx (mode);
3571 blocks = gen_reg_rtx (mode);
3572
3573 convert_move (count, len, 1);
c7453384 3574 emit_cmp_and_jump_insns (count, const0_rtx,
a41c6c53
UW
3575 EQ, NULL_RTX, mode, 1, end_label);
3576
3577 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3578 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
3579 dst = change_address (dst, VOIDmode, dst_addr);
3580 src = change_address (src, VOIDmode, src_addr);
c7453384 3581
3582 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3583 if (temp != count)
3584 emit_move_insn (count, temp);
3585
c9f59991 3586 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
3587 if (temp != blocks)
3588 emit_move_insn (blocks, temp);
3589
3590 emit_cmp_and_jump_insns (blocks, const0_rtx,
3591 EQ, NULL_RTX, mode, 1, loop_end_label);
3592
3593 emit_label (loop_start_label);
a41c6c53 3594
70128ad9 3595 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
c7453384 3596 s390_load_address (dst_addr,
a41c6c53 3597 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
c7453384 3598 s390_load_address (src_addr,
a41c6c53 3599 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
c7453384 3600
3601 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3602 if (temp != blocks)
3603 emit_move_insn (blocks, temp);
3604
3605 emit_cmp_and_jump_insns (blocks, const0_rtx,
3606 EQ, NULL_RTX, mode, 1, loop_end_label);
3607
3608 emit_jump (loop_start_label);
6de9cd9a 3609 emit_label (loop_end_label);
a41c6c53 3610
70128ad9 3611 emit_insn (gen_movmem_short (dst, src,
b9404c99 3612 convert_to_mode (Pmode, count, 1)));
3613 emit_label (end_label);
3614 }
3615}
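/* A hedged sketch of the expansion above for a variable LEN of 1000:
   COUNT = 999 and BLOCKS = COUNT >> 8 = 3, so the loop issues three
   256-byte moves (768 bytes); the trailing movmem_short then moves
   (COUNT & 0xff) + 1 = 232 bytes, assuming the EXecuted MVC consumes
   only the low eight bits of the count register, for 768 + 232 = 1000.  */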
3616
3617/* Emit code to set LEN bytes at DST to VAL.
3618 Make use of clrmem if VAL is zero. */
3619
3620void
6d057022 3621s390_expand_setmem (rtx dst, rtx len, rtx val)
a41c6c53 3622{
3623 if (GET_CODE (len) == CONST_INT && INTVAL (len) == 0)
3624 return;
3625
3626 gcc_assert (GET_CODE (val) == CONST_INT || GET_MODE (val) == QImode);
3627
c9f59991 3628 if (GET_CODE (len) == CONST_INT && INTVAL (len) > 0 && INTVAL (len) <= 257)
a41c6c53 3629 {
6d057022 3630 if (val == const0_rtx && INTVAL (len) <= 256)
70128ad9 3631 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
3632 else
3633 {
3634 /* Initialize memory by storing the first byte. */
3635 emit_move_insn (adjust_address (dst, QImode, 0), val);
3636
3637 if (INTVAL (len) > 1)
3638 {
3639 /* Initiate 1 byte overlap move.
3640 The first byte of DST is propagated through DSTP1.
3641 Prepare a movmem for: DST+1 = DST (length = LEN - 1).
3642 DST is set to size 1 so the rest of the memory location
3643 does not count as source operand. */
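	      /* Illustration (added, not from the original source): for
		 LEN = 4 and VAL = 'x' memory starts as "x???"; because
		 the overlapping move copies byte by byte, DST+1 = DST
		 yields "xx??", then "xxx?", then "xxxx".  */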
3644 rtx dstp1 = adjust_address (dst, VOIDmode, 1);
3645 set_mem_size (dst, const1_rtx);
3646
3647 emit_insn (gen_movmem_short (dstp1, dst,
3648 GEN_INT (INTVAL (len) - 2)));
3649 }
3650 }
3651 }
3652
3653 else if (TARGET_MVCLE)
3654 {
3655 val = force_not_mem (convert_modes (Pmode, QImode, val, 1));
3656 emit_insn (gen_setmem_long (dst, convert_to_mode (Pmode, len, 1), val));
3657 }
3658
3659 else
3660 {
6d057022 3661 rtx dst_addr, src_addr, count, blocks, temp, dstp1 = NULL_RTX;
70315fcd 3662 rtx loop_start_label = gen_label_rtx ();
6de9cd9a 3663 rtx loop_end_label = gen_label_rtx ();
3664 rtx end_label = gen_label_rtx ();
3665 enum machine_mode mode;
3666
3667 mode = GET_MODE (len);
3668 if (mode == VOIDmode)
b9404c99 3669 mode = Pmode;
a41c6c53 3670
3671 dst_addr = gen_reg_rtx (Pmode);
3672 src_addr = gen_reg_rtx (Pmode);
3673 count = gen_reg_rtx (mode);
3674 blocks = gen_reg_rtx (mode);
3675
3676 convert_move (count, len, 1);
c7453384 3677 emit_cmp_and_jump_insns (count, const0_rtx,
3678 EQ, NULL_RTX, mode, 1, end_label);
3679
3680 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3681 dst = change_address (dst, VOIDmode, dst_addr);
c7453384 3682
3683 if (val == const0_rtx)
3684 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3685 else
3686 {
3687 dstp1 = adjust_address (dst, VOIDmode, 1);
3688 set_mem_size (dst, const1_rtx);
3689
3690 /* Initialize memory by storing the first byte. */
3691 emit_move_insn (adjust_address (dst, QImode, 0), val);
3692
3693 /* If count is 1 we are done. */
3694 emit_cmp_and_jump_insns (count, const1_rtx,
3695 EQ, NULL_RTX, mode, 1, end_label);
3696
3697 temp = expand_binop (mode, add_optab, count, GEN_INT (-2), count, 1, 0);
3698 }
3699 if (temp != count)
3700 emit_move_insn (count, temp);
3701
c9f59991 3702 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
3703 if (temp != blocks)
3704 emit_move_insn (blocks, temp);
3705
3706 emit_cmp_and_jump_insns (blocks, const0_rtx,
3707 EQ, NULL_RTX, mode, 1, loop_end_label);
3708
3709 emit_label (loop_start_label);
a41c6c53 3710
3711 if (val == const0_rtx)
3712 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
3713 else
3714 emit_insn (gen_movmem_short (dstp1, dst, GEN_INT (255)));
c7453384 3715 s390_load_address (dst_addr,
a41c6c53 3716 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
c7453384 3717
3718 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3719 if (temp != blocks)
3720 emit_move_insn (blocks, temp);
3721
3722 emit_cmp_and_jump_insns (blocks, const0_rtx,
3723 EQ, NULL_RTX, mode, 1, loop_end_label);
3724
3725 emit_jump (loop_start_label);
6de9cd9a 3726 emit_label (loop_end_label);
a41c6c53 3727
3728 if (val == const0_rtx)
3729 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
3730 else
3731 emit_insn (gen_movmem_short (dstp1, dst, convert_to_mode (Pmode, count, 1)));
3732 emit_label (end_label);
3733 }
3734}
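/* A hedged usage note: a memset (p, 0, n) reaching this expander takes
   the clrmem paths above, while a nonzero fill value uses the
   store-one-byte-then-overlapping-copy strategy in every size class.  */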
3735
3736/* Emit code to compare LEN bytes at OP0 with those at OP1,
3737 and return the result in TARGET. */
3738
3739void
9c808aad 3740s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
a41c6c53 3741{
5b022de5 3742 rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
3743 rtx tmp;
3744
3745 /* As the result of CMPINT is inverted compared to what we need,
3746 we have to swap the operands. */
3747 tmp = op0; op0 = op1; op1 = tmp;
a41c6c53 3748
3749 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3750 {
3751 if (INTVAL (len) > 0)
3752 {
b9404c99 3753 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
02887425 3754 emit_insn (gen_cmpint (target, ccreg));
3755 }
3756 else
3757 emit_move_insn (target, const0_rtx);
3758 }
9dc62c00 3759 else if (TARGET_MVCLE)
a41c6c53 3760 {
b9404c99 3761 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
02887425 3762 emit_insn (gen_cmpint (target, ccreg));
a41c6c53 3763 }
3764 else
3765 {
3766 rtx addr0, addr1, count, blocks, temp;
70315fcd 3767 rtx loop_start_label = gen_label_rtx ();
6de9cd9a 3768 rtx loop_end_label = gen_label_rtx ();
3769 rtx end_label = gen_label_rtx ();
3770 enum machine_mode mode;
3771
3772 mode = GET_MODE (len);
3773 if (mode == VOIDmode)
b9404c99 3774 mode = Pmode;
a41c6c53 3775
3776 addr0 = gen_reg_rtx (Pmode);
3777 addr1 = gen_reg_rtx (Pmode);
3778 count = gen_reg_rtx (mode);
3779 blocks = gen_reg_rtx (mode);
3780
3781 convert_move (count, len, 1);
c7453384 3782 emit_cmp_and_jump_insns (count, const0_rtx,
3783 EQ, NULL_RTX, mode, 1, end_label);
3784
3785 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
3786 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
3787 op0 = change_address (op0, VOIDmode, addr0);
3788 op1 = change_address (op1, VOIDmode, addr1);
c7453384 3789
3790 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3791 if (temp != count)
3792 emit_move_insn (count, temp);
3793
c9f59991 3794 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
3795 if (temp != blocks)
3796 emit_move_insn (blocks, temp);
3797
3798 emit_cmp_and_jump_insns (blocks, const0_rtx,
3799 EQ, NULL_RTX, mode, 1, loop_end_label);
3800
3801 emit_label (loop_start_label);
a41c6c53 3802
b9404c99 3803 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
5b022de5 3804 temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
c7453384 3805 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
3806 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
3807 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
3808 emit_jump_insn (temp);
3809
c7453384 3810 s390_load_address (addr0,
a41c6c53 3811 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
c7453384 3812 s390_load_address (addr1,
a41c6c53 3813 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
c7453384 3814
3815 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3816 if (temp != blocks)
3817 emit_move_insn (blocks, temp);
3818
3819 emit_cmp_and_jump_insns (blocks, const0_rtx,
3820 EQ, NULL_RTX, mode, 1, loop_end_label);
3821
3822 emit_jump (loop_start_label);
6de9cd9a 3823 emit_label (loop_end_label);
a41c6c53 3824
38899e29 3825 emit_insn (gen_cmpmem_short (op0, op1,
b9404c99 3826 convert_to_mode (Pmode, count, 1)));
3827 emit_label (end_label);
3828
02887425 3829 emit_insn (gen_cmpint (target, ccreg));
3830 }
3831}
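/* Hedged note on the loop above: each 256-byte step compares one block
   with CLC and jumps straight to END_LABEL as soon as a block differs,
   so the final CMPINT sees the condition code of the first differing
   (or the last) block when it materializes the result in TARGET.  */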
3832
3833
3834/* Expand conditional increment or decrement using alc/slb instructions.
3835 Should generate code setting DST to either SRC or SRC + INCREMENT,
3836 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
3837 Returns true if successful, false otherwise.
3838
3839 That makes it possible to implement some if-constructs without jumps e.g.:
3840 (borrow = CC0 | CC1 and carry = CC2 | CC3)
3841 unsigned int a, b, c;
3842 if (a < b) c++; -> CCU b > a -> CC2; c += carry;
3843 if (a < b) c--; -> CCL3 a - b -> borrow; c -= borrow;
3844 if (a <= b) c++; -> CCL3 b - a -> borrow; c += carry;
3845 if (a <= b) c--; -> CCU a <= b -> borrow; c -= borrow;
3846
3847 Checks for EQ and NE with a nonzero value need an additional xor e.g.:
3848 if (a == b) c++; -> CCL3 a ^= b; 0 - a -> borrow; c += carry;
3849 if (a == b) c--; -> CCU a ^= b; a <= 0 -> CC0 | CC1; c -= borrow;
3850 if (a != b) c++; -> CCU a ^= b; a > 0 -> CC2; c += carry;
3851 if (a != b) c--; -> CCL3 a ^= b; 0 - a -> borrow; c -= borrow; */
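/* Put differently (a sketch added here, not part of the original
   comment): the transformation rewrites "if (a < b) c++;" as the
   branch-free "c += (a < b);", with the truth value supplied by the
   carry or borrow of an unsigned comparison instead of a jump.  */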
3852
3853bool
3854s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
3855 rtx dst, rtx src, rtx increment)
3856{
3857 enum machine_mode cmp_mode;
3858 enum machine_mode cc_mode;
3859 rtx op_res;
3860 rtx insn;
3861 rtvec p;
8d933e31 3862 int ret;
3863
3864 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
3865 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
3866 cmp_mode = SImode;
3867 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
3868 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
3869 cmp_mode = DImode;
3870 else
3871 return false;
3872
3873 /* Try ADD LOGICAL WITH CARRY. */
3874 if (increment == const1_rtx)
3875 {
3876 /* Determine CC mode to use. */
3877 if (cmp_code == EQ || cmp_code == NE)
3878 {
3879 if (cmp_op1 != const0_rtx)
3880 {
3881 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3882 NULL_RTX, 0, OPTAB_WIDEN);
3883 cmp_op1 = const0_rtx;
3884 }
3885
3886 cmp_code = cmp_code == EQ ? LEU : GTU;
3887 }
3888
3889 if (cmp_code == LTU || cmp_code == LEU)
3890 {
3891 rtx tem = cmp_op0;
3892 cmp_op0 = cmp_op1;
3893 cmp_op1 = tem;
3894 cmp_code = swap_condition (cmp_code);
3895 }
3896
3897 switch (cmp_code)
3898 {
3899 case GTU:
3900 cc_mode = CCUmode;
3901 break;
3902
3903 case GEU:
3904 cc_mode = CCL3mode;
3905 break;
3906
3907 default:
3908 return false;
3909 }
3910
3911 /* Emit comparison instruction pattern. */
3912 if (!register_operand (cmp_op0, cmp_mode))
3913 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3914
3915 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3916 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3917 /* We use insn_invalid_p here to add clobbers if required. */
3918 ret = insn_invalid_p (emit_insn (insn));
3919 gcc_assert (!ret);
3920
3921 /* Emit ALC instruction pattern. */
3922 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3923 gen_rtx_REG (cc_mode, CC_REGNUM),
3924 const0_rtx);
3925
3926 if (src != const0_rtx)
3927 {
3928 if (!register_operand (src, GET_MODE (dst)))
3929 src = force_reg (GET_MODE (dst), src);
3930
3931 src = gen_rtx_PLUS (GET_MODE (dst), src, const0_rtx);
3932 op_res = gen_rtx_PLUS (GET_MODE (dst), src, op_res);
3933 }
3934
3935 p = rtvec_alloc (2);
3936 RTVEC_ELT (p, 0) =
3937 gen_rtx_SET (VOIDmode, dst, op_res);
3938 RTVEC_ELT (p, 1) =
3939 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3940 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3941
3942 return true;
3943 }
3944
3945 /* Try SUBTRACT LOGICAL WITH BORROW. */
3946 if (increment == constm1_rtx)
3947 {
3948 /* Determine CC mode to use. */
3949 if (cmp_code == EQ || cmp_code == NE)
3950 {
3951 if (cmp_op1 != const0_rtx)
3952 {
3953 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3954 NULL_RTX, 0, OPTAB_WIDEN);
3955 cmp_op1 = const0_rtx;
3956 }
3957
3958 cmp_code = cmp_code == EQ ? LEU : GTU;
3959 }
3960
3961 if (cmp_code == GTU || cmp_code == GEU)
3962 {
3963 rtx tem = cmp_op0;
3964 cmp_op0 = cmp_op1;
3965 cmp_op1 = tem;
3966 cmp_code = swap_condition (cmp_code);
3967 }
3968
3969 switch (cmp_code)
3970 {
3971 case LEU:
3972 cc_mode = CCUmode;
3973 break;
3974
3975 case LTU:
3976 cc_mode = CCL3mode;
3977 break;
3978
3979 default:
3980 return false;
3981 }
3982
3983 /* Emit comparison instruction pattern. */
3984 if (!register_operand (cmp_op0, cmp_mode))
3985 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3986
3987 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3988 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3989 /* We use insn_invalid_p here to add clobbers if required. */
3990 ret = insn_invalid_p (emit_insn (insn));
3991 gcc_assert (!ret);
3992
3993 /* Emit SLB instruction pattern. */
3994 if (!register_operand (src, GET_MODE (dst)))
3995 src = force_reg (GET_MODE (dst), src);
3996
3997 op_res = gen_rtx_MINUS (GET_MODE (dst),
3998 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
3999 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
4000 gen_rtx_REG (cc_mode, CC_REGNUM),
4001 const0_rtx));
4002 p = rtvec_alloc (2);
4003 RTVEC_ELT (p, 0) =
4004 gen_rtx_SET (VOIDmode, dst, op_res);
4005 RTVEC_ELT (p, 1) =
4006 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
4007 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
4008
4009 return true;
4010 }
4011
4012 return false;
4013}
4014
4015/* Expand code for the insv template. Return true if successful, false otherwise. */
4016
4017bool
4018s390_expand_insv (rtx dest, rtx op1, rtx op2, rtx src)
4019{
4020 int bitsize = INTVAL (op1);
4021 int bitpos = INTVAL (op2);
4022
c83eecad 4023 /* We need byte alignment. */
4024 if (bitsize % BITS_PER_UNIT)
4025 return false;
4026
4027 if (bitpos == 0
4028 && memory_operand (dest, VOIDmode)
4029 && (register_operand (src, word_mode)
4030 || const_int_operand (src, VOIDmode)))
4031 {
4032 /* Emit standard pattern if possible. */
4033 enum machine_mode mode = smallest_mode_for_size (bitsize, MODE_INT);
4034 if (GET_MODE_BITSIZE (mode) == bitsize)
4035 emit_move_insn (adjust_address (dest, mode, 0), gen_lowpart (mode, src));
4036
4037 /* (set (ze (mem)) (const_int)). */
4038 else if (const_int_operand (src, VOIDmode))
4039 {
4040 int size = bitsize / BITS_PER_UNIT;
4041 rtx src_mem = adjust_address (force_const_mem (word_mode, src), BLKmode,
4042 GET_MODE_SIZE (word_mode) - size);
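	  /* Hedged example: for BITSIZE = 24, SIZE is 3, and with an
	     8-byte word_mode on this big-endian target SRC_MEM points at
	     the last three bytes of the pooled constant, i.e. its 24
	     low-order bits.  */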
4043
4044 dest = adjust_address (dest, BLKmode, 0);
4045 set_mem_size (dest, GEN_INT (size));
4046 s390_expand_movmem (dest, src_mem, GEN_INT (size));
4047 }
4048
4049 /* (set (ze (mem)) (reg)). */
4050 else if (register_operand (src, word_mode))
4051 {
4052 if (bitsize <= GET_MODE_BITSIZE (SImode))
4053 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, op1,
4054 const0_rtx), src);
4055 else
4056 {
4057 /* Emit st,stcmh sequence. */
4058 int stcmh_width = bitsize - GET_MODE_BITSIZE (SImode);
4059 int size = stcmh_width / BITS_PER_UNIT;
4060
4061 emit_move_insn (adjust_address (dest, SImode, size),
4062 gen_lowpart (SImode, src));
4063 set_mem_size (dest, GEN_INT (size));
4064 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, GEN_INT
4065 (stcmh_width), const0_rtx),
4066 gen_rtx_LSHIFTRT (word_mode, src, GEN_INT
4067 (GET_MODE_BITSIZE (SImode))));
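	      /* Hedged example: for BITSIZE = 48, STCMH_WIDTH = 16 and
		 SIZE = 2, so the store above places the low 32 bits of
		 SRC at DEST+2 and the zero_extract stores the remaining
		 high 16 bits at DEST.  */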
4068 }
4069 }
4070 else
4071 return false;
4072
4073 return true;
4074 }
4075
4076 /* (set (ze (reg)) (const_int)). */
4077 if (TARGET_ZARCH
4078 && register_operand (dest, word_mode)
4079 && (bitpos % 16) == 0
4080 && (bitsize % 16) == 0
4081 && const_int_operand (src, VOIDmode))
4082 {
4083 HOST_WIDE_INT val = INTVAL (src);
4084 int regpos = bitpos + bitsize;
4085
4086 while (regpos > bitpos)
4087 {
4088 enum machine_mode putmode;
4089 int putsize;
4090
4091 if (TARGET_EXTIMM && (regpos % 32 == 0) && (regpos >= bitpos + 32))
4092 putmode = SImode;
4093 else
4094 putmode = HImode;
4095
4096 putsize = GET_MODE_BITSIZE (putmode);
4097 regpos -= putsize;
4098 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
4099 GEN_INT (putsize),
4100 GEN_INT (regpos)),
4101 gen_int_mode (val, putmode));
4102 val >>= putsize;
4103 }
4104 gcc_assert (regpos == bitpos);
4105 return true;
4106 }
4107
4108 return false;
4109}
5d880bd2 4110
4111/* A subroutine of s390_expand_cs_hqi and s390_expand_atomic which returns a
4112 register that holds VAL of mode MODE shifted by COUNT bits. */
4113
4114static inline rtx
4115s390_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
4116{
4117 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
4118 NULL_RTX, 1, OPTAB_DIRECT);
4119 return expand_simple_binop (SImode, ASHIFT, val, count,
4120 NULL_RTX, 1, OPTAB_DIRECT);
4121}
4122
4123/* Structure to hold the initial parameters for a compare_and_swap operation
4124 in HImode and QImode. */
4125
4126struct alignment_context
4127{
4128 rtx memsi; /* SI aligned memory location. */
4129 rtx shift; /* Bit offset with regard to lsb. */
4130 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */
4131 rtx modemaski; /* ~modemask */
6416ae7f 4132 bool aligned; /* True if memory is aligned, false otherwise. */
4133};
4134
4135/* A subroutine of s390_expand_cs_hqi and s390_expand_atomic to initialize
4136 structure AC for transparent simplification, if the memory alignment is known
4137 to be at least 32 bit. MEM is the memory location for the actual operation
4138 and MODE its mode. */
4139
4140static void
4141init_alignment_context (struct alignment_context *ac, rtx mem,
4142 enum machine_mode mode)
4143{
4144 ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
4145 ac->aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
4146
4147 if (ac->aligned)
4148 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */
4149 else
4150 {
4151 /* Alignment is unknown. */
4152 rtx byteoffset, addr, align;
4153
4154 /* Force the address into a register. */
4155 addr = force_reg (Pmode, XEXP (mem, 0));
4156
4157 /* Align it to SImode. */
4158 align = expand_simple_binop (Pmode, AND, addr,
4159 GEN_INT (-GET_MODE_SIZE (SImode)),
4160 NULL_RTX, 1, OPTAB_DIRECT);
4161 /* Generate MEM. */
4162 ac->memsi = gen_rtx_MEM (SImode, align);
4163 MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
44d64274 4164 set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
4165 set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));
4166
4167 /* Calculate shiftcount. */
4168 byteoffset = expand_simple_binop (Pmode, AND, addr,
4169 GEN_INT (GET_MODE_SIZE (SImode) - 1),
4170 NULL_RTX, 1, OPTAB_DIRECT);
4171 /* As we already have some offset, evaluate the remaining distance. */
4172 ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
4173 NULL_RTX, 1, OPTAB_DIRECT);
4174
4175 }
4176 /* Shift is the byte count, but we need the bitcount. */
4177 ac->shift = expand_simple_binop (SImode, MULT, ac->shift, GEN_INT (BITS_PER_UNIT),
4178 NULL_RTX, 1, OPTAB_DIRECT);
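  /* Hedged worked example: for a QImode operand at address 0x1003,
     MEMSI covers 0x1000, BYTEOFFSET is 3 and SHIFT becomes
     (3 - 3) * 8 = 0 bits, i.e. on this big-endian target the byte
     occupies the least significant bits of the SImode word.  */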
4179 /* Calculate masks. */
4180 ac->modemask = expand_simple_binop (SImode, ASHIFT,
4181 GEN_INT (GET_MODE_MASK (mode)), ac->shift,
4182 NULL_RTX, 1, OPTAB_DIRECT);
4183 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1);
4184}
4185
4186/* Expand an atomic compare and swap operation for HImode and QImode. MEM is
4187 the memory location, CMP the old value to compare MEM with, and NEW the value
4188 to set if CMP == MEM.
4189 CMP is never in memory for compare_and_swap_cc because
4190 expand_bool_compare_and_swap puts it into a register for later compare. */
4191
4192void
4193s390_expand_cs_hqi (enum machine_mode mode, rtx target, rtx mem, rtx cmp, rtx new)
4194{
4195 struct alignment_context ac;
4196 rtx cmpv, newv, val, resv, cc;
4197 rtx res = gen_reg_rtx (SImode);
4198 rtx csloop = gen_label_rtx ();
4199 rtx csend = gen_label_rtx ();
4200
4201 gcc_assert (register_operand (target, VOIDmode));
4202 gcc_assert (MEM_P (mem));
4203
4204 init_alignment_context (&ac, mem, mode);
4205
4206 /* Shift the values to the correct bit positions. */
4207 if (!(ac.aligned && MEM_P (cmp)))
4208 cmp = s390_expand_mask_and_shift (cmp, mode, ac.shift);
4209 if (!(ac.aligned && MEM_P (new)))
4210 new = s390_expand_mask_and_shift (new, mode, ac.shift);
4211
4212 /* Load full word. Subsequent loads are performed by CS. */
4213 val = expand_simple_binop (SImode, AND, ac.memsi, ac.modemaski,
4214 NULL_RTX, 1, OPTAB_DIRECT);
4215
4216 /* Start CS loop. */
4217 emit_label (csloop);
4218 /* val = "<mem>00..0<mem>"
4219 * cmp = "00..0<cmp>00..0"
4220 * new = "00..0<new>00..0"
4221 */
4222
4223 /* Patch cmp and new with val at correct position. */
4224 if (ac.aligned && MEM_P (cmp))
4225 {
4226 cmpv = force_reg (SImode, val);
4227 store_bit_field (cmpv, GET_MODE_BITSIZE (mode), 0, SImode, cmp);
4228 }
4229 else
4230 cmpv = force_reg (SImode, expand_simple_binop (SImode, IOR, cmp, val,
4231 NULL_RTX, 1, OPTAB_DIRECT));
4232 if (ac.aligned && MEM_P (new))
4233 {
4234 newv = force_reg (SImode, val);
4235 store_bit_field (newv, GET_MODE_BITSIZE (mode), 0, SImode, new);
4236 }
4237 else
4238 newv = force_reg (SImode, expand_simple_binop (SImode, IOR, new, val,
4239 NULL_RTX, 1, OPTAB_DIRECT));
4240
3093f076 4241 /* Jump to end if we're done (likely?). */
4242 s390_emit_jump (csend, s390_emit_compare_and_swap (EQ, res, ac.memsi,
4243 cmpv, newv));
4244
4245 /* Check for changes outside mode. */
4246 resv = expand_simple_binop (SImode, AND, res, ac.modemaski,
4247 NULL_RTX, 1, OPTAB_DIRECT);
4248 cc = s390_emit_compare (NE, resv, val);
4249 emit_move_insn (val, resv);
4250 /* Loop internal if so. */
4251 s390_emit_jump (csloop, cc);
4252
4253 emit_label (csend);
4254
4255 /* Return the correct part of the bitfield. */
4256 convert_move (target, expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
4257 NULL_RTX, 1, OPTAB_DIRECT), 1);
4258}
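/* Hedged usage note: compare-and-swap builtins on char and short
   operands are routed through this expander; the loop above retries
   whenever bytes outside the HImode/QImode field changed concurrently,
   since CS always operates on the full aligned word.  */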
4259
45d18331 4260/* Expand an atomic operation CODE of mode MODE. MEM is the memory location
ea2c620c 4261 and VAL the value to play with. If AFTER is true then store the value
4262 MEM holds after the operation, if AFTER is false then store the value MEM
4263 holds before the operation. If TARGET is zero then discard that value, else
4264 store it to TARGET. */
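/* A hedged mapping to the __sync builtins: AFTER = true corresponds to
   op-and-fetch semantics and AFTER = false to fetch-and-op; CODE = SET
   yields an atomic exchange, and MULT is used below to encode NAND.  */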
4265
4266void
4267s390_expand_atomic (enum machine_mode mode, enum rtx_code code,
4268 rtx target, rtx mem, rtx val, bool after)
4269{
4270 struct alignment_context ac;
4271 rtx cmp;
4272 rtx new = gen_reg_rtx (SImode);
4273 rtx orig = gen_reg_rtx (SImode);
4274 rtx csloop = gen_label_rtx ();
4275
4276 gcc_assert (!target || register_operand (target, VOIDmode));
4277 gcc_assert (MEM_P (mem));
4278
4279 init_alignment_context (&ac, mem, mode);
4280
4281 /* Shift val to the correct bit positions.
4282 Preserve "icm", but prevent "ex icm". */
4283 if (!(ac.aligned && code == SET && MEM_P (val)))
4284 val = s390_expand_mask_and_shift (val, mode, ac.shift);
4285
4286 /* Further preparation insns. */
4287 if (code == PLUS || code == MINUS)
4288 emit_move_insn (orig, val);
4289 else if (code == MULT || code == AND) /* val = "11..1<val>11..1" */
4290 val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
4291 NULL_RTX, 1, OPTAB_DIRECT);
4292
4293 /* Load full word. Subsequent loads are performed by CS. */
4294 cmp = force_reg (SImode, ac.memsi);
4295
4296 /* Start CS loop. */
4297 emit_label (csloop);
4298 emit_move_insn (new, cmp);
4299
4300 /* Patch new with val at correct position. */
4301 switch (code)
4302 {
4303 case PLUS:
4304 case MINUS:
4305 val = expand_simple_binop (SImode, code, new, orig,
4306 NULL_RTX, 1, OPTAB_DIRECT);
4307 val = expand_simple_binop (SImode, AND, val, ac.modemask,
4308 NULL_RTX, 1, OPTAB_DIRECT);
4309 /* FALLTHRU */
4310 case SET:
4311 if (ac.aligned && MEM_P (val))
4312 store_bit_field (new, GET_MODE_BITSIZE (mode), 0, SImode, val);
4313 else
4314 {
4315 new = expand_simple_binop (SImode, AND, new, ac.modemaski,
4316 NULL_RTX, 1, OPTAB_DIRECT);
4317 new = expand_simple_binop (SImode, IOR, new, val,
4318 NULL_RTX, 1, OPTAB_DIRECT);
4319 }
4320 break;
4321 case AND:
4322 case IOR:
4323 case XOR:
4324 new = expand_simple_binop (SImode, code, new, val,
4325 NULL_RTX, 1, OPTAB_DIRECT);
4326 break;
4327 case MULT: /* NAND */
4328 new = expand_simple_binop (SImode, XOR, new, ac.modemask,
4329 NULL_RTX, 1, OPTAB_DIRECT);
4330 new = expand_simple_binop (SImode, AND, new, val,
4331 NULL_RTX, 1, OPTAB_DIRECT);
4332 break;
4333 default:
4334 gcc_unreachable ();
4335 }
45d18331 4336
4337 s390_emit_jump (csloop, s390_emit_compare_and_swap (NE, cmp,
4338 ac.memsi, cmp, new));
4339
4340 /* Return the correct part of the bitfield. */
4341 if (target)
4342 convert_move (target, expand_simple_binop (SImode, LSHIFTRT,
4343 after ? new : cmp, ac.shift,
4344 NULL_RTX, 1, OPTAB_DIRECT), 1);
4345}
4346
fdbe66f2 4347/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
4348 We need to emit DTP-relative relocations. */
4349
4350static void s390_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
4351
4352static void
9c808aad 4353s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
4354{
4355 switch (size)
4356 {
4357 case 4:
4358 fputs ("\t.long\t", file);
4359 break;
4360 case 8:
4361 fputs ("\t.quad\t", file);
4362 break;
4363 default:
8d933e31 4364 gcc_unreachable ();
4365 }
4366 output_addr_const (file, x);
4367 fputs ("@DTPOFF", file);
4368}
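/* Example output (hedged): for SIZE == 8 and a symbol foo this emits
	.quad	foo@DTPOFF
   into the assembly file.  */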
4369
7269aee7 4370#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
608063c3 4371/* Implement TARGET_MANGLE_TYPE. */
4372
4373static const char *
608063c3 4374s390_mangle_type (tree type)
4375{
4376 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
4377 && TARGET_LONG_DOUBLE_128)
4378 return "g";
4379
4380 /* For all other types, use normal C++ mangling. */
4381 return NULL;
4382}
4383#endif
4384
4c8c0dec 4385/* In the name of slightly smaller debug output, and to cater to
aabcd309 4386 general assembler lossage, recognize various UNSPEC sequences
4387 and turn them back into a direct symbol reference. */
4388
69bd9368 4389static rtx
9c808aad 4390s390_delegitimize_address (rtx orig_x)
4391{
4392 rtx x = orig_x, y;
4393
4394 if (GET_CODE (x) != MEM)
4395 return orig_x;
4396
4397 x = XEXP (x, 0);
4398 if (GET_CODE (x) == PLUS
4399 && GET_CODE (XEXP (x, 1)) == CONST
4400 && GET_CODE (XEXP (x, 0)) == REG
4401 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
4402 {
4403 y = XEXP (XEXP (x, 1), 0);
4404 if (GET_CODE (y) == UNSPEC
fd7643fb 4405 && XINT (y, 1) == UNSPEC_GOT)
4406 return XVECEXP (y, 0, 0);
4407 return orig_x;
4408 }
4409
4410 if (GET_CODE (x) == CONST)
4411 {
4412 y = XEXP (x, 0);
4413 if (GET_CODE (y) == UNSPEC
fd7643fb 4414 && XINT (y, 1) == UNSPEC_GOTENT)
4415 return XVECEXP (y, 0, 0);
4416 return orig_x;
4417 }
4418
c7453384 4419 return orig_x;
4c8c0dec 4420}
ba956982 4421
4422/* Output operand OP to stdio stream FILE.
4423 OP is an address (register + offset) which is not used to address data;
4424 instead the rightmost bits are interpreted as the value. */
4425
4426static void
4427print_shift_count_operand (FILE *file, rtx op)
4428{
4429 HOST_WIDE_INT offset;
4430 rtx base;
f83a336d 4431
d98ad410 4432 /* Extract base register and offset. */
4989e88a 4433 if (!s390_decompose_shift_count (op, &base, &offset))
d98ad410 4434 gcc_unreachable ();
4435
4436 /* Sanity check. */
d98ad410 4437 if (base)
8d933e31 4438 {
4439 gcc_assert (GET_CODE (base) == REG);
4440 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
4441 gcc_assert (REGNO_REG_CLASS (REGNO (base)) == ADDR_REGS);
8d933e31 4442 }
ac32b25e 4443
4444 /* Offsets are restricted to twelve bits. */
4445 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & ((1 << 12) - 1));
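 /* E.g. (a hedged illustration): an offset of 4100 is printed as 4,
    since 4100 & 4095 = 4.  */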
4446 if (base)
4447 fprintf (file, "(%s)", reg_names[REGNO (base)]);
4448}
4449
ab96de7e 4450/* See 'get_some_local_dynamic_name'. */
4451
4452static int
9c808aad 4453get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
4454{
4455 rtx x = *px;
4456
4457 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
4458 {
4459 x = get_pool_constant (x);
4460 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
4461 }
4462
4463 if (GET_CODE (x) == SYMBOL_REF
4464 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
4465 {
4466 cfun->machine->some_ld_name = XSTR (x, 0);
4467 return 1;
4468 }
4469
4470 return 0;
4471}
4472
4473/* Locate some local-dynamic symbol still in use by this function
4474 so that we can print its name in local-dynamic base patterns. */
4475
4476static const char *
4477get_some_local_dynamic_name (void)
4478{
4479 rtx insn;
4480
4481 if (cfun->machine->some_ld_name)
4482 return cfun->machine->some_ld_name;
4483
4484 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
4485 if (INSN_P (insn)
4486 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
4487 return cfun->machine->some_ld_name;
4488
8d933e31 4489 gcc_unreachable ();
4490}
4491
38899e29 4492/* Output machine-dependent UNSPECs occurring in address constant X
4493 in assembler syntax to stdio stream FILE. Returns true if the
4494 constant X could be recognized, false otherwise. */
9db1d521 4495
4496bool
4497s390_output_addr_const_extra (FILE *file, rtx x)
9db1d521 4498{
4499 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
4500 switch (XINT (x, 1))
4501 {
4502 case UNSPEC_GOTENT:
4503 output_addr_const (file, XVECEXP (x, 0, 0));
4504 fprintf (file, "@GOTENT");
4505 return true;
4506 case UNSPEC_GOT:
4507 output_addr_const (file, XVECEXP (x, 0, 0));
4508 fprintf (file, "@GOT");
4509 return true;
4510 case UNSPEC_GOTOFF:
4511 output_addr_const (file, XVECEXP (x, 0, 0));
4512 fprintf (file, "@GOTOFF");
4513 return true;
4514 case UNSPEC_PLT:
4515 output_addr_const (file, XVECEXP (x, 0, 0));
4516 fprintf (file, "@PLT");
4517 return true;
4518 case UNSPEC_PLTOFF:
4519 output_addr_const (file, XVECEXP (x, 0, 0));
4520 fprintf (file, "@PLTOFF");
4521 return true;
4522 case UNSPEC_TLSGD:
4523 output_addr_const (file, XVECEXP (x, 0, 0));
4524 fprintf (file, "@TLSGD");
4525 return true;
4526 case UNSPEC_TLSLDM:
4527 assemble_name (file, get_some_local_dynamic_name ());
4528 fprintf (file, "@TLSLDM");
4529 return true;
4530 case UNSPEC_DTPOFF:
4531 output_addr_const (file, XVECEXP (x, 0, 0));
4532 fprintf (file, "@DTPOFF");
4533 return true;
4534 case UNSPEC_NTPOFF:
4535 output_addr_const (file, XVECEXP (x, 0, 0));
4536 fprintf (file, "@NTPOFF");
4537 return true;
4538 case UNSPEC_GOTNTPOFF:
4539 output_addr_const (file, XVECEXP (x, 0, 0));
4540 fprintf (file, "@GOTNTPOFF");
4541 return true;
4542 case UNSPEC_INDNTPOFF:
4543 output_addr_const (file, XVECEXP (x, 0, 0));
4544 fprintf (file, "@INDNTPOFF");
4545 return true;
4546 }
9db1d521 4547
faeb9bb6 4548 return false;
4549}
4550
c7453384 4551/* Output address operand ADDR in assembler syntax to
994fe660 4552 stdio stream FILE. */
4553
4554void
9c808aad 4555print_operand_address (FILE *file, rtx addr)
4556{
4557 struct s390_address ad;
4558
b808c04c 4559 if (!s390_decompose_address (addr, &ad)
4560 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
4561 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
c85ce869 4562 output_operand_lossage ("cannot decompose address");
c7453384 4563
9db1d521 4564 if (ad.disp)
faeb9bb6 4565 output_addr_const (file, ad.disp);
4566 else
4567 fprintf (file, "0");
4568
4569 if (ad.base && ad.indx)
4570 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
4571 reg_names[REGNO (ad.base)]);
4572 else if (ad.base)
4573 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4574}
4575
4576/* Output operand X in assembler syntax to stdio stream FILE.
4577 CODE specifies the format flag. The following format flags
4578 are recognized:
4579
4580 'C': print opcode suffix for branch condition.
4581 'D': print opcode suffix for inverse branch condition.
fd3cd001 4582 'J': print tls_load/tls_gdcall/tls_ldcall suffix
7b8acc34 4583 'G': print the size of the operand in bytes.
4584 'O': print only the displacement of a memory reference.
4585 'R': print only the base register of a memory reference.
fc0ea003 4586 'S': print S-type memory reference (base+displacement).
4587 'N': print the second word of a DImode operand.
4588 'M': print the second word of a TImode operand.
ac32b25e 4589 'Y': print shift count operand.
994fe660 4590
5519a4f9 4591 'b': print integer X as if it's an unsigned byte.
4592 'x': print integer X as if it's an unsigned halfword.
4593 'h': print integer X as if it's a signed halfword.
f19a9af7 4594 'i': print the first nonzero HImode part of X.
4595 'j': print the first HImode part unequal to -1 of X.
4596 'k': print the first nonzero SImode part of X.
4597 'm': print the first SImode part unequal to -1 of X.
4598 'o': print integer X as if it's an unsigned 32bit word. */
4599
4600void
9c808aad 4601print_operand (FILE *file, rtx x, int code)
4602{
4603 switch (code)
4604 {
4605 case 'C':
ba956982 4606 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
4607 return;
4608
4609 case 'D':
ba956982 4610 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
4611 return;
4612
4613 case 'J':
4614 if (GET_CODE (x) == SYMBOL_REF)
4615 {
4616 fprintf (file, "%s", ":tls_load:");
4617 output_addr_const (file, x);
4618 }
4619 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
4620 {
4621 fprintf (file, "%s", ":tls_gdcall:");
4622 output_addr_const (file, XVECEXP (x, 0, 0));
4623 }
4624 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
4625 {
4626 fprintf (file, "%s", ":tls_ldcall:");
4627 assemble_name (file, get_some_local_dynamic_name ());
4628 }
4629 else
8d933e31 4630 gcc_unreachable ();
4631 return;
4632
4633 case 'G':
4634 fprintf (file, "%u", GET_MODE_SIZE (GET_MODE (x)));
4635 return;
4636
4637 case 'O':
4638 {
4639 struct s390_address ad;
8d933e31 4640 int ret;
9db1d521 4641
4642 gcc_assert (GET_CODE (x) == MEM);
4643 ret = s390_decompose_address (XEXP (x, 0), &ad);
4644 gcc_assert (ret);
93fa8428 4645 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
8d933e31 4646 gcc_assert (!ad.indx);
4647
4648 if (ad.disp)
faeb9bb6 4649 output_addr_const (file, ad.disp);
4650 else
4651 fprintf (file, "0");
4652 }
4653 return;
4654
4655 case 'R':
4656 {
4657 struct s390_address ad;
8d933e31 4658 int ret;
9db1d521 4659
4660 gcc_assert (GET_CODE (x) == MEM);
4661 ret = s390_decompose_address (XEXP (x, 0), &ad);
4662 gcc_assert (ret);
93fa8428 4663 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
8d933e31 4664 gcc_assert (!ad.indx);
4665
4666 if (ad.base)
4667 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
4668 else
4669 fprintf (file, "0");
4670 }
4671 return;
4672
fc0ea003
UW
4673 case 'S':
4674 {
4675 struct s390_address ad;
8d933e31 4676 int ret;
fc0ea003 4677
4678 gcc_assert (GET_CODE (x) == MEM);
4679 ret = s390_decompose_address (XEXP (x, 0), &ad);
4680 gcc_assert (ret);
93fa8428 4681 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
8d933e31 4682 gcc_assert (!ad.indx);
4683
4684 if (ad.disp)
4685 output_addr_const (file, ad.disp);
4686 else
4687 fprintf (file, "0");
4688
4689 if (ad.base)
4690 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4691 }
4692 return;
4693
4694 case 'N':
4695 if (GET_CODE (x) == REG)
4696 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4697 else if (GET_CODE (x) == MEM)
4698 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
4699 else
8d933e31 4700 gcc_unreachable ();
4701 break;
4702
4703 case 'M':
4704 if (GET_CODE (x) == REG)
4705 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4706 else if (GET_CODE (x) == MEM)
4707 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
4708 else
8d933e31 4709 gcc_unreachable ();
9db1d521 4710 break;
4711
4712 case 'Y':
4713 print_shift_count_operand (file, x);
4714 return;
4715 }
4716
4717 switch (GET_CODE (x))
4718 {
4719 case REG:
4720 fprintf (file, "%s", reg_names[REGNO (x)]);
4721 break;
4722
4723 case MEM:
4724 output_address (XEXP (x, 0));
4725 break;
4726
4727 case CONST:
4728 case CODE_LABEL:
4729 case LABEL_REF:
4730 case SYMBOL_REF:
faeb9bb6 4731 output_addr_const (file, x);
4732 break;
4733
4734 case CONST_INT:
4735 if (code == 'b')
4736 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
4737 else if (code == 'x')
4738 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
4739 else if (code == 'h')
4740 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
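	/* The xor/subtract idiom above sign-extends the low 16 bits;
	   e.g. 0xfffe -> (0xfffe ^ 0x8000) - 0x8000 = -2 (an
	   illustration added here, not in the original).  */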
f19a9af7 4741 else if (code == 'i')
38899e29 4742 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4743 s390_extract_part (x, HImode, 0));
4744 else if (code == 'j')
4745 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4746 s390_extract_part (x, HImode, -1));
4747 else if (code == 'k')
4748 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4749 s390_extract_part (x, SImode, 0));
4750 else if (code == 'm')
4751 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4752 s390_extract_part (x, SImode, -1));
4753 else if (code == 'o')
4754 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffffffff);
4755 else
4756 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
4757 break;
4758
4759 case CONST_DOUBLE:
8d933e31 4760 gcc_assert (GET_MODE (x) == VOIDmode);
4761 if (code == 'b')
4762 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
9db1d521 4763 else if (code == 'x')
4023fb28 4764 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
9db1d521 4765 else if (code == 'h')
4023fb28 4766 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
9db1d521 4767 else
8d933e31 4768 gcc_unreachable ();
4769 break;
4770
4771 default:
4772 fatal_insn ("UNKNOWN in print_operand !?", x);
4773 break;
4774 }
4775}
4776
4777/* Target hook for assembling integer objects. We need to define it
4778 here to work around a bug in some versions of GAS, which couldn't
4779 handle values smaller than INT_MIN when printed in decimal. */
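/* For instance (a hedged illustration): such a value is then emitted as
	.quad	0x8000000000000000
   rather than as a decimal literal that those assemblers reject.  */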
4780
4781static bool
9c808aad 4782s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
4783{
4784 if (size == 8 && aligned_p
4785 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
4786 {
4787 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
4788 INTVAL (x));
4789 return true;
4790 }
4791 return default_assemble_integer (x, size, aligned_p);
4792}
4793
c7453384 4794/* Returns true if register REGNO is used for forming
994fe660 4795 a memory address in expression X. */
9db1d521 4796
3ed99cc9 4797static bool
9c808aad 4798reg_used_in_mem_p (int regno, rtx x)
4799{
4800 enum rtx_code code = GET_CODE (x);
4801 int i, j;
4802 const char *fmt;
c7453384 4803
4804 if (code == MEM)
4805 {
4806 if (refers_to_regno_p (regno, regno+1,
4807 XEXP (x, 0), 0))
3ed99cc9 4808 return true;
9db1d521 4809 }
c7453384 4810 else if (code == SET
4811 && GET_CODE (SET_DEST (x)) == PC)
4812 {
4813 if (refers_to_regno_p (regno, regno+1,
4814 SET_SRC (x), 0))
3ed99cc9 4815 return true;
4023fb28 4816 }
4817
4818 fmt = GET_RTX_FORMAT (code);
4819 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4820 {
4821 if (fmt[i] == 'e'
4822 && reg_used_in_mem_p (regno, XEXP (x, i)))
3ed99cc9 4823 return true;
c7453384 4824
4825 else if (fmt[i] == 'E')
4826 for (j = 0; j < XVECLEN (x, i); j++)
4827 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
3ed99cc9 4828 return true;
9db1d521 4829 }
3ed99cc9 4830 return false;
4831}
4832
d65f7478 4833/* Returns true if expression DEP_RTX sets an address register
994fe660 4834 used by instruction INSN to address memory. */
9db1d521 4835
3ed99cc9 4836static bool
9c808aad 4837addr_generation_dependency_p (rtx dep_rtx, rtx insn)
9db1d521 4838{
4023fb28 4839 rtx target, pat;
9db1d521 4840
4841 if (GET_CODE (dep_rtx) == INSN)
4842 dep_rtx = PATTERN (dep_rtx);
4843
9db1d521
HP
4844 if (GET_CODE (dep_rtx) == SET)
4845 {
4846 target = SET_DEST (dep_rtx);
4847 if (GET_CODE (target) == STRICT_LOW_PART)
4848 target = XEXP (target, 0);
4849 while (GET_CODE (target) == SUBREG)
4850 target = SUBREG_REG (target);
4851
4852 if (GET_CODE (target) == REG)
4853 {
4854 int regno = REGNO (target);
4855
077dab3b 4856 if (s390_safe_attr_type (insn) == TYPE_LA)
4857 {
4858 pat = PATTERN (insn);
4859 if (GET_CODE (pat) == PARALLEL)
4860 {
8d933e31 4861 gcc_assert (XVECLEN (pat, 0) == 2);
4862 pat = XVECEXP (pat, 0, 0);
4863 }
4864 gcc_assert (GET_CODE (pat) == SET);
4865 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
4023fb28 4866 }
077dab3b 4867 else if (get_attr_atype (insn) == ATYPE_AGEN)
4868 return reg_used_in_mem_p (regno, PATTERN (insn));
4869 }
9db1d521 4870 }
3ed99cc9 4871 return false;
4872}
4873
4874/* Return 1, if dep_insn sets register used in insn in the agen unit. */
4875
c7453384 4876int
9c808aad 4877s390_agen_dep_p (rtx dep_insn, rtx insn)
c7453384 4878{
4879 rtx dep_rtx = PATTERN (dep_insn);
4880 int i;
4881
4882 if (GET_CODE (dep_rtx) == SET
4883 && addr_generation_dependency_p (dep_rtx, insn))
4884 return 1;
4885 else if (GET_CODE (dep_rtx) == PARALLEL)
4886 {
4887 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
4888 {
4889 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
4890 return 1;
4891 }
4892 }
4893 return 0;
4894}
4895
4896/* A C statement (sans semicolon) to update the integer scheduling priority
4897 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
4898 reduce the priority to execute INSN later. Do not define this macro if
c7453384 4899 you do not need to adjust the scheduling priorities of insns.
52609473 4900
c7453384 4901 A STD instruction should be scheduled earlier,
52609473
HP
4902 in order to use the bypass. */
4903
4904static int
9c808aad 4905s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
4906{
4907 if (! INSN_P (insn))
4908 return priority;
4909
4910 if (s390_tune != PROCESSOR_2084_Z990
4911 && s390_tune != PROCESSOR_2094_Z9_109)
4912 return priority;
4913
4914 switch (s390_safe_attr_type (insn))
4915 {
4916 case TYPE_FSTOREDF:
4917 case TYPE_FSTORESF:
4918 priority = priority << 3;
4919 break;
4920 case TYPE_STORE:
ea77e738 4921 case TYPE_STM:
4922 priority = priority << 1;
4923 break;
4924 default:
4925 break;
4926 }
4927 return priority;
4928}
f2d3c02a 4929
077dab3b 4930/* The number of instructions that can be issued per cycle. */
f2d3c02a 4931
077dab3b 4932static int
9c808aad 4933s390_issue_rate (void)
077dab3b 4934{
4935 if (s390_tune == PROCESSOR_2084_Z990
4936 || s390_tune == PROCESSOR_2094_Z9_109)
52609473 4937 return 3;
4938 return 1;
4939}
f2d3c02a 4940
52609473 4941static int
9c808aad 4942s390_first_cycle_multipass_dfa_lookahead (void)
52609473 4943{
64e1e4c4 4944 return 4;
4945}
4946
52609473 4947
4948/* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
4949 Fix up MEMs as required. */
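/* Shape of the rewrite (a sketch added for illustration):
     (mem (symbol_ref [pool offset]))
   becomes
     (mem (unspec [(symbol_ref ...) (base reg)] UNSPEC_LTREF))
   so the literal pool base register stays visible in the RTL.  */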
4950
4951static void
4952annotate_constant_pool_refs (rtx *x)
4953{
4954 int i, j;
4955 const char *fmt;
4956
4957 gcc_assert (GET_CODE (*x) != SYMBOL_REF
4958 || !CONSTANT_POOL_ADDRESS_P (*x));
4959
4960 /* Literal pool references can only occur inside a MEM ... */
4961 if (GET_CODE (*x) == MEM)
4962 {
4963 rtx memref = XEXP (*x, 0);
4964
4965 if (GET_CODE (memref) == SYMBOL_REF
4966 && CONSTANT_POOL_ADDRESS_P (memref))
4967 {
4968 rtx base = cfun->machine->base_reg;
4969 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
4970 UNSPEC_LTREF);
4971
4972 *x = replace_equiv_address (*x, addr);
4973 return;
4974 }
4975
4976 if (GET_CODE (memref) == CONST
4977 && GET_CODE (XEXP (memref, 0)) == PLUS
4978 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
4979 && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
4980 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
4981 {
4982 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
4983 rtx sym = XEXP (XEXP (memref, 0), 0);
4984 rtx base = cfun->machine->base_reg;
4985 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4986 UNSPEC_LTREF);
4987
4988 *x = replace_equiv_address (*x, plus_constant (addr, off));
4989 return;
4990 }
4991 }
4992
4993 /* ... or a load-address type pattern. */
4994 if (GET_CODE (*x) == SET)
4995 {
4996 rtx addrref = SET_SRC (*x);
4997
4998 if (GET_CODE (addrref) == SYMBOL_REF
4999 && CONSTANT_POOL_ADDRESS_P (addrref))
5000 {
5001 rtx base = cfun->machine->base_reg;
5002 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
5003 UNSPEC_LTREF);
5004
5005 SET_SRC (*x) = addr;
5006 return;
5007 }
5008
5009 if (GET_CODE (addrref) == CONST
5010 && GET_CODE (XEXP (addrref, 0)) == PLUS
5011 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
5012 && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
5013 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
5014 {
5015 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
5016 rtx sym = XEXP (XEXP (addrref, 0), 0);
5017 rtx base = cfun->machine->base_reg;
5018 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
5019 UNSPEC_LTREF);
5020
5021 SET_SRC (*x) = plus_constant (addr, off);
5022 return;
5023 }
5024 }
5025
5026 /* Annotate LTREL_BASE as well. */
5027 if (GET_CODE (*x) == UNSPEC
5028 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
5029 {
5030 rtx base = cfun->machine->base_reg;
5031 *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
5032 UNSPEC_LTREL_BASE);
5033 return;
5034 }
5035
5036 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5037 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5038 {
5039 if (fmt[i] == 'e')
5040 {
5041 annotate_constant_pool_refs (&XEXP (*x, i));
5042 }
5043 else if (fmt[i] == 'E')
5044 {
5045 for (j = 0; j < XVECLEN (*x, i); j++)
5046 annotate_constant_pool_refs (&XVECEXP (*x, i, j));
5047 }
5048 }
5049}
5050
5051/* Split all branches that exceed the maximum distance.
5052 Returns true if this created a new literal pool entry. */
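/* Background (hedged): relative branches encode a signed 16-bit
   halfword offset, giving a range of +-64KB; a target beyond that is
   instead loaded from the literal pool and reached through a scratch
   register, as set up below.  */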
5053
5054static int
5055s390_split_branches (void)
5056{
5057 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
8d933e31 5058 int new_literal = 0, ret;
5059 rtx insn, pat, tmp, target;
5060 rtx *label;
5061
5062 /* We need correct insn addresses. */
5063
5064 shorten_branches (get_insns ());
5065
5066 /* Find all branches that exceed 64KB, and split them. */
5067
5068 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5069 {
5070 if (GET_CODE (insn) != JUMP_INSN)
5071 continue;
5072
5073 pat = PATTERN (insn);
5074 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
5075 pat = XVECEXP (pat, 0, 0);
5076 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
5077 continue;
5078
5079 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
5080 {
5081 label = &SET_SRC (pat);
5082 }
5083 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
5084 {
5085 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
5086 label = &XEXP (SET_SRC (pat), 1);
5087 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
5088 label = &XEXP (SET_SRC (pat), 2);
5089 else
5090 continue;
5091 }
5092 else
5093 continue;
5094
5095 if (get_attr_length (insn) <= 4)
5096 continue;
5097
5098 /* We are going to use the return register as scratch register,
5099 make sure it will be saved/restored by the prologue/epilogue. */
5100 cfun_frame_layout.save_return_addr_p = 1;
5101
5102 if (!flag_pic)
5103 {
5104 new_literal = 1;
5105 tmp = force_const_mem (Pmode, *label);
5106 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
5107 INSN_ADDRESSES_NEW (tmp, -1);
5108 annotate_constant_pool_refs (&PATTERN (tmp));
5109
5110 target = temp_reg;
5111 }
5112 else
5113 {
5114 new_literal = 1;
5115 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
5116 UNSPEC_LTREL_OFFSET);
5117 target = gen_rtx_CONST (Pmode, target);
5118 target = force_const_mem (Pmode, target);
5119 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
5120 INSN_ADDRESSES_NEW (tmp, -1);
5121 annotate_constant_pool_refs (&PATTERN (tmp));
5122
5123 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
5124 cfun->machine->base_reg),
5125 UNSPEC_LTREL_BASE);
5126 target = gen_rtx_PLUS (Pmode, temp_reg, target);
5127 }
5128
5129 ret = validate_change (insn, label, target, 0);
5130 gcc_assert (ret);
5131 }
5132
5133 return new_literal;
5134}
5135
b2ccb744 5136
5137/* Find an annotated literal pool symbol referenced in RTX X,
5138 and store it at REF. Will abort if X contains references to
5139 more than one such pool symbol; multiple references to the same
5140 symbol are allowed, however.
b2ccb744 5141
c7453384 5142 The rtx pointed to by REF must be initialized to NULL_RTX
5143 by the caller before calling this routine. */
5144
5145static void
9c808aad 5146find_constant_pool_ref (rtx x, rtx *ref)
5147{
5148 int i, j;
5149 const char *fmt;
5150
5151 /* Ignore LTREL_BASE references. */
5152 if (GET_CODE (x) == UNSPEC
5153 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5154 return;
5155 /* Likewise POOL_ENTRY insns. */
5156 if (GET_CODE (x) == UNSPEC_VOLATILE
5157 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
5158 return;
fd7643fb 5159
5160 gcc_assert (GET_CODE (x) != SYMBOL_REF
5161 || !CONSTANT_POOL_ADDRESS_P (x));
5162
5163 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
b2ccb744 5164 {
585539a1 5165 rtx sym = XVECEXP (x, 0, 0);
5166 gcc_assert (GET_CODE (sym) == SYMBOL_REF
5167 && CONSTANT_POOL_ADDRESS_P (sym));
585539a1 5168
b2ccb744 5169 if (*ref == NULL_RTX)
585539a1 5170 *ref = sym;
5171 else
5172 gcc_assert (*ref == sym);
5173
5174 return;
5175 }
5176
5177 fmt = GET_RTX_FORMAT (GET_CODE (x));
5178 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5179 {
5180 if (fmt[i] == 'e')
5181 {
5182 find_constant_pool_ref (XEXP (x, i), ref);
5183 }
5184 else if (fmt[i] == 'E')
5185 {
5186 for (j = 0; j < XVECLEN (x, i); j++)
5187 find_constant_pool_ref (XVECEXP (x, i, j), ref);
5188 }
5189 }
5190}
5191
5192/* Replace every reference to the annotated literal pool
5193 symbol REF in X by its base plus OFFSET. */
5194
5195static void
585539a1 5196replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
5197{
5198 int i, j;
5199 const char *fmt;
5200
8d933e31 5201 gcc_assert (*x != ref);
b2ccb744 5202
5203 if (GET_CODE (*x) == UNSPEC
5204 && XINT (*x, 1) == UNSPEC_LTREF
5205 && XVECEXP (*x, 0, 0) == ref)
b2ccb744 5206 {
5207 *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
5208 return;
5209 }
5210
5211 if (GET_CODE (*x) == PLUS
5212 && GET_CODE (XEXP (*x, 1)) == CONST_INT
5213 && GET_CODE (XEXP (*x, 0)) == UNSPEC
5214 && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
5215 && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
b2ccb744 5216 {
5217 rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
5218 *x = plus_constant (addr, INTVAL (XEXP (*x, 1)));
5219 return;
5220 }
5221
5222 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5223 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5224 {
5225 if (fmt[i] == 'e')
5226 {
585539a1 5227 replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
5228 }
5229 else if (fmt[i] == 'E')
5230 {
5231 for (j = 0; j < XVECLEN (*x, i); j++)
585539a1 5232 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
5233 }
5234 }
5235}
5236
c7453384 5237/* Check whether X contains an UNSPEC_LTREL_BASE.
fd7643fb 5238 Return its constant pool symbol if found, NULL_RTX otherwise. */
aee4e0db 5239
fd7643fb 5240static rtx
9c808aad 5241find_ltrel_base (rtx x)
aee4e0db 5242{
5243 int i, j;
5244 const char *fmt;
5245
5246 if (GET_CODE (x) == UNSPEC
5247 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5248 return XVECEXP (x, 0, 0);
5249
5250 fmt = GET_RTX_FORMAT (GET_CODE (x));
5251 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5252 {
5253 if (fmt[i] == 'e')
5254 {
5255 rtx fnd = find_ltrel_base (XEXP (x, i));
5256 if (fnd)
5257 return fnd;
5258 }
5259 else if (fmt[i] == 'E')
5260 {
5261 for (j = 0; j < XVECLEN (x, i); j++)
5262 {
5263 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
5264 if (fnd)
5265 return fnd;
5266 }
5267 }
5268 }
5269
fd7643fb 5270 return NULL_RTX;
5271}
5272
585539a1 5273/* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
5274
5275static void
585539a1 5276replace_ltrel_base (rtx *x)
aee4e0db 5277{
fd7643fb 5278 int i, j;
5279 const char *fmt;
5280
5281 if (GET_CODE (*x) == UNSPEC
5282 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
aee4e0db 5283 {
585539a1 5284 *x = XVECEXP (*x, 0, 1);
fd7643fb 5285 return;
5286 }
5287
5288 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5289 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5290 {
5291 if (fmt[i] == 'e')
5292 {
585539a1 5293 replace_ltrel_base (&XEXP (*x, i));
5294 }
5295 else if (fmt[i] == 'E')
5296 {
5297 for (j = 0; j < XVECLEN (*x, i); j++)
585539a1 5298 replace_ltrel_base (&XVECEXP (*x, i, j));
5299 }
5300 }
5301}
5302
5303
fd7643fb 5304/* We keep a list of constants which we have to add to internal
5305 constant tables in the middle of large functions. */
5306
4dc19cc0 5307#define NR_C_MODES 11
c7453384 5308enum machine_mode constant_modes[NR_C_MODES] =
b2ccb744 5309{
4dc19cc0
AK
5310 TFmode, TImode, TDmode,
5311 DFmode, DImode, DDmode,
5312 SFmode, SImode, SDmode,
b2ccb744
UW
5313 HImode,
5314 QImode
5315};
5316
b2ccb744
UW
5317struct constant
5318{
5319 struct constant *next;
5320 rtx value;
5321 rtx label;
5322};
5323
5324struct constant_pool
5325{
5326 struct constant_pool *next;
5327 rtx first_insn;
aee4e0db
UW
5328 rtx pool_insn;
5329 bitmap insns;
b2ccb744
UW
5330
5331 struct constant *constants[NR_C_MODES];
9bb86f41 5332 struct constant *execute;
b2ccb744
UW
5333 rtx label;
5334 int size;
5335};

/* Allocate new constant_pool structure.  */

static struct constant_pool *
s390_alloc_pool (void)
{
  struct constant_pool *pool;
  int i;

  pool = (struct constant_pool *) xmalloc (sizeof *pool);
  pool->next = NULL;
  for (i = 0; i < NR_C_MODES; i++)
    pool->constants[i] = NULL;

  pool->execute = NULL;
  pool->label = gen_label_rtx ();
  pool->first_insn = NULL_RTX;
  pool->pool_insn = NULL_RTX;
  pool->insns = BITMAP_ALLOC (NULL);
  pool->size = 0;

  return pool;
}

/* Create new constant pool covering instructions starting at INSN
   and chain it to the end of POOL_LIST.  */

static struct constant_pool *
s390_start_pool (struct constant_pool **pool_list, rtx insn)
{
  struct constant_pool *pool, **prev;

  pool = s390_alloc_pool ();
  pool->first_insn = insn;

  for (prev = pool_list; *prev; prev = &(*prev)->next)
    ;
  *prev = pool;

  return pool;
}

/* End range of instructions covered by POOL at INSN and emit
   placeholder insn representing the pool.  */

static void
s390_end_pool (struct constant_pool *pool, rtx insn)
{
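  /* The extra 8 bytes conservatively cover the padding that
     s390_dump_pool may later insert to align the pool start.  */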
  rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);

  if (!insn)
    insn = get_last_insn ();

  pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
  INSN_ADDRESSES_NEW (pool->pool_insn, -1);
}

/* Add INSN to the list of insns covered by POOL.  */

static void
s390_add_pool_insn (struct constant_pool *pool, rtx insn)
{
  bitmap_set_bit (pool->insns, INSN_UID (insn));
}

/* Return pool out of POOL_LIST that covers INSN.  */

static struct constant_pool *
s390_find_pool (struct constant_pool *pool_list, rtx insn)
{
  struct constant_pool *pool;

  for (pool = pool_list; pool; pool = pool->next)
    if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
      break;

  return pool;
}

/* Add constant VAL of mode MODE to the constant pool POOL.  */

static void
s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
{
  struct constant *c;
  int i;

  for (i = 0; i < NR_C_MODES; i++)
    if (constant_modes[i] == mode)
      break;
  gcc_assert (i != NR_C_MODES);

  for (c = pool->constants[i]; c != NULL; c = c->next)
    if (rtx_equal_p (val, c->value))
      break;

  if (c == NULL)
    {
      c = (struct constant *) xmalloc (sizeof *c);
      c->value = val;
      c->label = gen_label_rtx ();
      c->next = pool->constants[i];
      pool->constants[i] = c;
      pool->size += GET_MODE_SIZE (mode);
    }
}

/* Find constant VAL of mode MODE in the constant pool POOL.
   Return an RTX describing the distance from the start of
   the pool to the location of the new constant.  */

static rtx
s390_find_constant (struct constant_pool *pool, rtx val,
                    enum machine_mode mode)
{
  struct constant *c;
  rtx offset;
  int i;

  for (i = 0; i < NR_C_MODES; i++)
    if (constant_modes[i] == mode)
      break;
  gcc_assert (i != NR_C_MODES);

  for (c = pool->constants[i]; c != NULL; c = c->next)
    if (rtx_equal_p (val, c->value))
      break;

  gcc_assert (c);

  offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
                          gen_rtx_LABEL_REF (Pmode, pool->label));
  offset = gen_rtx_CONST (Pmode, offset);
  return offset;
}

/* Check whether INSN is an execute.  Return the label_ref to its
   execute target template if so, NULL_RTX otherwise.  */

static rtx
s390_execute_label (rtx insn)
{
  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == PARALLEL
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
      && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
    return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);

  return NULL_RTX;
}
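
/* An execute insn encapsulates a target instruction template that is run
   via the EXECUTE (EX) machine instruction, e.g. for block operations
   whose length is known only at run time.  On pre-z900 targets the
   template cannot be addressed via LARL, so s390_add_execute below places
   it into the literal pool; the 6 bytes it reserves correspond to the
   maximum length of a single s390 instruction.  */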

/* Add execute target for INSN to the constant pool POOL.  */

static void
s390_add_execute (struct constant_pool *pool, rtx insn)
{
  struct constant *c;

  for (c = pool->execute; c != NULL; c = c->next)
    if (INSN_UID (insn) == INSN_UID (c->value))
      break;

  if (c == NULL)
    {
      c = (struct constant *) xmalloc (sizeof *c);
      c->value = insn;
      c->label = gen_label_rtx ();
      c->next = pool->execute;
      pool->execute = c;
      pool->size += 6;
    }
}

/* Find execute target for INSN in the constant pool POOL.
   Return an RTX describing the distance from the start of
   the pool to the location of the execute target.  */

static rtx
s390_find_execute (struct constant_pool *pool, rtx insn)
{
  struct constant *c;
  rtx offset;

  for (c = pool->execute; c != NULL; c = c->next)
    if (INSN_UID (insn) == INSN_UID (c->value))
      break;

  gcc_assert (c);

  offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
                          gen_rtx_LABEL_REF (Pmode, pool->label));
  offset = gen_rtx_CONST (Pmode, offset);
  return offset;
}

/* For an execute INSN, extract the execute target template.  */

static rtx
s390_execute_target (rtx insn)
{
  rtx pattern = PATTERN (insn);
  gcc_assert (s390_execute_label (insn));

  if (XVECLEN (pattern, 0) == 2)
    {
      pattern = copy_rtx (XVECEXP (pattern, 0, 1));
    }
  else
    {
      rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
      int i;

      for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
        RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));

      pattern = gen_rtx_PARALLEL (VOIDmode, vec);
    }

  return pattern;
}

/* Indicate that INSN cannot be duplicated.  This is the case for
   execute insns that carry a unique label.  */

static bool
s390_cannot_copy_insn_p (rtx insn)
{
  rtx label = s390_execute_label (insn);
  return label && label != const0_rtx;
}

/* Dump out the constants in POOL.  If REMOTE_LABEL is true,
   do not emit the pool base label.  */

static void
s390_dump_pool (struct constant_pool *pool, bool remote_label)
{
  struct constant *c;
  rtx insn = pool->pool_insn;
  int i;

  /* Switch to rodata section.  */
  if (TARGET_CPU_ZARCH)
    {
      insn = emit_insn_after (gen_pool_section_start (), insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Ensure minimum pool alignment.  */
  if (TARGET_CPU_ZARCH)
    insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
  else
    insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Emit pool base label.  */
  if (!remote_label)
    {
      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Dump constants in descending alignment requirement order,
     ensuring proper alignment for every constant.  */
  for (i = 0; i < NR_C_MODES; i++)
    for (c = pool->constants[i]; c; c = c->next)
      {
        /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references.  */
        rtx value = c->value;
        if (GET_CODE (value) == CONST
            && GET_CODE (XEXP (value, 0)) == UNSPEC
            && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
            && XVECLEN (XEXP (value, 0), 0) == 1)
          {
            value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
                                   gen_rtx_LABEL_REF (VOIDmode, pool->label));
            value = gen_rtx_CONST (VOIDmode, value);
          }

        insn = emit_label_after (c->label, insn);
        INSN_ADDRESSES_NEW (insn, -1);

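        /* Wrap the value in an UNSPECV_POOL_ENTRY so that it is emitted
           as literal pool data (see s390_output_pool_entry) rather than
           as an executable instruction.  */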
        value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
                                         gen_rtvec (1, value),
                                         UNSPECV_POOL_ENTRY);
        insn = emit_insn_after (value, insn);
        INSN_ADDRESSES_NEW (insn, -1);
      }

  /* Ensure minimum alignment for instructions.  */
  insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Output in-pool execute template insns.  */
  for (c = pool->execute; c; c = c->next)
    {
      insn = emit_label_after (c->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);

      insn = emit_insn_after (s390_execute_target (c->value), insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Switch back to previous section.  */
  if (TARGET_CPU_ZARCH)
    {
      insn = emit_insn_after (gen_pool_section_end (), insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  insn = emit_barrier_after (insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Remove placeholder insn.  */
  remove_insn (pool->pool_insn);
}

/* Free all memory used by POOL.  */

static void
s390_free_pool (struct constant_pool *pool)
{
  struct constant *c, *next;
  int i;

  for (i = 0; i < NR_C_MODES; i++)
    for (c = pool->constants[i]; c; c = next)
      {
        next = c->next;
        free (c);
      }

  for (c = pool->execute; c; c = next)
    {
      next = c->next;
      free (c);
    }

  BITMAP_FREE (pool->insns);
  free (pool);
}


/* Collect main literal pool.  Return NULL on overflow.  */

static struct constant_pool *
s390_mainpool_start (void)
{
  struct constant_pool *pool;
  rtx insn;

  pool = s390_alloc_pool ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == INSN
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
          && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
        {
          gcc_assert (!pool->pool_insn);
          pool->pool_insn = insn;
        }

      if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
        {
          s390_add_execute (pool, insn);
        }
      else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        {
          rtx pool_ref = NULL_RTX;
          find_constant_pool_ref (PATTERN (insn), &pool_ref);
          if (pool_ref)
            {
              rtx constant = get_pool_constant (pool_ref);
              enum machine_mode mode = get_pool_mode (pool_ref);
              s390_add_constant (pool, constant, mode);
            }
        }
    }

  gcc_assert (pool->pool_insn || pool->size == 0);

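  /* A pool of 4096 bytes or more cannot be addressed from a single base
     register (the displacement field is only 12 bits wide), so it has
     to be chunkified instead.  */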
  if (pool->size >= 4096)
    {
      /* We're going to chunkify the pool, so remove the main
         pool placeholder insn.  */
      remove_insn (pool->pool_insn);

      s390_free_pool (pool);
      pool = NULL;
    }

  return pool;
}

/* POOL holds the main literal pool as collected by s390_mainpool_start.
   Modify the current function to output the pool constants as well as
   the pool register setup instruction.  */

static void
s390_mainpool_finish (struct constant_pool *pool)
{
  rtx base_reg = cfun->machine->base_reg;
  rtx insn;

  /* If the pool is empty, we're done.  */
  if (pool->size == 0)
    {
      /* We don't actually need a base register after all.  */
      cfun->machine->base_reg = NULL_RTX;

      if (pool->pool_insn)
        remove_insn (pool->pool_insn);
      s390_free_pool (pool);
      return;
    }

  /* We need correct insn addresses.  */
  shorten_branches (get_insns ());

  /* On zSeries, we use a LARL to load the pool register.  The pool is
     located in the .rodata section, so we emit it after the function.  */
  if (TARGET_CPU_ZARCH)
    {
      insn = gen_main_base_64 (base_reg, pool->label);
      insn = emit_insn_after (insn, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      insn = get_last_insn ();
      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      s390_dump_pool (pool, 0);
    }

  /* On S/390, if the total size of the function's code plus literal pool
     does not exceed 4096 bytes, we use BASR to set up a function base
     pointer, and emit the literal pool at the end of the function.  */
  else if (INSN_ADDRESSES (INSN_UID (get_last_insn ()))
           + pool->size + 8 /* alignment slop */ < 4096)
    {
      insn = gen_main_base_31_small (base_reg, pool->label);
      insn = emit_insn_after (insn, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);

      insn = get_last_insn ();
      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      s390_dump_pool (pool, 1);
    }

  /* Otherwise, we emit an inline literal pool and use BASR to branch
     over it, setting up the pool register at the same time.  */
  else
    {
      rtx pool_end = gen_label_rtx ();

      insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
      insn = emit_insn_after (insn, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);

      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      insn = emit_label_after (pool_end, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);

      s390_dump_pool (pool, 1);
    }


  /* Replace all literal pool references.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        replace_ltrel_base (&PATTERN (insn));

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        {
          rtx addr, pool_ref = NULL_RTX;
          find_constant_pool_ref (PATTERN (insn), &pool_ref);
          if (pool_ref)
            {
              if (s390_execute_label (insn))
                addr = s390_find_execute (pool, insn);
              else
                addr = s390_find_constant (pool, get_pool_constant (pool_ref),
                                           get_pool_mode (pool_ref));

              replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
              INSN_CODE (insn) = -1;
            }
        }
    }


  /* Free the pool.  */
  s390_free_pool (pool);
}

/* POOL holds the main literal pool as collected by s390_mainpool_start.
   We have decided we cannot use this pool, so revert all changes
   to the current function that were done by s390_mainpool_start.  */
static void
s390_mainpool_cancel (struct constant_pool *pool)
{
  /* We didn't actually change the instruction stream, so simply
     free the pool memory.  */
  s390_free_pool (pool);
}


/* Chunkify the literal pool.  */

#define S390_POOL_CHUNK_MIN 0xc00
#define S390_POOL_CHUNK_MAX 0xe00
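
/* 0xc00 == 3072 and 0xe00 == 3584 bytes, both safely below the 4096-byte
   reach of a 12-bit displacement; the difference leaves headroom for
   alignment padding and for the base register reload insns accounted
   for via extra_size below.  */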

static struct constant_pool *
s390_chunkify_start (void)
{
  struct constant_pool *curr_pool = NULL, *pool_list = NULL;
  int extra_size = 0;
  bitmap far_labels;
  rtx pending_ltrel = NULL_RTX;
  rtx insn;

  rtx (*gen_reload_base) (rtx, rtx) =
    TARGET_CPU_ZARCH? gen_reload_base_64 : gen_reload_base_31;


  /* We need correct insn addresses.  */

  shorten_branches (get_insns ());

  /* Scan all insns and move literals to pool chunks.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Check for pending LTREL_BASE.  */
      if (INSN_P (insn))
        {
          rtx ltrel_base = find_ltrel_base (PATTERN (insn));
          if (ltrel_base)
            {
              gcc_assert (ltrel_base == pending_ltrel);
              pending_ltrel = NULL_RTX;
            }
        }

      if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
        {
          if (!curr_pool)
            curr_pool = s390_start_pool (&pool_list, insn);

          s390_add_execute (curr_pool, insn);
          s390_add_pool_insn (curr_pool, insn);
        }
      else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        {
          rtx pool_ref = NULL_RTX;
          find_constant_pool_ref (PATTERN (insn), &pool_ref);
          if (pool_ref)
            {
              rtx constant = get_pool_constant (pool_ref);
              enum machine_mode mode = get_pool_mode (pool_ref);

              if (!curr_pool)
                curr_pool = s390_start_pool (&pool_list, insn);

              s390_add_constant (curr_pool, constant, mode);
              s390_add_pool_insn (curr_pool, insn);

              /* Don't split the pool chunk between a LTREL_OFFSET load
                 and the corresponding LTREL_BASE.  */
              if (GET_CODE (constant) == CONST
                  && GET_CODE (XEXP (constant, 0)) == UNSPEC
                  && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
                {
                  gcc_assert (!pending_ltrel);
                  pending_ltrel = pool_ref;
                }
            }
        }

      if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
        {
          if (curr_pool)
            s390_add_pool_insn (curr_pool, insn);
          /* An LTREL_BASE must follow within the same basic block.  */
          gcc_assert (!pending_ltrel);
        }

      if (!curr_pool
          || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
          || INSN_ADDRESSES (INSN_UID (insn)) == -1)
        continue;

      if (TARGET_CPU_ZARCH)
        {
          if (curr_pool->size < S390_POOL_CHUNK_MAX)
            continue;

          s390_end_pool (curr_pool, NULL_RTX);
          curr_pool = NULL;
        }
      else
        {
          int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
                           - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
                           + extra_size;

          /* We will later have to insert base register reload insns.
             Those will have an effect on code size, which we need to
             consider here.  This calculation makes rather pessimistic
             worst-case assumptions.  */
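          /* Each such label may later receive one base register reload
             insn of up to 6 bytes, the maximum length of a single
             s390 instruction.  */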
          if (GET_CODE (insn) == CODE_LABEL)
            extra_size += 6;

          if (chunk_size < S390_POOL_CHUNK_MIN
              && curr_pool->size < S390_POOL_CHUNK_MIN)
            continue;

          /* Pool chunks can only be inserted after BARRIERs ...  */
          if (GET_CODE (insn) == BARRIER)
            {
              s390_end_pool (curr_pool, insn);
              curr_pool = NULL;
              extra_size = 0;
            }

          /* ... so if we don't find one in time, create one.  */
          else if ((chunk_size > S390_POOL_CHUNK_MAX
                   || curr_pool->size > S390_POOL_CHUNK_MAX))
            {
              rtx label, jump, barrier;

              /* We can insert the barrier only after a 'real' insn.  */
              if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
                continue;
              if (get_attr_length (insn) == 0)
                continue;

              /* Don't separate LTREL_BASE from the corresponding
                 LTREL_OFFSET load.  */
              if (pending_ltrel)
                continue;

              label = gen_label_rtx ();
              jump = emit_jump_insn_after (gen_jump (label), insn);
              barrier = emit_barrier_after (jump);
              insn = emit_label_after (label, barrier);
              JUMP_LABEL (jump) = label;
              LABEL_NUSES (label) = 1;

              INSN_ADDRESSES_NEW (jump, -1);
              INSN_ADDRESSES_NEW (barrier, -1);
              INSN_ADDRESSES_NEW (insn, -1);

              s390_end_pool (curr_pool, barrier);
              curr_pool = NULL;
              extra_size = 0;
            }
        }
    }

  if (curr_pool)
    s390_end_pool (curr_pool, NULL_RTX);
  gcc_assert (!pending_ltrel);

  /* Find all labels that are branched into
     from an insn belonging to a different chunk.  */

  far_labels = BITMAP_ALLOC (NULL);

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Labels marked with LABEL_PRESERVE_P can be target
         of non-local jumps, so we have to mark them.
         The same holds for named labels.

         Don't do that, however, if it is the label before
         a jump table.  */

      if (GET_CODE (insn) == CODE_LABEL
          && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
        {
          rtx vec_insn = next_real_insn (insn);
          rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
                        PATTERN (vec_insn) : NULL_RTX;
          if (!vec_pat
              || !(GET_CODE (vec_pat) == ADDR_VEC
                   || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
            bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
        }

      /* If we have a direct jump (conditional or unconditional)
         or a casesi jump, check all potential targets.  */
      else if (GET_CODE (insn) == JUMP_INSN)
        {
          rtx pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
            pat = XVECEXP (pat, 0, 0);

          if (GET_CODE (pat) == SET)
            {
              rtx label = JUMP_LABEL (insn);
              if (label)
                {
                  if (s390_find_pool (pool_list, label)
                      != s390_find_pool (pool_list, insn))
                    bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
                }
            }
          else if (GET_CODE (pat) == PARALLEL
                   && XVECLEN (pat, 0) == 2
                   && GET_CODE (XVECEXP (pat, 0, 0)) == SET
                   && GET_CODE (XVECEXP (pat, 0, 1)) == USE
                   && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
            {
              /* Find the jump table used by this casesi jump.  */
              rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
              rtx vec_insn = next_real_insn (vec_label);
              rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
                            PATTERN (vec_insn) : NULL_RTX;
              if (vec_pat
                  && (GET_CODE (vec_pat) == ADDR_VEC
                      || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
                {
                  int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;

                  for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
                    {
                      rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);

                      if (s390_find_pool (pool_list, label)
                          != s390_find_pool (pool_list, insn))
                        bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
                    }
                }
            }
        }
    }

  /* Insert base register reload insns before every pool.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    {
      rtx new_insn = gen_reload_base (cfun->machine->base_reg,
                                      curr_pool->label);
      rtx insn = curr_pool->first_insn;
      INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
    }

  /* Insert base register reload insns at every far label.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CODE_LABEL
        && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
      {
        struct constant_pool *pool = s390_find_pool (pool_list, insn);
        if (pool)
          {
            rtx new_insn = gen_reload_base (cfun->machine->base_reg,
                                            pool->label);
            INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
          }
      }


  BITMAP_FREE (far_labels);


  /* Recompute insn addresses.  */

  init_insn_lengths ();
  shorten_branches (get_insns ());

  return pool_list;
}

/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
   After we have decided to use this list, finish implementing
   all changes to the current function as required.  */

static void
s390_chunkify_finish (struct constant_pool *pool_list)
{
  struct constant_pool *curr_pool = NULL;
  rtx insn;


  /* Replace all literal pool references.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        replace_ltrel_base (&PATTERN (insn));

      curr_pool = s390_find_pool (pool_list, insn);
      if (!curr_pool)
        continue;

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        {
          rtx addr, pool_ref = NULL_RTX;
          find_constant_pool_ref (PATTERN (insn), &pool_ref);
          if (pool_ref)
            {
              if (s390_execute_label (insn))
                addr = s390_find_execute (curr_pool, insn);
              else
                addr = s390_find_constant (curr_pool,
                                           get_pool_constant (pool_ref),
                                           get_pool_mode (pool_ref));

              replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
              INSN_CODE (insn) = -1;
            }
        }
    }

  /* Dump out all literal pools.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    s390_dump_pool (curr_pool, 0);

  /* Free pool list.  */

  while (pool_list)
    {
      struct constant_pool *next = pool_list->next;
      s390_free_pool (pool_list);
      pool_list = next;
    }
}

/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
   We have decided we cannot use this list, so revert all changes
   to the current function that were done by s390_chunkify_start.  */

static void
s390_chunkify_cancel (struct constant_pool *pool_list)
{
  struct constant_pool *curr_pool = NULL;
  rtx insn;

  /* Remove all pool placeholder insns.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    {
      /* Did we insert an extra barrier?  Remove it.  */
      rtx barrier = PREV_INSN (curr_pool->pool_insn);
      rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
      rtx label = NEXT_INSN (curr_pool->pool_insn);

      if (jump && GET_CODE (jump) == JUMP_INSN
          && barrier && GET_CODE (barrier) == BARRIER
          && label && GET_CODE (label) == CODE_LABEL
          && GET_CODE (PATTERN (jump)) == SET
          && SET_DEST (PATTERN (jump)) == pc_rtx
          && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
          && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
        {
          remove_insn (jump);
          remove_insn (barrier);
          remove_insn (label);
        }

      remove_insn (curr_pool->pool_insn);
    }

  /* Remove all base register reload insns.  */

  for (insn = get_insns (); insn; )
    {
      rtx next_insn = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
          && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
        remove_insn (insn);

      insn = next_insn;
    }

  /* Free pool list.  */

  while (pool_list)
    {
      struct constant_pool *next = pool_list->next;
      s390_free_pool (pool_list);
      pool_list = next;
    }
}


/* Output the constant pool entry EXP in mode MODE with alignment ALIGN.  */

void
s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
{
  REAL_VALUE_TYPE r;

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_FLOAT:
    case MODE_DECIMAL_FLOAT:
      gcc_assert (GET_CODE (exp) == CONST_DOUBLE);

      REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
      assemble_real (r, mode, align);
      break;

    case MODE_INT:
      assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
      break;

    default:
      gcc_unreachable ();
    }
}


/* Return an RTL expression representing the value of the return address
   for the frame COUNT steps up from the current frame.  FRAME is the
   frame pointer of that frame.  */

rtx
s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
{
  int offset;
  rtx addr;

  /* Without backchain, we fail for all but the current frame.  */

  if (!TARGET_BACKCHAIN && count > 0)
    return NULL_RTX;

  /* For the current frame, we need to make sure the initial
     value of RETURN_REGNUM is actually saved.  */

  if (count == 0)
    {
      /* On non-z architectures branch splitting could overwrite r14.  */
      if (TARGET_CPU_ZARCH)
        return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
      else
        {
          cfun_frame_layout.save_return_addr_p = true;
          return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
        }
    }

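  /* With the packed stack layout the return address lives in the second
     word below the frame address; otherwise it sits in r14's regular
     save slot at RETURN_REGNUM * UNITS_PER_WORD, e.g. 14 * 8 == 112
     bytes on a 64-bit target.  */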
  if (TARGET_PACKED_STACK)
    offset = -2 * UNITS_PER_WORD;
  else
    offset = RETURN_REGNUM * UNITS_PER_WORD;

  addr = plus_constant (frame, offset);
  addr = memory_address (Pmode, addr);
  return gen_rtx_MEM (Pmode, addr);
}

/* Return an RTL expression representing the back chain stored in
   the current stack frame.  */

rtx
s390_back_chain_rtx (void)
{
  rtx chain;

  gcc_assert (TARGET_BACKCHAIN);

  if (TARGET_PACKED_STACK)
    chain = plus_constant (stack_pointer_rtx,
                           STACK_POINTER_OFFSET - UNITS_PER_WORD);
  else
    chain = stack_pointer_rtx;

  chain = gen_rtx_MEM (Pmode, chain);
  return chain;
}

/* Find first call clobbered register unused in a function.
   This could be used as base register in a leaf function
   or for holding the return address before epilogue.  */

static int
find_unused_clobbered_reg (void)
{
  int i;
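  /* Only GPRs 0 through 5 are call-clobbered in the s390 ABI, hence the
     bound of 6 on the scan below.  */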
  for (i = 0; i < 6; i++)
    if (!df_regs_ever_live_p (i))
      return i;
  return 0;
}


/* Helper function for s390_regs_ever_clobbered.  Sets the fields in DATA
   for all clobbered hard regs in SETREG.  */

static void
s390_reg_clobbered_rtx (rtx setreg, const_rtx set_insn ATTRIBUTE_UNUSED, void *data)
{
  int *regs_ever_clobbered = (int *)data;
  unsigned int i, regno;
  enum machine_mode mode = GET_MODE (setreg);

  if (GET_CODE (setreg) == SUBREG)
    {
      rtx inner = SUBREG_REG (setreg);
      if (!GENERAL_REG_P (inner))
        return;
      regno = subreg_regno (setreg);
    }
  else if (GENERAL_REG_P (setreg))
    regno = REGNO (setreg);
  else
    return;

  for (i = regno;
       i < regno + HARD_REGNO_NREGS (regno, mode);
       i++)
    regs_ever_clobbered[i] = 1;
}

/* Walks through all basic blocks of the current function looking
   for clobbered hard regs using s390_reg_clobbered_rtx.  The fields
   of the passed integer array REGS_EVER_CLOBBERED are set to one for
   each of those regs.  */

static void
s390_regs_ever_clobbered (int *regs_ever_clobbered)
{
  basic_block cur_bb;
  rtx cur_insn;
  unsigned int i;

  memset (regs_ever_clobbered, 0, 16 * sizeof (int));

  /* For non-leaf functions we have to consider all call clobbered regs to be
     clobbered.  */
  if (!current_function_is_leaf)
    {
      for (i = 0; i < 16; i++)
        regs_ever_clobbered[i] = call_really_used_regs[i];
    }

  /* Make the "magic" eh_return registers live if necessary.  For regs_ever_live
     this work is done by liveness analysis (mark_regs_live_at_end).
     Special care is needed for functions containing landing pads.  Landing pads
     may use the eh registers, but the code which sets these registers is not
     contained in that function.  Hence s390_regs_ever_clobbered is not able to
     deal with this automatically.  */
  if (current_function_calls_eh_return || cfun->machine->has_landing_pad_p)
    for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++)
      if (current_function_calls_eh_return
          || (cfun->machine->has_landing_pad_p
              && df_regs_ever_live_p (EH_RETURN_DATA_REGNO (i))))
        regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1;

  /* For nonlocal gotos all call-saved registers have to be saved.
     This flag is also set for the unwinding code in libgcc.
     See expand_builtin_unwind_init.  For regs_ever_live this is done by
     reload.  */
  if (current_function_has_nonlocal_label)
    for (i = 0; i < 16; i++)
      if (!call_really_used_regs[i])
        regs_ever_clobbered[i] = 1;

  FOR_EACH_BB (cur_bb)
    {
      FOR_BB_INSNS (cur_bb, cur_insn)
        {
          if (INSN_P (cur_insn))
            note_stores (PATTERN (cur_insn),
                         s390_reg_clobbered_rtx,
                         regs_ever_clobbered);
        }
    }
}

/* Determine the frame area which actually has to be accessed
   in the function epilogue.  The values are stored at the
   given pointers AREA_BOTTOM (address of the lowest used stack
   address) and AREA_TOP (address of the first item which does
   not belong to the stack frame).  */

static void
s390_frame_area (int *area_bottom, int *area_top)
{
  int b, t;
  int i;

  b = INT_MAX;
  t = INT_MIN;

  if (cfun_frame_layout.first_restore_gpr != -1)
    {
      b = (cfun_frame_layout.gprs_offset
           + cfun_frame_layout.first_restore_gpr * UNITS_PER_WORD);
      t = b + (cfun_frame_layout.last_restore_gpr
               - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_WORD;
    }

  if (TARGET_64BIT && cfun_save_high_fprs_p)
    {
      b = MIN (b, cfun_frame_layout.f8_offset);
      t = MAX (t, (cfun_frame_layout.f8_offset
                   + cfun_frame_layout.high_fprs * 8));
    }

  if (!TARGET_64BIT)
    for (i = 2; i < 4; i++)
      if (cfun_fpr_bit_p (i))
        {
          b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
          t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
        }

  *area_bottom = b;
  *area_top = t;
}

/* Fill cfun->machine with info about register usage of current function.
   Return in CLOBBERED_REGS which GPRs are currently considered set.  */

static void
s390_register_info (int clobbered_regs[])
{
  int i, j;

  /* fprs 8 - 15 are call saved for the 64 bit ABI.  */
  cfun_frame_layout.fpr_bitmap = 0;
  cfun_frame_layout.high_fprs = 0;
  if (TARGET_64BIT)
    for (i = 24; i < 32; i++)
      if (df_regs_ever_live_p (i) && !global_regs[i])
        {
          cfun_set_fpr_bit (i - 16);
          cfun_frame_layout.high_fprs++;
        }

  /* Find first and last gpr to be saved.  We trust regs_ever_live
     data, except that we don't save and restore global registers.

     Also, all registers with special meaning to the compiler need
     extra handling.  */

  s390_regs_ever_clobbered (clobbered_regs);

  for (i = 0; i < 16; i++)
    clobbered_regs[i] = clobbered_regs[i] && !global_regs[i] && !fixed_regs[i];

  if (frame_pointer_needed)
    clobbered_regs[HARD_FRAME_POINTER_REGNUM] = 1;

  if (flag_pic)
    clobbered_regs[PIC_OFFSET_TABLE_REGNUM]
      |= df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM);

  clobbered_regs[BASE_REGNUM]
    |= (cfun->machine->base_reg
        && REGNO (cfun->machine->base_reg) == BASE_REGNUM);

  clobbered_regs[RETURN_REGNUM]
    |= (!current_function_is_leaf
        || TARGET_TPF_PROFILING
        || cfun->machine->split_branches_pending_p
        || cfun_frame_layout.save_return_addr_p
        || current_function_calls_eh_return
        || current_function_stdarg);

  clobbered_regs[STACK_POINTER_REGNUM]
    |= (!current_function_is_leaf
        || TARGET_TPF_PROFILING
        || cfun_save_high_fprs_p
        || get_frame_size () > 0
        || current_function_calls_alloca
        || current_function_stdarg);

  for (i = 6; i < 16; i++)
    if (df_regs_ever_live_p (i) || clobbered_regs[i])
      break;
  for (j = 15; j > i; j--)
    if (df_regs_ever_live_p (j) || clobbered_regs[j])
      break;

  if (i == 16)
    {
      /* Nothing to save/restore.  */
      cfun_frame_layout.first_save_gpr_slot = -1;
      cfun_frame_layout.last_save_gpr_slot = -1;
      cfun_frame_layout.first_save_gpr = -1;
      cfun_frame_layout.first_restore_gpr = -1;
      cfun_frame_layout.last_save_gpr = -1;
      cfun_frame_layout.last_restore_gpr = -1;
    }
  else
    {
      /* Save slots for gprs from i to j.  */
      cfun_frame_layout.first_save_gpr_slot = i;
      cfun_frame_layout.last_save_gpr_slot = j;

      for (i = cfun_frame_layout.first_save_gpr_slot;
           i < cfun_frame_layout.last_save_gpr_slot + 1;
           i++)
        if (clobbered_regs[i])
          break;

      for (j = cfun_frame_layout.last_save_gpr_slot; j > i; j--)
        if (clobbered_regs[j])
          break;

      if (i == cfun_frame_layout.last_save_gpr_slot + 1)
        {
          /* Nothing to save/restore.  */
          cfun_frame_layout.first_save_gpr = -1;
          cfun_frame_layout.first_restore_gpr = -1;
          cfun_frame_layout.last_save_gpr = -1;
          cfun_frame_layout.last_restore_gpr = -1;
        }
      else
        {
          /* Save / Restore from gpr i to j.  */
          cfun_frame_layout.first_save_gpr = i;
          cfun_frame_layout.first_restore_gpr = i;
          cfun_frame_layout.last_save_gpr = j;
          cfun_frame_layout.last_restore_gpr = j;
        }
    }

  if (current_function_stdarg)
    {
      /* Varargs functions need to save gprs 2 to 6.  */
      if (cfun->va_list_gpr_size
          && current_function_args_info.gprs < GP_ARG_NUM_REG)
        {
          int min_gpr = current_function_args_info.gprs;
          int max_gpr = min_gpr + cfun->va_list_gpr_size;
          if (max_gpr > GP_ARG_NUM_REG)
            max_gpr = GP_ARG_NUM_REG;

          if (cfun_frame_layout.first_save_gpr == -1
              || cfun_frame_layout.first_save_gpr > 2 + min_gpr)
            {
              cfun_frame_layout.first_save_gpr = 2 + min_gpr;
              cfun_frame_layout.first_save_gpr_slot = 2 + min_gpr;
            }

          if (cfun_frame_layout.last_save_gpr == -1
              || cfun_frame_layout.last_save_gpr < 2 + max_gpr - 1)
            {
              cfun_frame_layout.last_save_gpr = 2 + max_gpr - 1;
              cfun_frame_layout.last_save_gpr_slot = 2 + max_gpr - 1;
            }
        }

      /* Mark f0, f2 for 31 bit and f0-f4 for 64 bit to be saved.  */
      if (TARGET_HARD_FLOAT && cfun->va_list_fpr_size
          && current_function_args_info.fprs < FP_ARG_NUM_REG)
        {
          int min_fpr = current_function_args_info.fprs;
          int max_fpr = min_fpr + cfun->va_list_fpr_size;
          if (max_fpr > FP_ARG_NUM_REG)
            max_fpr = FP_ARG_NUM_REG;

          /* ??? This is currently required to ensure proper location
             of the fpr save slots within the va_list save area.  */
          if (TARGET_PACKED_STACK)
            min_fpr = 0;

          for (i = min_fpr; i < max_fpr; i++)
            cfun_set_fpr_bit (i);
        }
    }

  if (!TARGET_64BIT)
    for (i = 2; i < 4; i++)
      if (df_regs_ever_live_p (i + 16) && !global_regs[i + 16])
        cfun_set_fpr_bit (i);
}

/* Fill cfun->machine with info about frame of current function.  */

static void
s390_frame_info (void)
{
  int i;

  cfun_frame_layout.frame_size = get_frame_size ();
  if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
    fatal_error ("total size of local variables exceeds architecture limit");

  if (!TARGET_PACKED_STACK)
    {
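      /* Standard ABI frame layout: the backchain at offset 0, the GPR
         save slots at regno * UNITS_PER_WORD (e.g. r14 at 56 on 31 bit),
         and the FPR argument save area at 16 * UNITS_PER_WORD.  f8-f15
         get no slot in this save area; room for them is added to
         frame_size further down, hence the negative f8_offset.  */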
      cfun_frame_layout.backchain_offset = 0;
      cfun_frame_layout.f0_offset = 16 * UNITS_PER_WORD;
      cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
      cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
      cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr_slot
                                       * UNITS_PER_WORD);
    }
  else if (TARGET_BACKCHAIN) /* kernel stack layout */
    {
      cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
                                            - UNITS_PER_WORD);
      cfun_frame_layout.gprs_offset
        = (cfun_frame_layout.backchain_offset
           - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr_slot + 1)
           * UNITS_PER_WORD);

      if (TARGET_64BIT)
        {
          cfun_frame_layout.f4_offset
            = (cfun_frame_layout.gprs_offset
               - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));

          cfun_frame_layout.f0_offset
            = (cfun_frame_layout.f4_offset
               - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
        }
      else
        {
          /* On 31 bit we have to care about alignment of the
             floating point regs to provide fastest access.  */
          cfun_frame_layout.f0_offset
            = ((cfun_frame_layout.gprs_offset
                & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
               - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));

          cfun_frame_layout.f4_offset
            = (cfun_frame_layout.f0_offset
               - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
        }
    }
  else /* no backchain */
    {
      cfun_frame_layout.f4_offset
        = (STACK_POINTER_OFFSET
           - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));

      cfun_frame_layout.f0_offset
        = (cfun_frame_layout.f4_offset
           - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));

      cfun_frame_layout.gprs_offset
        = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
    }

  if (current_function_is_leaf
      && !TARGET_TPF_PROFILING
      && cfun_frame_layout.frame_size == 0
      && !cfun_save_high_fprs_p
      && !current_function_calls_alloca
      && !current_function_stdarg)
    return;

  if (!TARGET_PACKED_STACK)
    cfun_frame_layout.frame_size += (STACK_POINTER_OFFSET
                                     + current_function_outgoing_args_size
                                     + cfun_frame_layout.high_fprs * 8);
  else
    {
      if (TARGET_BACKCHAIN)
        cfun_frame_layout.frame_size += UNITS_PER_WORD;

      /* No alignment trouble here because f8-f15 are only saved under
         64 bit.  */
      cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
                                               cfun_frame_layout.f4_offset),
                                          cfun_frame_layout.gprs_offset)
                                     - cfun_frame_layout.high_fprs * 8);

      cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;

      for (i = 0; i < 8; i++)
        if (cfun_fpr_bit_p (i))
          cfun_frame_layout.frame_size += 8;

      cfun_frame_layout.frame_size += cfun_gprs_save_area_size;

      /* If an odd number of gprs has to be saved under 31 bit, we have
         to adjust the frame size to sustain 8 byte alignment of stack
         frames.  */
      cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
                                       STACK_BOUNDARY / BITS_PER_UNIT - 1)
                                      & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));

      cfun_frame_layout.frame_size += current_function_outgoing_args_size;
    }
}

/* Generate frame layout.  Fills in register and frame data for the current
   function in cfun->machine.  This routine can be called multiple times;
   it will re-do the complete frame layout every time.  */

static void
s390_init_frame_layout (void)
{
  HOST_WIDE_INT frame_size;
  int base_used;
  int clobbered_regs[16];

  /* On S/390 machines, we may need to perform branch splitting, which
     will require both base and return address register.  We have no
     choice but to assume we're going to need them until right at the
     end of the machine dependent reorg phase.  */
  if (!TARGET_CPU_ZARCH)
    cfun->machine->split_branches_pending_p = true;

  do
    {
      frame_size = cfun_frame_layout.frame_size;

      /* Try to predict whether we'll need the base register.  */
      base_used = cfun->machine->split_branches_pending_p
                  || current_function_uses_const_pool
                  || (!DISP_IN_RANGE (frame_size)
                      && !CONST_OK_FOR_K (frame_size));

      /* Decide which register to use as literal pool base.  In small
         leaf functions, try to use an unused call-clobbered register
         as base register to avoid save/restore overhead.  */
      if (!base_used)
        cfun->machine->base_reg = NULL_RTX;
      else if (current_function_is_leaf && !df_regs_ever_live_p (5))
        cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
      else
        cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);

      s390_register_info (clobbered_regs);
      s390_frame_info ();
    }
  while (frame_size != cfun_frame_layout.frame_size);
}

/* Update frame layout.  Recompute actual register save data based on
   current info and update regs_ever_live for the special registers.
   May be called multiple times, but may never cause *more* registers
   to be saved than s390_init_frame_layout allocated room for.  */

static void
s390_update_frame_layout (void)
{
  int clobbered_regs[16];

  s390_register_info (clobbered_regs);

  df_set_regs_ever_live (BASE_REGNUM,
                         clobbered_regs[BASE_REGNUM] ? true : false);
  df_set_regs_ever_live (RETURN_REGNUM,
                         clobbered_regs[RETURN_REGNUM] ? true : false);
  df_set_regs_ever_live (STACK_POINTER_REGNUM,
                         clobbered_regs[STACK_POINTER_REGNUM] ? true : false);

  if (cfun->machine->base_reg)
    df_set_regs_ever_live (REGNO (cfun->machine->base_reg), true);
}

/* Return true if it is legal to put a value with MODE into REGNO.  */

bool
s390_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
{
  switch (REGNO_REG_CLASS (regno))
    {
    case FP_REGS:
      if (REGNO_PAIR_OK (regno, mode))
        {
          if (mode == SImode || mode == DImode)
            return true;

          if (FLOAT_MODE_P (mode) && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
            return true;
        }
      break;
    case ADDR_REGS:
      if (FRAME_REGNO_P (regno) && mode == Pmode)
        return true;

      /* fallthrough */
    case GENERAL_REGS:
      if (REGNO_PAIR_OK (regno, mode))
        {
          if (TARGET_64BIT
              || (mode != TFmode && mode != TCmode && mode != TDmode))
            return true;
        }
      break;
    case CC_REGS:
      if (GET_MODE_CLASS (mode) == MODE_CC)
        return true;
      break;
    case ACCESS_REGS:
      if (REGNO_PAIR_OK (regno, mode))
        {
          if (mode == SImode || mode == Pmode)
            return true;
        }
      break;
    default:
      return false;
    }

  return false;
}

/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

bool
s390_hard_regno_rename_ok (unsigned int old_reg, unsigned int new_reg)
{
  /* Once we've decided upon a register to use as base register, it must
     no longer be used for any other purpose.  */
  if (cfun->machine->base_reg)
    if (REGNO (cfun->machine->base_reg) == old_reg
        || REGNO (cfun->machine->base_reg) == new_reg)
      return false;

  return true;
}

/* Maximum number of registers to represent a value of mode MODE
   in a register of class CLASS.  */
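
/* For example, a TFmode value (16 bytes) occupies (16 + 8 - 1) / 8 == 2
   floating point registers, but (16 + 4 - 1) / 4 == 4 word-sized GPRs
   on a 31-bit target.  */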

int
s390_class_max_nregs (enum reg_class class, enum machine_mode mode)
{
  switch (class)
    {
    case FP_REGS:
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
        return 2 * ((GET_MODE_SIZE (mode) / 2 + 8 - 1) / 8);
      else
        return (GET_MODE_SIZE (mode) + 8 - 1) / 8;
    case ACCESS_REGS:
      return (GET_MODE_SIZE (mode) + 4 - 1) / 4;
    default:
      break;
    }
  return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
}

/* Return true if register FROM can be eliminated via register TO.  */

bool
s390_can_eliminate (int from, int to)
{
  /* On zSeries machines, we have not marked the base register as fixed.
     Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM.
     If a function requires the base register, we say here that this
     elimination cannot be performed.  This will cause reload to free
     up the base register (as if it were fixed).  On the other hand,
     if the current function does *not* require the base register, we
     say here the elimination succeeds, which in turn allows reload
     to allocate the base register for any other purpose.  */
  if (from == BASE_REGNUM && to == BASE_REGNUM)
    {
      if (TARGET_CPU_ZARCH)
        {
          s390_init_frame_layout ();
          return cfun->machine->base_reg == NULL_RTX;
        }

      return false;
    }

  /* Everything else must point into the stack frame.  */
  gcc_assert (to == STACK_POINTER_REGNUM
              || to == HARD_FRAME_POINTER_REGNUM);

  gcc_assert (from == FRAME_POINTER_REGNUM
              || from == ARG_POINTER_REGNUM
              || from == RETURN_ADDRESS_POINTER_REGNUM);

  /* Make sure we actually saved the return address.  */
  if (from == RETURN_ADDRESS_POINTER_REGNUM)
    if (!current_function_calls_eh_return
        && !current_function_stdarg
        && !cfun_frame_layout.save_return_addr_p)
      return false;

  return true;
}
6932}
6933
6934/* Return offset between register FROM and TO initially after prolog. */
a38e09bc
AK
6935
6936HOST_WIDE_INT
91086990 6937s390_initial_elimination_offset (int from, int to)
a38e09bc 6938{
91086990
UW
6939 HOST_WIDE_INT offset;
6940 int index;
a38e09bc 6941
91086990
UW
6942 /* ??? Why are we called for non-eliminable pairs? */
6943 if (!s390_can_eliminate (from, to))
6944 return 0;
6945
6946 switch (from)
6947 {
6948 case FRAME_POINTER_REGNUM:
63296cb1
AK
6949 offset = (get_frame_size()
6950 + STACK_POINTER_OFFSET
6951 + current_function_outgoing_args_size);
91086990 6952 break;
adf39f8f 6953
91086990
UW
6954 case ARG_POINTER_REGNUM:
6955 s390_init_frame_layout ();
6956 offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
6957 break;
6958
6959 case RETURN_ADDRESS_POINTER_REGNUM:
6960 s390_init_frame_layout ();
fb3712f6 6961 index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr_slot;
91086990
UW
6962 gcc_assert (index >= 0);
6963 offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
6964 offset += index * UNITS_PER_WORD;
6965 break;
6966
7633f08e
UW
6967 case BASE_REGNUM:
6968 offset = 0;
6969 break;
6970
91086990
UW
6971 default:
6972 gcc_unreachable ();
6973 }
6974
6975 return offset;
a38e09bc
AK
6976}

/* Emit insn to save fpr REGNUM at offset OFFSET relative
   to register BASE.  Return generated insn.  */

static rtx
save_fpr (rtx base, int offset, int regnum)
{
  rtx addr;
  addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));

  if (regnum >= 16 && regnum <= (16 + FP_ARG_NUM_REG))
    set_mem_alias_set (addr, get_varargs_alias_set ());
  else
    set_mem_alias_set (addr, get_frame_alias_set ());

  return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
}

/* Emit insn to restore fpr REGNUM from offset OFFSET relative
   to register BASE.  Return generated insn.  */

static rtx
restore_fpr (rtx base, int offset, int regnum)
{
  rtx addr;
  addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
  set_mem_alias_set (addr, get_frame_alias_set ());

  return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
}

/* Generate insn to save registers FIRST to LAST into
   the register save area located at offset OFFSET
   relative to register BASE.  */

static rtx
save_gprs (rtx base, int offset, int first, int last)
{
  rtx addr, insn, note;
  int i;

  addr = plus_constant (base, offset);
  addr = gen_rtx_MEM (Pmode, addr);

  set_mem_alias_set (addr, get_frame_alias_set ());

  /* Special-case single register.  */
  if (first == last)
    {
      if (TARGET_64BIT)
        insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
      else
        insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));

      RTX_FRAME_RELATED_P (insn) = 1;
      return insn;
    }


  insn = gen_store_multiple (addr,
                             gen_rtx_REG (Pmode, first),
                             GEN_INT (last - first + 1));

  if (first <= 6 && current_function_stdarg)
    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      {
        rtx mem = XEXP (XVECEXP (PATTERN (insn), 0, i), 0);

        if (first + i <= 6)
          set_mem_alias_set (mem, get_varargs_alias_set ());
      }

  /* We need to set the FRAME_RELATED flag on all SETs
     inside the store-multiple pattern.

     However, we must not emit DWARF records for registers 2..5
     if they are stored for use by variable arguments ...

     ??? Unfortunately, it is not enough to simply not set the
     FRAME_RELATED flags for those SETs, because the first SET
     of the PARALLEL is always treated as if it had the flag
     set, even if it does not.  Therefore we emit a new pattern
     without those registers as REG_FRAME_RELATED_EXPR note.  */

  if (first >= 6)
    {
      rtx pat = PATTERN (insn);

      for (i = 0; i < XVECLEN (pat, 0); i++)
        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;

      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else if (last >= 6)
    {
      addr = plus_constant (base, offset + (6 - first) * UNITS_PER_WORD);
      note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
                                 gen_rtx_REG (Pmode, 6),
                                 GEN_INT (last - 6 + 1));
      note = PATTERN (note);

      REG_NOTES (insn) =
        gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                           note, REG_NOTES (insn));

      for (i = 0; i < XVECLEN (note, 0); i++)
        if (GET_CODE (XVECEXP (note, 0, i)) == SET)
          RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;

      RTX_FRAME_RELATED_P (insn) = 1;
    }

  return insn;
}
9db1d521 7092
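/* For illustration: with the standard 64-bit frame layout the code
   above typically emits a single store-multiple such as

       stmg    %r6,%r15,48(%r15)

   saving GPRs 6..15 into their save-area slots (the slot of GPR n
   lies at offset n * UNITS_PER_WORD in the standard layout; packed
   stack layouts differ).  */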
c3cc6b78 7093/* Generate insn to restore registers FIRST to LAST from
c7453384 7094 the register save area located at offset OFFSET
c3cc6b78 7095 relative to register BASE. */
9db1d521 7096
c3cc6b78 7097static rtx
9c808aad 7098restore_gprs (rtx base, int offset, int first, int last)
4023fb28 7099{
c3cc6b78
UW
7100 rtx addr, insn;
7101
adf39f8f 7102 addr = plus_constant (base, offset);
c3cc6b78 7103 addr = gen_rtx_MEM (Pmode, addr);
dcc9eb26 7104 set_mem_alias_set (addr, get_frame_alias_set ());
c3cc6b78
UW
7105
7106 /* Special-case single register. */
7107 if (first == last)
7108 {
7109 if (TARGET_64BIT)
7110 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
7111 else
7112 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
7113
7114 return insn;
7115 }
7116
7117 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
7118 addr,
7119 GEN_INT (last - first + 1));
7120 return insn;
4023fb28 7121}
9db1d521 7122
585539a1 7123/* Return insn sequence to load the GOT register. */
fd7643fb
UW
7124
7125static GTY(()) rtx got_symbol;
585539a1
UW
7126rtx
7127s390_load_got (void)
fd7643fb 7128{
585539a1
UW
7129 rtx insns;
7130
fd7643fb
UW
7131 if (!got_symbol)
7132 {
7133 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
7134 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
7135 }
7136
585539a1
UW
7137 start_sequence ();
7138
9e8327e3 7139 if (TARGET_CPU_ZARCH)
fd7643fb 7140 {
585539a1 7141 emit_move_insn (pic_offset_table_rtx, got_symbol);
fd7643fb
UW
7142 }
7143 else
7144 {
585539a1 7145 rtx offset;
fd7643fb 7146
c7453384 7147 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
fd7643fb
UW
7148 UNSPEC_LTREL_OFFSET);
7149 offset = gen_rtx_CONST (Pmode, offset);
7150 offset = force_const_mem (Pmode, offset);
7151
585539a1 7152 emit_move_insn (pic_offset_table_rtx, offset);
fd7643fb 7153
c7453384 7154 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
fd7643fb
UW
7155 UNSPEC_LTREL_BASE);
7156 offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
7157
585539a1 7158 emit_move_insn (pic_offset_table_rtx, offset);
fd7643fb 7159 }
585539a1
UW
7160
7161 insns = get_insns ();
7162 end_sequence ();
7163 return insns;
fd7643fb
UW
7164}
7165
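/* For illustration: on z/Architecture the sequence built above is a
   single

       larl    %r12,_GLOBAL_OFFSET_TABLE_

   while on 31-bit ESA/390 the GOT offset is loaded from the literal
   pool and added to the literal pool base (a sketch; the exact code
   depends on context).  */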
4023fb28 7166/* Expand the prologue into a bunch of separate insns. */
9db1d521 7167
4023fb28 7168void
9c808aad 7169s390_emit_prologue (void)
4023fb28 7170{
4023fb28
UW
7171 rtx insn, addr;
7172 rtx temp_reg;
2c153108 7173 int i;
adf39f8f
AK
7174 int offset;
7175 int next_fpr = 0;
9db1d521 7176
91086990 7177 /* Complete frame layout. */
b767fc11 7178
91086990 7179 s390_update_frame_layout ();
4023fb28 7180
585539a1
UW
7181 /* Annotate all constant pool references to let the scheduler know
7182 they implicitly use the base register. */
7183
7184 push_topmost_sequence ();
7185
7186 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7187 if (INSN_P (insn))
6fb5fa3c
DB
7188 {
7189 annotate_constant_pool_refs (&PATTERN (insn));
7190 df_insn_rescan (insn);
7191 }
585539a1
UW
7192
7193 pop_topmost_sequence ();
7194
c7453384
EC
7195 /* Choose the best register for temporary use within the prologue.
7196 See below for why TPF must use register 1. */
7197
7bcebb25
AK
7198 if (!has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
7199 && !current_function_is_leaf
7200 && !TARGET_TPF_PROFILING)
4023fb28 7201 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
9db1d521 7202 else
4023fb28 7203 temp_reg = gen_rtx_REG (Pmode, 1);
9db1d521 7204
4023fb28 7205 /* Save call saved gprs. */
adf39f8f 7206 if (cfun_frame_layout.first_save_gpr != -1)
2790879f
AK
7207 {
7208 insn = save_gprs (stack_pointer_rtx,
fb3712f6
AK
7209 cfun_frame_layout.gprs_offset +
7210 UNITS_PER_WORD * (cfun_frame_layout.first_save_gpr
7211 - cfun_frame_layout.first_save_gpr_slot),
2790879f
AK
7212 cfun_frame_layout.first_save_gpr,
7213 cfun_frame_layout.last_save_gpr);
7214 emit_insn (insn);
7215 }
4023fb28 7216
5af2f3d3 7217 /* Dummy insn to mark literal pool slot. */
c7453384 7218
91086990
UW
7219 if (cfun->machine->base_reg)
7220 emit_insn (gen_main_pool (cfun->machine->base_reg));
c7453384 7221
adf39f8f 7222 offset = cfun_frame_layout.f0_offset;
4023fb28 7223
adf39f8f
AK
7224 /* Save f0 and f2. */
7225 for (i = 0; i < 2; i++)
7226 {
7227 if (cfun_fpr_bit_p (i))
7228 {
7229 save_fpr (stack_pointer_rtx, offset, i + 16);
7230 offset += 8;
7231 }
b3d31392 7232 else if (!TARGET_PACKED_STACK)
adf39f8f
AK
7233 offset += 8;
7234 }
9db1d521 7235
adf39f8f
AK
7236 /* Save f4 and f6. */
7237 offset = cfun_frame_layout.f4_offset;
7238 for (i = 2; i < 4; i++)
7239 {
7240 if (cfun_fpr_bit_p (i))
4023fb28 7241 {
adf39f8f
AK
7242 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7243 offset += 8;
7244
7245 /* If f4 and f6 are call-clobbered, they are saved only because of
7246 stdarg and are therefore not frame related. */
7247 if (!call_really_used_regs[i + 16])
7248 RTX_FRAME_RELATED_P (insn) = 1;
4023fb28 7249 }
b3d31392 7250 else if (!TARGET_PACKED_STACK)
adf39f8f
AK
7251 offset += 8;
7252 }
7253
b3d31392 7254 if (TARGET_PACKED_STACK
adf39f8f
AK
7255 && cfun_save_high_fprs_p
7256 && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
7257 {
7258 offset = (cfun_frame_layout.f8_offset
7259 + (cfun_frame_layout.high_fprs - 1) * 8);
7260
7261 for (i = 15; i > 7 && offset >= 0; i--)
7262 if (cfun_fpr_bit_p (i))
7263 {
7264 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7265
7266 RTX_FRAME_RELATED_P (insn) = 1;
7267 offset -= 8;
7268 }
7269 if (offset >= cfun_frame_layout.f8_offset)
7270 next_fpr = i + 16;
7271 }
7272
b3d31392 7273 if (!TARGET_PACKED_STACK)
adf39f8f 7274 next_fpr = cfun_save_high_fprs_p ? 31 : 0;
9db1d521 7275
4023fb28 7276 /* Decrement stack pointer. */
9db1d521 7277
adf39f8f 7278 if (cfun_frame_layout.frame_size > 0)
4023fb28 7279 {
adf39f8f 7280 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
9db1d521 7281
d75f90f1
AK
7282 if (s390_stack_size)
7283 {
690e7b63 7284 HOST_WIDE_INT stack_guard;
d75f90f1 7285
690e7b63
AK
7286 if (s390_stack_guard)
7287 stack_guard = s390_stack_guard;
d75f90f1 7288 else
690e7b63
AK
7289 {
7290 /* If no stack guard value is provided, the smallest power of 2
7291 that is at least the current frame size is chosen. */
7292 stack_guard = 1;
7293 while (stack_guard < cfun_frame_layout.frame_size)
7294 stack_guard <<= 1;
7295 }
d75f90f1 7296
690e7b63
AK
7297 if (cfun_frame_layout.frame_size >= s390_stack_size)
7298 {
7299 warning (0, "frame size of function %qs is "
7300 HOST_WIDE_INT_PRINT_DEC
7301 " bytes exceeding user provided stack limit of "
7302 HOST_WIDE_INT_PRINT_DEC " bytes. "
7303 "An unconditional trap is added.",
7304 current_function_name(), cfun_frame_layout.frame_size,
7305 s390_stack_size);
7306 emit_insn (gen_trap ());
7307 }
7308 else
7309 {
7310 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
7311 & ~(stack_guard - 1));
7312 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
7313 GEN_INT (stack_check_mask));
7314 if (TARGET_64BIT)
7315 gen_cmpdi (t, const0_rtx);
7316 else
7317 gen_cmpsi (t, const0_rtx);
7318
7319 emit_insn (gen_conditional_trap (gen_rtx_EQ (CCmode,
7320 gen_rtx_REG (CCmode,
7321 CC_REGNUM),
7322 const0_rtx),
7323 const0_rtx));
7324 }
d75f90f1
AK
7325 }
7326
7327 if (s390_warn_framesize > 0
7328 && cfun_frame_layout.frame_size >= s390_warn_framesize)
d4ee4d25 7329 warning (0, "frame size of %qs is " HOST_WIDE_INT_PRINT_DEC " bytes",
d75f90f1
AK
7330 current_function_name (), cfun_frame_layout.frame_size);
7331
7332 if (s390_warn_dynamicstack_p && cfun->calls_alloca)
d4ee4d25 7333 warning (0, "%qs uses dynamic stack allocation", current_function_name ());
d75f90f1 7334
4023fb28 7335 /* Save incoming stack pointer into temp reg. */
66480e91 7336 if (TARGET_BACKCHAIN || next_fpr)
adf39f8f 7337 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
c7453384 7338
fae778eb 7339 /* Subtract frame size from stack pointer. */
4023fb28 7340
d3632d41
UW
7341 if (DISP_IN_RANGE (INTVAL (frame_off)))
7342 {
c7453384 7343 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
adf39f8f 7344 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
9c808aad 7345 frame_off));
d3632d41
UW
7346 insn = emit_insn (insn);
7347 }
7348 else
7349 {
b5c67a49 7350 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
d3632d41
UW
7351 frame_off = force_const_mem (Pmode, frame_off);
7352
7353 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
585539a1 7354 annotate_constant_pool_refs (&PATTERN (insn));
d3632d41 7355 }
4023fb28 7356
4023fb28 7357 RTX_FRAME_RELATED_P (insn) = 1;
c7453384 7358 REG_NOTES (insn) =
4023fb28
UW
7359 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7360 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
adf39f8f
AK
7361 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7362 GEN_INT (-cfun_frame_layout.frame_size))),
4023fb28
UW
7363 REG_NOTES (insn));
7364
7365 /* Set backchain. */
c7453384 7366
66480e91 7367 if (TARGET_BACKCHAIN)
9db1d521 7368 {
adf39f8f
AK
7369 if (cfun_frame_layout.backchain_offset)
7370 addr = gen_rtx_MEM (Pmode,
7371 plus_constant (stack_pointer_rtx,
7372 cfun_frame_layout.backchain_offset));
7373 else
7374 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
dcc9eb26 7375 set_mem_alias_set (addr, get_frame_alias_set ());
4023fb28 7376 insn = emit_insn (gen_move_insn (addr, temp_reg));
9db1d521 7377 }
7d798969
UW
7378
7379 /* If we support asynchronous exceptions (e.g. for Java),
7380 we need to make sure the backchain pointer is set up
7381 before any possibly trapping memory access. */
7382
66480e91 7383 if (TARGET_BACKCHAIN && flag_non_call_exceptions)
7d798969
UW
7384 {
7385 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
7386 emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
7387 }
4023fb28 7388 }
9db1d521 7389
4023fb28 7390 /* Save fprs 8 - 15 (64 bit ABI). */
c7453384 7391
adf39f8f 7392 if (cfun_save_high_fprs_p && next_fpr)
4023fb28 7393 {
adf39f8f
AK
7394 insn = emit_insn (gen_add2_insn (temp_reg,
7395 GEN_INT (cfun_frame_layout.f8_offset)));
7396
7397 offset = 0;
9db1d521 7398
adf39f8f
AK
7399 for (i = 24; i <= next_fpr; i++)
7400 if (cfun_fpr_bit_p (i - 16))
4023fb28 7401 {
c7453384 7402 rtx addr = plus_constant (stack_pointer_rtx,
adf39f8f
AK
7403 cfun_frame_layout.frame_size
7404 + cfun_frame_layout.f8_offset
7405 + offset);
7406
7407 insn = save_fpr (temp_reg, offset, i);
7408 offset += 8;
4023fb28 7409 RTX_FRAME_RELATED_P (insn) = 1;
c7453384 7410 REG_NOTES (insn) =
4023fb28 7411 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
adf39f8f
AK
7412 gen_rtx_SET (VOIDmode,
7413 gen_rtx_MEM (DFmode, addr),
7414 gen_rtx_REG (DFmode, i)),
7415 REG_NOTES (insn));
4023fb28
UW
7416 }
7417 }
c7453384 7418
4023fb28 7419 /* Set frame pointer, if needed. */
c7453384 7420
29742ba4 7421 if (frame_pointer_needed)
4023fb28
UW
7422 {
7423 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
7424 RTX_FRAME_RELATED_P (insn) = 1;
7425 }
9db1d521 7426
4023fb28 7427 /* Set up GOT pointer, if needed. */
c7453384 7428
6fb5fa3c 7429 if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
585539a1
UW
7430 {
7431 rtx insns = s390_load_got ();
7432
7433 for (insn = insns; insn; insn = NEXT_INSN (insn))
6fb5fa3c 7434 annotate_constant_pool_refs (&PATTERN (insn));
585539a1
UW
7435
7436 emit_insn (insns);
7437 }
c7453384 7438
3839e36a 7439 if (TARGET_TPF_PROFILING)
c7453384
EC
7440 {
7441 /* Generate a BAS instruction to serve as a function
7442 entry intercept to facilitate the use of tracing
2f7e5a0d
EC
7443 algorithms located at the branch target. */
7444 emit_insn (gen_prologue_tpf ());
c7453384
EC
7445
7446 /* Emit a blockage here so that all code
7447 lies between the profiling mechanisms. */
7448 emit_insn (gen_blockage ());
7449 }
4023fb28 7450}
9db1d521 7451
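/* For illustration: the default stack guard computed above is the
   smallest power of 2 that is at least the frame size; as standalone C,

       long long guard = 1;
       while (guard < frame_size)
         guard <<= 1;

   so a frame of 5000 bytes yields a guard of 8192.  The emitted check
   then tests sp & ((s390_stack_size - 1) & ~(guard - 1)); with both
   values powers of 2, that AND is zero exactly when sp modulo
   s390_stack_size lies below the guard, which triggers the
   conditional trap.  */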
b1c9bc51 7452/* Expand the epilogue into a bunch of separate insns. */
9db1d521 7453
4023fb28 7454void
ed9676cf 7455s390_emit_epilogue (bool sibcall)
4023fb28 7456{
4023fb28 7457 rtx frame_pointer, return_reg;
5d4d885c 7458 int area_bottom, area_top, offset = 0;
adf39f8f 7459 int next_offset;
4023fb28 7460 rtvec p;
7333171f 7461 int i;
9db1d521 7462
3839e36a 7463 if (TARGET_TPF_PROFILING)
c7453384
EC
7464 {
7465
7466 /* Generate a BAS instruction to serve as a function
7467 exit intercept to facilitate the use of tracing
2f7e5a0d 7468 algorithms located at the branch target. */
c7453384 7469
c7453384
EC
7470 /* Emit a blockage here so that all code
7471 lies between the profiling mechanisms. */
7472 emit_insn (gen_blockage ());
7473
2f7e5a0d 7474 emit_insn (gen_epilogue_tpf ());
c7453384
EC
7475 }
7476
4023fb28 7477 /* Check whether to use frame or stack pointer for restore. */
9db1d521 7478
adf39f8f
AK
7479 frame_pointer = (frame_pointer_needed
7480 ? hard_frame_pointer_rtx : stack_pointer_rtx);
9db1d521 7481
adf39f8f 7482 s390_frame_area (&area_bottom, &area_top);
9db1d521 7483
c7453384 7484 /* Check whether we can access the register save area.
4023fb28 7485 If not, increment the frame pointer as required. */
9db1d521 7486
4023fb28
UW
7487 if (area_top <= area_bottom)
7488 {
7489 /* Nothing to restore. */
7490 }
adf39f8f
AK
7491 else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
7492 && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
4023fb28
UW
7493 {
7494 /* Area is in range. */
adf39f8f 7495 offset = cfun_frame_layout.frame_size;
4023fb28
UW
7496 }
7497 else
7498 {
7499 rtx insn, frame_off;
9db1d521 7500
c7453384 7501 offset = area_bottom < 0 ? -area_bottom : 0;
adf39f8f 7502 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);
9db1d521 7503
d3632d41
UW
7504 if (DISP_IN_RANGE (INTVAL (frame_off)))
7505 {
c7453384 7506 insn = gen_rtx_SET (VOIDmode, frame_pointer,
d3632d41
UW
7507 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
7508 insn = emit_insn (insn);
7509 }
7510 else
7511 {
b5c67a49 7512 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
d3632d41 7513 frame_off = force_const_mem (Pmode, frame_off);
9db1d521 7514
d3632d41 7515 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
585539a1 7516 annotate_constant_pool_refs (&PATTERN (insn));
d3632d41 7517 }
4023fb28 7518 }
9db1d521 7519
4023fb28
UW
7520 /* Restore call saved fprs. */
7521
7522 if (TARGET_64BIT)
9db1d521 7523 {
adf39f8f
AK
7524 if (cfun_save_high_fprs_p)
7525 {
7526 next_offset = cfun_frame_layout.f8_offset;
7527 for (i = 24; i < 32; i++)
7528 {
7529 if (cfun_fpr_bit_p (i - 16))
7530 {
7531 restore_fpr (frame_pointer,
7532 offset + next_offset, i);
7533 next_offset += 8;
7534 }
7535 }
7536 }
7537
9db1d521
HP
7538 }
7539 else
7540 {
adf39f8f 7541 next_offset = cfun_frame_layout.f4_offset;
7333171f 7542 for (i = 18; i < 20; i++)
adf39f8f
AK
7543 {
7544 if (cfun_fpr_bit_p (i - 16))
7545 {
7546 restore_fpr (frame_pointer,
7547 offset + next_offset, i);
7548 next_offset += 8;
7549 }
b3d31392 7550 else if (!TARGET_PACKED_STACK)
adf39f8f
AK
7551 next_offset += 8;
7552 }
7553
4023fb28 7554 }
9db1d521 7555
4023fb28
UW
7556 /* Return register. */
7557
c7453384 7558 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4023fb28
UW
7559
7560 /* Restore call saved gprs. */
7561
adf39f8f 7562 if (cfun_frame_layout.first_restore_gpr != -1)
4023fb28 7563 {
c3cc6b78 7564 rtx insn, addr;
1447dc69
HP
7565 int i;
7566
c7453384 7567 /* Check for global registers and save them
1447dc69
HP
7568 to the stack locations from which they get restored. */
7569
adf39f8f
AK
7570 for (i = cfun_frame_layout.first_restore_gpr;
7571 i <= cfun_frame_layout.last_restore_gpr;
1447dc69
HP
7572 i++)
7573 {
c7453384 7574 /* These registers are special and need to be
cf5ee720 7575 restored in any case. */
c7453384 7576 if (i == STACK_POINTER_REGNUM
cf5ee720 7577 || i == RETURN_REGNUM
490ceeb4 7578 || i == BASE_REGNUM
5d4d885c 7579 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
cf5ee720
UW
7580 continue;
7581
1447dc69
HP
7582 if (global_regs[i])
7583 {
c7453384 7584 addr = plus_constant (frame_pointer,
adf39f8f 7585 offset + cfun_frame_layout.gprs_offset
fb3712f6 7586 + (i - cfun_frame_layout.first_save_gpr_slot)
adf39f8f 7587 * UNITS_PER_WORD);
1447dc69 7588 addr = gen_rtx_MEM (Pmode, addr);
dcc9eb26 7589 set_mem_alias_set (addr, get_frame_alias_set ());
1447dc69 7590 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
c7453384 7591 }
1447dc69 7592 }
4023fb28 7593
ed9676cf 7594 if (! sibcall)
9db1d521 7595 {
ed9676cf
AK
7596 /* Fetch the return address from the stack before the load multiple;
7597 this helps scheduling. */
38899e29 7598
adf39f8f
AK
7599 if (cfun_frame_layout.save_return_addr_p
7600 || (cfun_frame_layout.first_restore_gpr < BASE_REGNUM
7601 && cfun_frame_layout.last_restore_gpr > RETURN_REGNUM))
ed9676cf
AK
7602 {
7603 int return_regnum = find_unused_clobbered_reg();
7604 if (!return_regnum)
7605 return_regnum = 4;
7606 return_reg = gen_rtx_REG (Pmode, return_regnum);
38899e29 7607
ed9676cf 7608 addr = plus_constant (frame_pointer,
adf39f8f
AK
7609 offset + cfun_frame_layout.gprs_offset
7610 + (RETURN_REGNUM
fb3712f6 7611 - cfun_frame_layout.first_save_gpr_slot)
adf39f8f 7612 * UNITS_PER_WORD);
ed9676cf 7613 addr = gen_rtx_MEM (Pmode, addr);
dcc9eb26 7614 set_mem_alias_set (addr, get_frame_alias_set ());
ed9676cf
AK
7615 emit_move_insn (return_reg, addr);
7616 }
9db1d521 7617 }
4023fb28 7618
adf39f8f
AK
7619 insn = restore_gprs (frame_pointer,
7620 offset + cfun_frame_layout.gprs_offset
7621 + (cfun_frame_layout.first_restore_gpr
fb3712f6 7622 - cfun_frame_layout.first_save_gpr_slot)
adf39f8f
AK
7623 * UNITS_PER_WORD,
7624 cfun_frame_layout.first_restore_gpr,
7625 cfun_frame_layout.last_restore_gpr);
c3cc6b78 7626 emit_insn (insn);
4023fb28 7627 }
9db1d521 7628
ed9676cf
AK
7629 if (! sibcall)
7630 {
c7453384 7631
ed9676cf 7632 /* Return to caller. */
38899e29 7633
ed9676cf 7634 p = rtvec_alloc (2);
38899e29 7635
ed9676cf
AK
7636 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
7637 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
7638 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
7639 }
9db1d521
HP
7640}
7641
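/* For illustration: a typical 64-bit epilogue produced here is

       lmg     %r6,%r15,<frame_size+48>(%r15)
       br      %r14

   one load-multiple restoring the call-saved GPRs (which also reloads
   the stack pointer %r15), followed by a branch to the return address
   in %r14.  Offsets assume the standard, non-packed stack layout.  */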
9db1d521 7642
c7453384 7643/* Return the size in bytes of a function argument of
994fe660
UW
7644 type TYPE and/or mode MODE. At least one of TYPE or
7645 MODE must be specified. */
9db1d521
HP
7646
7647static int
9c808aad 7648s390_function_arg_size (enum machine_mode mode, tree type)
9db1d521
HP
7649{
7650 if (type)
7651 return int_size_in_bytes (type);
7652
d65f7478 7653 /* No type info available for some library calls ... */
9db1d521
HP
7654 if (mode != BLKmode)
7655 return GET_MODE_SIZE (mode);
7656
7657 /* If we have neither type nor mode, abort. */
8d933e31 7658 gcc_unreachable ();
9db1d521
HP
7659}
7660
82b1c974
UW
7661/* Return true if a function argument of type TYPE and mode MODE
7662 is to be passed in a floating-point register, if available. */
7663
7664static bool
9c808aad 7665s390_function_arg_float (enum machine_mode mode, tree type)
82b1c974 7666{
8c17530e
UW
7667 int size = s390_function_arg_size (mode, type);
7668 if (size > 8)
7669 return false;
7670
82b1c974
UW
7671 /* Soft-float changes the ABI: no floating-point registers are used. */
7672 if (TARGET_SOFT_FLOAT)
7673 return false;
7674
7675 /* No type info available for some library calls ... */
7676 if (!type)
4dc19cc0 7677 return mode == SFmode || mode == DFmode || mode == SDmode || mode == DDmode;
82b1c974
UW
7678
7679 /* The ABI says that record types with a single member are treated
7680 just like that member would be. */
7681 while (TREE_CODE (type) == RECORD_TYPE)
7682 {
7683 tree field, single = NULL_TREE;
7684
7685 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7686 {
7687 if (TREE_CODE (field) != FIELD_DECL)
7688 continue;
7689
7690 if (single == NULL_TREE)
7691 single = TREE_TYPE (field);
7692 else
7693 return false;
7694 }
7695
7696 if (single == NULL_TREE)
7697 return false;
7698 else
7699 type = single;
7700 }
7701
7702 return TREE_CODE (type) == REAL_TYPE;
7703}
7704
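/* For illustration, under the rule above (assuming hard float):

       struct a { double d; };           passed in an FPR, like a double
       struct b { struct a x; };         likewise; the nesting is peeled
       struct c { float f; float g; };   8 bytes but two members: no FPR

   With -msoft-float none of these uses an FPR.  */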
8c17530e
UW
7705/* Return true if a function argument of type TYPE and mode MODE
7706 is to be passed in an integer register, or a pair of integer
7707 registers, if available. */
7708
7709static bool
7710s390_function_arg_integer (enum machine_mode mode, tree type)
7711{
7712 int size = s390_function_arg_size (mode, type);
7713 if (size > 8)
7714 return false;
7715
7716 /* No type info available for some library calls ... */
7717 if (!type)
7718 return GET_MODE_CLASS (mode) == MODE_INT
4dc19cc0 7719 || (TARGET_SOFT_FLOAT && SCALAR_FLOAT_MODE_P (mode));
8c17530e
UW
7720
7721 /* We accept small integral (and similar) types. */
7722 if (INTEGRAL_TYPE_P (type)
38899e29 7723 || POINTER_TYPE_P (type)
8c17530e
UW
7724 || TREE_CODE (type) == OFFSET_TYPE
7725 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
7726 return true;
7727
7728 /* We also accept structs of size 1, 2, 4, 8 that are not
38899e29 7729 passed in floating-point registers. */
8c17530e
UW
7730 if (AGGREGATE_TYPE_P (type)
7731 && exact_log2 (size) >= 0
7732 && !s390_function_arg_float (mode, type))
7733 return true;
7734
7735 return false;
7736}
7737
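/* For illustration: the exact_log2 test above admits aggregates of
   1, 2, 4 or 8 bytes only (equivalently, size > 0 && (size & (size - 1)) == 0
   combined with the size <= 8 check).  A 3-byte struct fails the test
   and is passed by reference instead (see s390_pass_by_reference).  */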
994fe660
UW
7738/* Return 1 if a function argument of type TYPE and mode MODE
7739 is to be passed by reference. The ABI specifies that only
7740 structures of size 1, 2, 4, or 8 bytes are passed by value,
7741 all other structures (and complex numbers) are passed by
7742 reference. */
7743
8cd5a4e0
RH
7744static bool
7745s390_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
7746 enum machine_mode mode, tree type,
7747 bool named ATTRIBUTE_UNUSED)
9db1d521
HP
7748{
7749 int size = s390_function_arg_size (mode, type);
8c17530e
UW
7750 if (size > 8)
7751 return true;
9db1d521
HP
7752
7753 if (type)
7754 {
8c17530e 7755 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
9db1d521
HP
7756 return 1;
7757
8c17530e
UW
7758 if (TREE_CODE (type) == COMPLEX_TYPE
7759 || TREE_CODE (type) == VECTOR_TYPE)
9db1d521
HP
7760 return 1;
7761 }
c7453384 7762
9db1d521 7763 return 0;
9db1d521
HP
7764}
7765
7766/* Update the data in CUM to advance over an argument of mode MODE and
7767 data type TYPE. (TYPE is null for libcalls where that information
994fe660
UW
7768 may not be available.) The boolean NAMED specifies whether the
7769 argument is a named argument (as opposed to an unnamed argument
7770 matching an ellipsis). */
9db1d521
HP
7771
7772void
9c808aad
AJ
7773s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
7774 tree type, int named ATTRIBUTE_UNUSED)
9db1d521 7775{
8cd5a4e0 7776 if (s390_function_arg_float (mode, type))
9db1d521 7777 {
82b1c974 7778 cum->fprs += 1;
9db1d521 7779 }
8c17530e 7780 else if (s390_function_arg_integer (mode, type))
9db1d521
HP
7781 {
7782 int size = s390_function_arg_size (mode, type);
7783 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
7784 }
8c17530e 7785 else
8d933e31 7786 gcc_unreachable ();
9db1d521
HP
7787}
7788
994fe660
UW
7789/* Define where to put the arguments to a function.
7790 Value is zero to push the argument on the stack,
7791 or a hard register in which to store the argument.
7792
7793 MODE is the argument's machine mode.
7794 TYPE is the data type of the argument (as a tree).
7795 This is null for libcalls where that information may
7796 not be available.
7797 CUM is a variable of type CUMULATIVE_ARGS which gives info about
7798 the preceding args and about the function being called.
7799 NAMED is nonzero if this argument is a named parameter
c7453384 7800 (otherwise it is an extra parameter matching an ellipsis).
994fe660
UW
7801
7802 On S/390, we use general purpose registers 2 through 6 to
7803 pass integer, pointer, and certain structure arguments, and
7804 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
7805 to pass floating point arguments. All remaining arguments
7806 are pushed to the stack. */
9db1d521
HP
7807
7808rtx
9c808aad
AJ
7809s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
7810 int named ATTRIBUTE_UNUSED)
9db1d521 7811{
82b1c974 7812 if (s390_function_arg_float (mode, type))
9db1d521 7813 {
29a79fcf 7814 if (cum->fprs + 1 > FP_ARG_NUM_REG)
9db1d521
HP
7815 return 0;
7816 else
f1c25d3b 7817 return gen_rtx_REG (mode, cum->fprs + 16);
9db1d521 7818 }
8c17530e 7819 else if (s390_function_arg_integer (mode, type))
9db1d521
HP
7820 {
7821 int size = s390_function_arg_size (mode, type);
7822 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
7823
29a79fcf 7824 if (cum->gprs + n_gprs > GP_ARG_NUM_REG)
9db1d521
HP
7825 return 0;
7826 else
f1c25d3b 7827 return gen_rtx_REG (mode, cum->gprs + 2);
9db1d521 7828 }
8c17530e
UW
7829
7830 /* After the real arguments, expand_call calls us once again
7831 with a void_type_node type. Whatever we return here is
7832 passed as operand 2 to the call expanders.
7833
7834 We don't need this feature ... */
7835 else if (type == void_type_node)
7836 return const0_rtx;
7837
8d933e31 7838 gcc_unreachable ();
8c17530e
UW
7839}
7840
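/* For illustration: given

       void f (int a, double b, int c);

   the rules above place a in %r2, b in %f0 and c in %r3; the GPR and
   FPR sequences advance independently.  A 16-byte struct argument is
   passed by reference, so only its address occupies a GPR.  */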
7841/* Return true if return values of type TYPE should be returned
7842 in a memory buffer whose address is passed by the caller as
7843 hidden first argument. */
7844
7845static bool
7846s390_return_in_memory (tree type, tree fundecl ATTRIBUTE_UNUSED)
7847{
7848 /* We accept small integral (and similar) types. */
7849 if (INTEGRAL_TYPE_P (type)
38899e29 7850 || POINTER_TYPE_P (type)
8c17530e
UW
7851 || TREE_CODE (type) == OFFSET_TYPE
7852 || TREE_CODE (type) == REAL_TYPE)
7853 return int_size_in_bytes (type) > 8;
7854
7855 /* Aggregates and similar constructs are always returned
7856 in memory. */
7857 if (AGGREGATE_TYPE_P (type)
7858 || TREE_CODE (type) == COMPLEX_TYPE
7859 || TREE_CODE (type) == VECTOR_TYPE)
7860 return true;
7861
7862 /* ??? We get called on all sorts of random stuff from
7863 aggregate_value_p. We can't abort, but it's not clear
7864 what's safe to return. Pretend it's a struct I guess. */
7865 return true;
7866}
7867
7868/* Define where to return a (scalar) value of type TYPE.
7869 If TYPE is null, define where to return a (scalar)
7870 value of mode MODE from a libcall. */
7871
7872rtx
7873s390_function_value (tree type, enum machine_mode mode)
7874{
7875 if (type)
7876 {
8df83eae 7877 int unsignedp = TYPE_UNSIGNED (type);
8c17530e
UW
7878 mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
7879 }
7880
4dc19cc0 7881 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT || SCALAR_FLOAT_MODE_P (mode));
8d933e31 7882 gcc_assert (GET_MODE_SIZE (mode) <= 8);
8c17530e 7883
4dc19cc0 7884 if (TARGET_HARD_FLOAT && SCALAR_FLOAT_MODE_P (mode))
8c17530e
UW
7885 return gen_rtx_REG (mode, 16);
7886 else
7887 return gen_rtx_REG (mode, 2);
9db1d521
HP
7888}
7889
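/* For illustration: a long or a pointer comes back in %r2 and a
   double in %f0, while any aggregate -- however small -- is returned
   through the hidden buffer pointer described at
   s390_return_in_memory above.  */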
7890
994fe660
UW
7891/* Create and return the va_list datatype.
7892
7893 On S/390, va_list is an array type equivalent to
7894
7895 typedef struct __va_list_tag
7896 {
7897 long __gpr;
7898 long __fpr;
7899 void *__overflow_arg_area;
7900 void *__reg_save_area;
994fe660
UW
7901 } va_list[1];
7902
7903 where __gpr and __fpr hold the number of general purpose
7904 or floating point arguments used up to now, respectively,
c7453384 7905 __overflow_arg_area points to the stack location of the
994fe660
UW
7906 next argument passed on the stack, and __reg_save_area
7907 always points to the start of the register area in the
7908 call frame of the current function. The function prologue
7909 saves all registers used for argument passing into this
7910 area if the function uses variable arguments. */
9db1d521 7911
c35d187f
RH
7912static tree
7913s390_build_builtin_va_list (void)
9db1d521
HP
7914{
7915 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
7916
47798692 7917 record = lang_hooks.types.make_type (RECORD_TYPE);
9db1d521
HP
7918
7919 type_decl =
7920 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
7921
c7453384 7922 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
9db1d521 7923 long_integer_type_node);
c7453384 7924 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
9db1d521
HP
7925 long_integer_type_node);
7926 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
7927 ptr_type_node);
7928 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
7929 ptr_type_node);
7930
29a79fcf
UW
7931 va_list_gpr_counter_field = f_gpr;
7932 va_list_fpr_counter_field = f_fpr;
7933
9db1d521
HP
7934 DECL_FIELD_CONTEXT (f_gpr) = record;
7935 DECL_FIELD_CONTEXT (f_fpr) = record;
7936 DECL_FIELD_CONTEXT (f_ovf) = record;
7937 DECL_FIELD_CONTEXT (f_sav) = record;
7938
7939 TREE_CHAIN (record) = type_decl;
7940 TYPE_NAME (record) = type_decl;
7941 TYPE_FIELDS (record) = f_gpr;
7942 TREE_CHAIN (f_gpr) = f_fpr;
7943 TREE_CHAIN (f_fpr) = f_ovf;
7944 TREE_CHAIN (f_ovf) = f_sav;
7945
7946 layout_type (record);
7947
7948 /* The correct type is an array type of one element. */
7949 return build_array_type (record, build_index_type (size_zero_node));
7950}
7951
994fe660 7952/* Implement va_start by filling the va_list structure VALIST.
6c535c69
ZW
7953 STDARG_P is always true, and ignored.
7954 NEXTARG points to the first anonymous stack argument.
994fe660 7955
f710504c 7956 The following global variables are used to initialize
994fe660
UW
7957 the va_list structure:
7958
7959 current_function_args_info:
7960 holds number of gprs and fprs used for named arguments.
7961 current_function_arg_offset_rtx:
7962 holds the offset of the first anonymous stack argument
7963 (relative to the virtual arg pointer). */
9db1d521
HP
7964
7965void
9c808aad 7966s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
9db1d521
HP
7967{
7968 HOST_WIDE_INT n_gpr, n_fpr;
7969 int off;
7970 tree f_gpr, f_fpr, f_ovf, f_sav;
7971 tree gpr, fpr, ovf, sav, t;
7972
7973 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7974 f_fpr = TREE_CHAIN (f_gpr);
7975 f_ovf = TREE_CHAIN (f_fpr);
7976 f_sav = TREE_CHAIN (f_ovf);
7977
967af719 7978 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
7979 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
7980 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
7981 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
7982 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
9db1d521
HP
7983
7984 /* Count number of gp and fp argument registers used. */
7985
7986 n_gpr = current_function_args_info.gprs;
7987 n_fpr = current_function_args_info.fprs;
7988
29a79fcf
UW
7989 if (cfun->va_list_gpr_size)
7990 {
07beea0d 7991 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
47a25a46 7992 build_int_cst (NULL_TREE, n_gpr));
29a79fcf
UW
7993 TREE_SIDE_EFFECTS (t) = 1;
7994 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7995 }
9db1d521 7996
29a79fcf
UW
7997 if (cfun->va_list_fpr_size)
7998 {
07beea0d 7999 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
47a25a46 8000 build_int_cst (NULL_TREE, n_fpr));
29a79fcf
UW
8001 TREE_SIDE_EFFECTS (t) = 1;
8002 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8003 }
9db1d521
HP
8004
8005 /* Find the overflow area. */
29a79fcf
UW
8006 if (n_gpr + cfun->va_list_gpr_size > GP_ARG_NUM_REG
8007 || n_fpr + cfun->va_list_fpr_size > FP_ARG_NUM_REG)
8008 {
8009 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
9db1d521 8010
29a79fcf
UW
8011 off = INTVAL (current_function_arg_offset_rtx);
8012 off = off < 0 ? 0 : off;
8013 if (TARGET_DEBUG_ARG)
8014 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
8015 (int)n_gpr, (int)n_fpr, off);
9db1d521 8016
5be014d5 8017 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t, size_int (off));
9db1d521 8018
07beea0d 8019 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
29a79fcf
UW
8020 TREE_SIDE_EFFECTS (t) = 1;
8021 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8022 }
9db1d521
HP
8023
8024 /* Find the register save area. */
29a79fcf
UW
8025 if ((cfun->va_list_gpr_size && n_gpr < GP_ARG_NUM_REG)
8026 || (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG))
8027 {
8028 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
5be014d5
AP
8029 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
8030 size_int (-RETURN_REGNUM * UNITS_PER_WORD));
6b78f6be 8031
07beea0d 8032 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
29a79fcf
UW
8033 TREE_SIDE_EFFECTS (t) = 1;
8034 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8035 }
9db1d521
HP
8036}
8037
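/* For illustration: in

       void g (int a, double b, ...);

   va_start records __gpr = 1 and __fpr = 1 (one named argument of
   each kind consumed), points __overflow_arg_area at the first
   stack-passed variadic argument and __reg_save_area at the register
   save area, so va_arg continues exactly where the named arguments
   stopped.  */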
c7453384 8038/* Implement va_arg by updating the va_list structure
994fe660 8039 VALIST as required to retrieve an argument of type
c7453384
EC
8040 TYPE, and returning that argument.
8041
994fe660 8042 Generates code equivalent to:
c7453384 8043
9db1d521
HP
8044 if (integral value) {
8045 if (size <= 4 && args.gpr < 5 ||
c7453384 8046 size > 4 && args.gpr < 4 )
9db1d521
HP
8047 ret = args.reg_save_area[args.gpr+8]
8048 else
8049 ret = *args.overflow_arg_area++;
8050 } else if (float value) {
8051 if (args.fpr < 2)
8052 ret = args.reg_save_area[args.fpr+64]
8053 else
8054 ret = *args.overflow_arg_area++;
8055 } else if (aggregate value) {
8056 if (args.gpr < 5)
8057 ret = *args.reg_save_area[args.gpr]
8058 else
8059 ret = **args.overflow_arg_area++;
8060 } */
8061
ab96de7e 8062static tree
63694b5e
UW
8063s390_gimplify_va_arg (tree valist, tree type, tree *pre_p,
8064 tree *post_p ATTRIBUTE_UNUSED)
9db1d521
HP
8065{
8066 tree f_gpr, f_fpr, f_ovf, f_sav;
8067 tree gpr, fpr, ovf, sav, reg, t, u;
8068 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
63694b5e 8069 tree lab_false, lab_over, addr;
9db1d521
HP
8070
8071 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
8072 f_fpr = TREE_CHAIN (f_gpr);
8073 f_ovf = TREE_CHAIN (f_fpr);
8074 f_sav = TREE_CHAIN (f_ovf);
8075
967af719 8076 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
8077 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
8078 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
8079 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
8080 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
9db1d521
HP
8081
8082 size = int_size_in_bytes (type);
8083
8cd5a4e0 8084 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
9db1d521
HP
8085 {
8086 if (TARGET_DEBUG_ARG)
8087 {
8088 fprintf (stderr, "va_arg: aggregate type");
8089 debug_tree (type);
8090 }
8091
8092 /* Aggregates are passed by reference. */
8093 indirect_p = 1;
8094 reg = gpr;
8095 n_reg = 1;
ea506297 8096
b3d31392 8097 /* Kernel stack layout on 31 bit: it is assumed here that no padding
ea506297
AK
8098 will be added by s390_frame_info because for va_args an even
8099 number of GPRs always has to be saved (r15-r2 = 14 regs). */
6b78f6be 8100 sav_ofs = 2 * UNITS_PER_WORD;
9db1d521
HP
8101 sav_scale = UNITS_PER_WORD;
8102 size = UNITS_PER_WORD;
29a79fcf 8103 max_reg = GP_ARG_NUM_REG - n_reg;
9db1d521 8104 }
82b1c974 8105 else if (s390_function_arg_float (TYPE_MODE (type), type))
9db1d521
HP
8106 {
8107 if (TARGET_DEBUG_ARG)
8108 {
8109 fprintf (stderr, "va_arg: float type");
8110 debug_tree (type);
8111 }
8112
8113 /* FP args go in FP registers, if present. */
8114 indirect_p = 0;
8115 reg = fpr;
8116 n_reg = 1;
6b78f6be 8117 sav_ofs = 16 * UNITS_PER_WORD;
9db1d521 8118 sav_scale = 8;
29a79fcf 8119 max_reg = FP_ARG_NUM_REG - n_reg;
9db1d521
HP
8120 }
8121 else
8122 {
8123 if (TARGET_DEBUG_ARG)
8124 {
8125 fprintf (stderr, "va_arg: other type");
8126 debug_tree (type);
8127 }
8128
8129 /* Otherwise into GP registers. */
8130 indirect_p = 0;
8131 reg = gpr;
8132 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
ea506297 8133
b3d31392
AK
8134 /* Kernel stack layout on 31 bit: it is assumed here that no padding
8135 will be added by s390_frame_info because for va_args an even
8136 number of GPRs always has to be saved (r15-r2 = 14 regs). */
6b78f6be 8137 sav_ofs = 2 * UNITS_PER_WORD;
c7453384 8138
c873e11f
UW
8139 if (size < UNITS_PER_WORD)
8140 sav_ofs += UNITS_PER_WORD - size;
9db1d521
HP
8141
8142 sav_scale = UNITS_PER_WORD;
29a79fcf 8143 max_reg = GP_ARG_NUM_REG - n_reg;
9db1d521
HP
8144 }
8145
8146 /* Pull the value out of the saved registers ... */
8147
63694b5e
UW
8148 lab_false = create_artificial_label ();
8149 lab_over = create_artificial_label ();
8150 addr = create_tmp_var (ptr_type_node, "addr");
dcc9eb26 8151 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
9db1d521 8152
6c6dd4bd 8153 t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
63694b5e
UW
8154 t = build2 (GT_EXPR, boolean_type_node, reg, t);
8155 u = build1 (GOTO_EXPR, void_type_node, lab_false);
8156 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
8157 gimplify_and_add (t, pre_p);
9db1d521 8158
5be014d5
AP
8159 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav,
8160 size_int (sav_ofs));
6c6dd4bd
UW
8161 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
8162 fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
5be014d5 8163 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, fold_convert (sizetype, u));
9db1d521 8164
07beea0d 8165 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
63694b5e 8166 gimplify_and_add (t, pre_p);
9db1d521 8167
63694b5e
UW
8168 t = build1 (GOTO_EXPR, void_type_node, lab_over);
8169 gimplify_and_add (t, pre_p);
9db1d521 8170
63694b5e
UW
8171 t = build1 (LABEL_EXPR, void_type_node, lab_false);
8172 append_to_statement_list (t, pre_p);
9db1d521 8173
9db1d521
HP
8174
8175 /* ... Otherwise out of the overflow area. */
8176
ab96de7e
AS
8177 t = ovf;
8178 if (size < UNITS_PER_WORD)
5be014d5
AP
8179 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
8180 size_int (UNITS_PER_WORD - size));
ab96de7e
AS
8181
8182 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
8183
07beea0d 8184 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
ab96de7e
AS
8185 gimplify_and_add (u, pre_p);
8186
5be014d5
AP
8187 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
8188 size_int (size));
07beea0d 8189 t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, ovf, t);
ab96de7e
AS
8190 gimplify_and_add (t, pre_p);
8191
8192 t = build1 (LABEL_EXPR, void_type_node, lab_over);
8193 append_to_statement_list (t, pre_p);
8194
8195
8196 /* Increment register save count. */
8197
8198 u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
8199 fold_convert (TREE_TYPE (reg), size_int (n_reg)));
8200 gimplify_and_add (u, pre_p);
8201
8202 if (indirect_p)
8203 {
8204 t = build_pointer_type (build_pointer_type (type));
8205 addr = fold_convert (t, addr);
8206 addr = build_va_arg_indirect_ref (addr);
8207 }
8208 else
8209 {
8210 t = build_pointer_type (type);
8211 addr = fold_convert (t, addr);
8212 }
8213
8214 return build_va_arg_indirect_ref (addr);
8215}
8216
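/* For illustration: on 64-bit, va_arg (ap, long) expands roughly to

       if (ap->__gpr > 4)
         addr = ap->__overflow_arg_area, ap->__overflow_arg_area += 8;
       else
         addr = ap->__reg_save_area + 16 + 8 * ap->__gpr;
       ap->__gpr += 1;
       result = *(long *) addr;

   a sketch only; the actual expansion is emitted as GIMPLE by the
   code above.  */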
8217
8218/* Builtins. */
8219
8220enum s390_builtin
8221{
8222 S390_BUILTIN_THREAD_POINTER,
8223 S390_BUILTIN_SET_THREAD_POINTER,
8224
8225 S390_BUILTIN_max
8226};
8227
8228static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
8229 CODE_FOR_get_tp_64,
8230 CODE_FOR_set_tp_64
8231};
8232
8233static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
8234 CODE_FOR_get_tp_31,
8235 CODE_FOR_set_tp_31
8236};
8237
8238static void
8239s390_init_builtins (void)
8240{
8241 tree ftype;
8242
8243 ftype = build_function_type (ptr_type_node, void_list_node);
c79efc4d
RÁE
8244 add_builtin_function ("__builtin_thread_pointer", ftype,
8245 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
8246 NULL, NULL_TREE);
ab96de7e
AS
8247
8248 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
c79efc4d
RÁE
8249 add_builtin_function ("__builtin_set_thread_pointer", ftype,
8250 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
8251 NULL, NULL_TREE);
ab96de7e
AS
8252}
8253
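/* For illustration: user code reaches these builtins as

       void *tp = __builtin_thread_pointer ();
       __builtin_set_thread_pointer (tp);

   which expand through the get_tp/set_tp patterns selected in
   s390_expand_builtin below.  */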
8254/* Expand an expression EXP that calls a built-in function,
8255 with result going to TARGET if that's convenient
8256 (and in mode MODE if that's convenient).
8257 SUBTARGET may be used as the target for computing one of EXP's operands.
8258 IGNORE is nonzero if the value is to be ignored. */
8259
8260static rtx
8261s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
8262 enum machine_mode mode ATTRIBUTE_UNUSED,
8263 int ignore ATTRIBUTE_UNUSED)
8264{
8265#define MAX_ARGS 2
8266
8267 unsigned int const *code_for_builtin =
8268 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
8269
5039610b 8270 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
ab96de7e 8271 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
ab96de7e
AS
8272 enum insn_code icode;
8273 rtx op[MAX_ARGS], pat;
8274 int arity;
8275 bool nonvoid;
5039610b
SL
8276 tree arg;
8277 call_expr_arg_iterator iter;
ab96de7e
AS
8278
8279 if (fcode >= S390_BUILTIN_max)
8280 internal_error ("bad builtin fcode");
8281 icode = code_for_builtin[fcode];
8282 if (icode == 0)
8283 internal_error ("bad builtin fcode");
8284
8285 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
8286
5039610b
SL
8287 arity = 0;
8288 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
ab96de7e
AS
8289 {
8290 const struct insn_operand_data *insn_op;
8291
ab96de7e
AS
8292 if (arg == error_mark_node)
8293 return NULL_RTX;
8294 if (arity > MAX_ARGS)
8295 return NULL_RTX;
8296
8297 insn_op = &insn_data[icode].operand[arity + nonvoid];
8298
8299 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
8300
8301 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
8302 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
5039610b 8303 arity++;
ab96de7e
AS
8304 }
8305
8306 if (nonvoid)
8307 {
8308 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8309 if (!target
8310 || GET_MODE (target) != tmode
8311 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
8312 target = gen_reg_rtx (tmode);
8313 }
8314
8315 switch (arity)
8316 {
8317 case 0:
8318 pat = GEN_FCN (icode) (target);
8319 break;
8320 case 1:
8321 if (nonvoid)
8322 pat = GEN_FCN (icode) (target, op[0]);
8323 else
8324 pat = GEN_FCN (icode) (op[0]);
8325 break;
8326 case 2:
8327 pat = GEN_FCN (icode) (target, op[0], op[1]);
8328 break;
8329 default:
8d933e31 8330 gcc_unreachable ();
ab96de7e
AS
8331 }
8332 if (!pat)
8333 return NULL_RTX;
8334 emit_insn (pat);
8335
8336 if (nonvoid)
8337 return target;
8338 else
8339 return const0_rtx;
8340}
8341
8342
8343/* Output assembly code for the trampoline template to
8344 stdio stream FILE.
8345
8346 On S/390, we use gpr 1 internally in the trampoline code;
8347 gpr 0 is used to hold the static chain. */
8348
8349void
8350s390_trampoline_template (FILE *file)
8351{
8352 rtx op[2];
8353 op[0] = gen_rtx_REG (Pmode, 0);
8354 op[1] = gen_rtx_REG (Pmode, 1);
8355
8356 if (TARGET_64BIT)
8357 {
8358 output_asm_insn ("basr\t%1,0", op);
8359 output_asm_insn ("lmg\t%0,%1,14(%1)", op);
8360 output_asm_insn ("br\t%1", op);
8361 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
8362 }
8363 else
8364 {
8365 output_asm_insn ("basr\t%1,0", op);
8366 output_asm_insn ("lm\t%0,%1,6(%1)", op);
8367 output_asm_insn ("br\t%1", op);
8368 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
8369 }
8370}
8371
8372/* Emit RTL insns to initialize the variable parts of a trampoline.
8373 FNADDR is an RTX for the address of the function's pure code.
8374 CXT is an RTX for the static chain value for the function. */
8375
8376void
8377s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
8378{
8379 emit_move_insn (gen_rtx_MEM (Pmode,
8380 memory_address (Pmode,
8381 plus_constant (addr, (TARGET_64BIT ? 16 : 8)))), cxt);
8382 emit_move_insn (gen_rtx_MEM (Pmode,
8383 memory_address (Pmode,
8384 plus_constant (addr, (TARGET_64BIT ? 24 : 12)))), fnaddr);
8385}
8386
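/* For illustration: the initialized 64-bit trampoline reads

       0:  basr  %r1,0             # %r1 = address of next insn (2)
       2:  lmg   %r0,%r1,14(%r1)   # %r0 = chain (16), %r1 = target (24)
       8:  br    %r1
      16:  <static chain value>
      24:  <function address>

   since 14(%r1) with %r1 == 2 addresses offset 16; the 31-bit variant
   uses 4-byte slots at offsets 8 and 12 analogously.  */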
ab96de7e
AS
8387/* Output assembler code to FILE to increment profiler label # LABELNO
8388 for profiling a function entry. */
8389
8390void
8391s390_function_profiler (FILE *file, int labelno)
8392{
8393 rtx op[7];
8394
8395 char label[128];
8396 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
8397
8398 fprintf (file, "# function profiler \n");
8399
8400 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
8401 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8402 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
8403
8404 op[2] = gen_rtx_REG (Pmode, 1);
8405 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
8406 SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
8407
8408 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
8409 if (flag_pic)
8410 {
8411 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
8412 op[4] = gen_rtx_CONST (Pmode, op[4]);
8413 }
8414
8415 if (TARGET_64BIT)
8416 {
8417 output_asm_insn ("stg\t%0,%1", op);
8418 output_asm_insn ("larl\t%2,%3", op);
8419 output_asm_insn ("brasl\t%0,%4", op);
8420 output_asm_insn ("lg\t%0,%1", op);
8421 }
8422 else if (!flag_pic)
8423 {
8424 op[6] = gen_label_rtx ();
8425
8426 output_asm_insn ("st\t%0,%1", op);
8427 output_asm_insn ("bras\t%2,%l6", op);
8428 output_asm_insn (".long\t%4", op);
8429 output_asm_insn (".long\t%3", op);
8430 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
8431 output_asm_insn ("l\t%0,0(%2)", op);
8432 output_asm_insn ("l\t%2,4(%2)", op);
8433 output_asm_insn ("basr\t%0,%0", op);
8434 output_asm_insn ("l\t%0,%1", op);
8435 }
8436 else
8437 {
8438 op[5] = gen_label_rtx ();
8439 op[6] = gen_label_rtx ();
8440
8441 output_asm_insn ("st\t%0,%1", op);
8442 output_asm_insn ("bras\t%2,%l6", op);
8443 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
8444 output_asm_insn (".long\t%4-%l5", op);
8445 output_asm_insn (".long\t%3-%l5", op);
8446 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
8447 output_asm_insn ("lr\t%0,%2", op);
8448 output_asm_insn ("a\t%0,0(%2)", op);
8449 output_asm_insn ("a\t%2,4(%2)", op);
8450 output_asm_insn ("basr\t%0,%0", op);
8451 output_asm_insn ("l\t%0,%1", op);
8452 }
8453}
8454
8455/* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
8456 into its SYMBOL_REF_FLAGS. */
8457
8458static void
8459s390_encode_section_info (tree decl, rtx rtl, int first)
8460{
8461 default_encode_section_info (decl, rtl, first);
8462
8463 /* If a variable has a forced alignment to < 2 bytes, mark it with
8464 SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand. */
8465 if (TREE_CODE (decl) == VAR_DECL
8466 && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
8467 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
8468}
8469
8470/* Output thunk to FILE that implements a C++ virtual function call (with
8471 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
8472 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
8473 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
8474 relative to the resulting this pointer. */
8475
8476static void
8477s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
8478 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
8479 tree function)
8480{
8481 rtx op[10];
8482 int nonlocal = 0;
8483
8484 /* Operand 0 is the target function. */
8485 op[0] = XEXP (DECL_RTL (function), 0);
8486 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
8487 {
8488 nonlocal = 1;
8489 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
8490 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
8491 op[0] = gen_rtx_CONST (Pmode, op[0]);
8492 }
8493
8494 /* Operand 1 is the 'this' pointer. */
8495 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
8496 op[1] = gen_rtx_REG (Pmode, 3);
8497 else
8498 op[1] = gen_rtx_REG (Pmode, 2);
8499
8500 /* Operand 2 is the delta. */
8501 op[2] = GEN_INT (delta);
8502
8503 /* Operand 3 is the vcall_offset. */
8504 op[3] = GEN_INT (vcall_offset);
8505
8506 /* Operand 4 is the temporary register. */
8507 op[4] = gen_rtx_REG (Pmode, 1);
8508
8509 /* Operands 5 to 8 can be used as labels. */
8510 op[5] = NULL_RTX;
8511 op[6] = NULL_RTX;
8512 op[7] = NULL_RTX;
8513 op[8] = NULL_RTX;
8514
8515 /* Operand 9 can be used for temporary register. */
8516 op[9] = NULL_RTX;
8517
8518 /* Generate code. */
8519 if (TARGET_64BIT)
8520 {
8521 /* Setup literal pool pointer if required. */
8522 if ((!DISP_IN_RANGE (delta)
ec24698e
UW
8523 && !CONST_OK_FOR_K (delta)
8524 && !CONST_OK_FOR_Os (delta))
ab96de7e 8525 || (!DISP_IN_RANGE (vcall_offset)
ec24698e
UW
8526 && !CONST_OK_FOR_K (vcall_offset)
8527 && !CONST_OK_FOR_Os (vcall_offset)))
ab96de7e
AS
8528 {
8529 op[5] = gen_label_rtx ();
8530 output_asm_insn ("larl\t%4,%5", op);
8531 }
8532
8533 /* Add DELTA to this pointer. */
8534 if (delta)
8535 {
b5c67a49 8536 if (CONST_OK_FOR_J (delta))
ab96de7e
AS
8537 output_asm_insn ("la\t%1,%2(%1)", op);
8538 else if (DISP_IN_RANGE (delta))
8539 output_asm_insn ("lay\t%1,%2(%1)", op);
b5c67a49 8540 else if (CONST_OK_FOR_K (delta))
ab96de7e 8541 output_asm_insn ("aghi\t%1,%2", op);
ec24698e
UW
8542 else if (CONST_OK_FOR_Os (delta))
8543 output_asm_insn ("agfi\t%1,%2", op);
ab96de7e
AS
8544 else
8545 {
8546 op[6] = gen_label_rtx ();
8547 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
8548 }
8549 }
8550
8551 /* Perform vcall adjustment. */
8552 if (vcall_offset)
8553 {
8554 if (DISP_IN_RANGE (vcall_offset))
8555 {
8556 output_asm_insn ("lg\t%4,0(%1)", op);
8557 output_asm_insn ("ag\t%1,%3(%4)", op);
8558 }
b5c67a49 8559 else if (CONST_OK_FOR_K (vcall_offset))
ab96de7e
AS
8560 {
8561 output_asm_insn ("lghi\t%4,%3", op);
8562 output_asm_insn ("ag\t%4,0(%1)", op);
8563 output_asm_insn ("ag\t%1,0(%4)", op);
8564 }
ec24698e
UW
8565 else if (CONST_OK_FOR_Os (vcall_offset))
8566 {
8567 output_asm_insn ("lgfi\t%4,%3", op);
8568 output_asm_insn ("ag\t%4,0(%1)", op);
8569 output_asm_insn ("ag\t%1,0(%4)", op);
8570 }
ab96de7e
AS
8571 else
8572 {
8573 op[7] = gen_label_rtx ();
8574 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
8575 output_asm_insn ("ag\t%4,0(%1)", op);
8576 output_asm_insn ("ag\t%1,0(%4)", op);
8577 }
8578 }
8579
8580 /* Jump to target. */
8581 output_asm_insn ("jg\t%0", op);
8582
8583 /* Output literal pool if required. */
8584 if (op[5])
8585 {
8586 output_asm_insn (".align\t4", op);
8587 targetm.asm_out.internal_label (file, "L",
8588 CODE_LABEL_NUMBER (op[5]));
8589 }
8590 if (op[6])
8591 {
8592 targetm.asm_out.internal_label (file, "L",
8593 CODE_LABEL_NUMBER (op[6]));
8594 output_asm_insn (".long\t%2", op);
8595 }
8596 if (op[7])
8597 {
8598 targetm.asm_out.internal_label (file, "L",
8599 CODE_LABEL_NUMBER (op[7]));
8600 output_asm_insn (".long\t%3", op);
8601 }
8602 }
8603 else
8604 {
8605 /* Setup base pointer if required. */
8606 if (!vcall_offset
8607 || (!DISP_IN_RANGE (delta)
ec24698e
UW
8608 && !CONST_OK_FOR_K (delta)
8609 && !CONST_OK_FOR_Os (delta))
ab96de7e 8610 || (!DISP_IN_RANGE (vcall_offset)
ec24698e
UW
8611 && !CONST_OK_FOR_K (vcall_offset)
8612 && !CONST_OK_FOR_Os (vcall_offset)))
ab96de7e
AS
8613 {
8614 op[5] = gen_label_rtx ();
8615 output_asm_insn ("basr\t%4,0", op);
8616 targetm.asm_out.internal_label (file, "L",
8617 CODE_LABEL_NUMBER (op[5]));
8618 }
8619
8620 /* Add DELTA to this pointer. */
8621 if (delta)
8622 {
b5c67a49 8623 if (CONST_OK_FOR_J (delta))
ab96de7e
AS
8624 output_asm_insn ("la\t%1,%2(%1)", op);
8625 else if (DISP_IN_RANGE (delta))
8626 output_asm_insn ("lay\t%1,%2(%1)", op);
b5c67a49 8627 else if (CONST_OK_FOR_K (delta))
ab96de7e 8628 output_asm_insn ("ahi\t%1,%2", op);
ec24698e
UW
8629 else if (CONST_OK_FOR_Os (delta))
8630 output_asm_insn ("afi\t%1,%2", op);
ab96de7e
AS
8631 else
8632 {
8633 op[6] = gen_label_rtx ();
8634 output_asm_insn ("a\t%1,%6-%5(%4)", op);
8635 }
8636 }
8637
8638 /* Perform vcall adjustment. */
8639 if (vcall_offset)
8640 {
b5c67a49 8641 if (CONST_OK_FOR_J (vcall_offset))
ab96de7e 8642 {
c4d50129 8643 output_asm_insn ("l\t%4,0(%1)", op);
ab96de7e
AS
8644 output_asm_insn ("a\t%1,%3(%4)", op);
8645 }
8646 else if (DISP_IN_RANGE (vcall_offset))
8647 {
c4d50129 8648 output_asm_insn ("l\t%4,0(%1)", op);
ab96de7e
AS
8649 output_asm_insn ("ay\t%1,%3(%4)", op);
8650 }
b5c67a49 8651 else if (CONST_OK_FOR_K (vcall_offset))
ab96de7e
AS
8652 {
8653 output_asm_insn ("lhi\t%4,%3", op);
8654 output_asm_insn ("a\t%4,0(%1)", op);
8655 output_asm_insn ("a\t%1,0(%4)", op);
8656 }
ec24698e
UW
8657 else if (CONST_OK_FOR_Os (vcall_offset))
8658 {
8659 output_asm_insn ("iilf\t%4,%3", op);
8660 output_asm_insn ("a\t%4,0(%1)", op);
8661 output_asm_insn ("a\t%1,0(%4)", op);
8662 }
ab96de7e
AS
8663 else
8664 {
8665 op[7] = gen_label_rtx ();
8666 output_asm_insn ("l\t%4,%7-%5(%4)", op);
8667 output_asm_insn ("a\t%4,0(%1)", op);
8668 output_asm_insn ("a\t%1,0(%4)", op);
8669 }
9db1d521 8670
ab96de7e
AS
8671 /* We had to clobber the base pointer register.
8672 Re-setup the base pointer (with a different base). */
8673 op[5] = gen_label_rtx ();
8674 output_asm_insn ("basr\t%4,0", op);
8675 targetm.asm_out.internal_label (file, "L",
8676 CODE_LABEL_NUMBER (op[5]));
8677 }
9db1d521 8678
ab96de7e
AS
8679 /* Jump to target. */
8680 op[8] = gen_label_rtx ();
9db1d521 8681
ab96de7e
AS
8682 if (!flag_pic)
8683 output_asm_insn ("l\t%4,%8-%5(%4)", op);
8684 else if (!nonlocal)
8685 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8686 /* We cannot call through .plt, since .plt requires %r12 loaded. */
8687 else if (flag_pic == 1)
8688 {
8689 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8690 output_asm_insn ("l\t%4,%0(%4)", op);
8691 }
8692 else if (flag_pic == 2)
8693 {
8694 op[9] = gen_rtx_REG (Pmode, 0);
8695 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
8696 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8697 output_asm_insn ("ar\t%4,%9", op);
8698 output_asm_insn ("l\t%4,0(%4)", op);
8699 }
9db1d521 8700
ab96de7e 8701 output_asm_insn ("br\t%4", op);
9db1d521 8702
ab96de7e
AS
8703 /* Output literal pool. */
8704 output_asm_insn (".align\t4", op);
9db1d521 8705
ab96de7e
AS
8706 if (nonlocal && flag_pic == 2)
8707 output_asm_insn (".long\t%0", op);
8708 if (nonlocal)
8709 {
8710 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
8711 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
8712 }
63694b5e 8713
ab96de7e
AS
8714 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
8715 if (!flag_pic)
8716 output_asm_insn (".long\t%0", op);
8717 else
8718 output_asm_insn (".long\t%0-%5", op);
9db1d521 8719
ab96de7e
AS
8720 if (op[6])
8721 {
8722 targetm.asm_out.internal_label (file, "L",
8723 CODE_LABEL_NUMBER (op[6]));
8724 output_asm_insn (".long\t%2", op);
8725 }
8726 if (op[7])
8727 {
8728 targetm.asm_out.internal_label (file, "L",
8729 CODE_LABEL_NUMBER (op[7]));
8730 output_asm_insn (".long\t%3", op);
8731 }
9db1d521 8732 }
9db1d521
HP
8733}
8734
ab96de7e
AS
8735static bool
8736s390_valid_pointer_mode (enum machine_mode mode)
8737{
8738 return (mode == SImode || (TARGET_64BIT && mode == DImode));
8739}
994fe660 8740
9a6f71b4 8741/* Checks whether the given CALL_EXPR would use a caller
ab96de7e
AS
8742 saved register. This is used to decide whether sibling call
8743 optimization could be performed on the respective function
8744 call. */
fd3cd001 8745
ab96de7e 8746static bool
9a6f71b4 8747s390_call_saved_register_used (tree call_expr)
fd3cd001 8748{
ab96de7e
AS
8749 CUMULATIVE_ARGS cum;
8750 tree parameter;
8751 enum machine_mode mode;
8752 tree type;
8753 rtx parm_rtx;
9a6f71b4 8754 int reg, i;
fd3cd001 8755
ab96de7e 8756 INIT_CUMULATIVE_ARGS (cum, NULL, NULL, 0, 0);
fd3cd001 8757
9a6f71b4 8758 for (i = 0; i < call_expr_nargs (call_expr); i++)
ab96de7e 8759 {
9a6f71b4 8760 parameter = CALL_EXPR_ARG (call_expr, i);
8d933e31 8761 gcc_assert (parameter);
fd3cd001 8762
ab96de7e
AS
8763 /* For an undeclared variable passed as parameter we will get
8764 an ERROR_MARK node here. */
8765 if (TREE_CODE (parameter) == ERROR_MARK)
8766 return true;
fd3cd001 8767
8d933e31
AS
8768 type = TREE_TYPE (parameter);
8769 gcc_assert (type);
fd3cd001 8770
8d933e31
AS
8771 mode = TYPE_MODE (type);
8772 gcc_assert (mode);
fd3cd001 8773
ab96de7e
AS
8774 if (pass_by_reference (&cum, mode, type, true))
8775 {
8776 mode = Pmode;
8777 type = build_pointer_type (type);
8778 }
fd3cd001 8779
ab96de7e 8780 parm_rtx = s390_function_arg (&cum, mode, type, 0);
fd3cd001 8781
ab96de7e 8782 s390_function_arg_advance (&cum, mode, type, 0);
fd3cd001 8783
ab96de7e
AS
8784 if (parm_rtx && REG_P (parm_rtx))
8785 {
8786 for (reg = 0;
8787 reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
8788 reg++)
8789 if (! call_used_regs[reg + REGNO (parm_rtx)])
8790 return true;
8791 }
8792 }
8793 return false;
8794}

/* Return true if the given call expression can be
   turned into a sibling call.
   DECL holds the declaration of the function to be called whereas
   EXP is the call expression itself.  */

static bool
s390_function_ok_for_sibcall (tree decl, tree exp)
{
  /* The TPF epilogue uses register 1.  */
  if (TARGET_TPF_PROFILING)
    return false;

  /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
     which would have to be restored before the sibcall.  */
  if (!TARGET_64BIT && flag_pic && decl && !targetm.binds_local_p (decl))
    return false;

  /* Register 6 on s390 is available as an argument register but is
     call-saved, i.e. the callee must preserve it.  This makes functions
     needing this register for arguments not suitable for sibcalls.  */
  return !s390_call_saved_register_used (exp);
}
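
/* For illustration (assuming the standard s390 calling convention, which
   passes the first five integer arguments in r2-r5 and r6): in

       extern int f (int, int, int, int, int);
       int g (int a, int b, int c, int d, int e)
       {
         return f (a, b, c, d, e);
       }

   the fifth argument lands in r6.  Since r6 is not in call_used_regs,
   s390_call_saved_register_used returns true and the tail call to f is
   not emitted as a sibcall.  */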

/* Return the fixed registers used for condition codes.  */

static bool
s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
{
  *p1 = CC_REGNUM;
  *p2 = INVALID_REGNUM;

  return true;
}

/* This function is used by the call expanders of the machine description.
   It emits the call insn itself together with the necessary operations
   to adjust the target address and returns the emitted insn.
   ADDR_LOCATION is the target address rtx
   TLS_CALL the location of the thread-local symbol
   RESULT_REG the register where the result of the call should be stored
   RETADDR_REG the register where the return address should be stored
               If this parameter is NULL_RTX the call is considered
               to be a sibling call.  */

rtx
s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
                rtx retaddr_reg)
{
  bool plt_call = false;
  rtx insn;
  rtx call;
  rtx clobber;
  rtvec vec;

  /* Direct function calls need special treatment.  */
  if (GET_CODE (addr_location) == SYMBOL_REF)
    {
      /* When calling a global routine in PIC mode, we must
         replace the symbol itself with the PLT stub.  */
      if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
        {
          addr_location = gen_rtx_UNSPEC (Pmode,
                                          gen_rtvec (1, addr_location),
                                          UNSPEC_PLT);
          addr_location = gen_rtx_CONST (Pmode, addr_location);
          plt_call = true;
        }

      /* Unless we can use the bras(l) insn, force the
         routine address into a register.  */
      if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
        {
          if (flag_pic)
            addr_location = legitimize_pic_address (addr_location, 0);
          else
            addr_location = force_reg (Pmode, addr_location);
        }
    }

  /* If it is already an indirect call or the code above moved the
     SYMBOL_REF to somewhere else make sure the address can be found in
     register 1.  */
  if (retaddr_reg == NULL_RTX
      && GET_CODE (addr_location) != SYMBOL_REF
      && !plt_call)
    {
      emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
      addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
    }

  addr_location = gen_rtx_MEM (QImode, addr_location);
  call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);

  if (result_reg != NULL_RTX)
    call = gen_rtx_SET (VOIDmode, result_reg, call);

  if (retaddr_reg != NULL_RTX)
    {
      clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);

      if (tls_call != NULL_RTX)
        vec = gen_rtvec (3, call, clobber,
                         gen_rtx_USE (VOIDmode, tls_call));
      else
        vec = gen_rtvec (2, call, clobber);

      call = gen_rtx_PARALLEL (VOIDmode, vec);
    }

  insn = emit_call_insn (call);

  /* 31-bit PLT stubs and tls calls use the GOT register implicitly.  */
  if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
    {
      /* s390_function_ok_for_sibcall should
         have denied sibcalls in this case.  */
      gcc_assert (retaddr_reg != NULL_RTX);

      use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
    }
  return insn;
}
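
/* A minimal usage sketch (assumed invocations, following the parameter
   conventions described above rather than the actual s390.md expander
   bodies):

       insn = s390_emit_call (addr, NULL_RTX, NULL_RTX, retaddr);   normal call
       insn = s390_emit_call (addr, NULL_RTX, result, retaddr);     call w/ value
       insn = s390_emit_call (addr, NULL_RTX, result, NULL_RTX);    sibling call

   Passing NULL_RTX as RETADDR_REG is what marks a sibling call; TLS_CALL
   is non-NULL only for the thread-local storage call patterns.  */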

/* Implement CONDITIONAL_REGISTER_USAGE.  */

void
s390_conditional_register_usage (void)
{
  int i;

  if (flag_pic)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
  if (TARGET_CPU_ZARCH)
    {
      fixed_regs[BASE_REGNUM] = 0;
      call_used_regs[BASE_REGNUM] = 0;
      fixed_regs[RETURN_REGNUM] = 0;
      call_used_regs[RETURN_REGNUM] = 0;
    }
  if (TARGET_64BIT)
    {
      for (i = 24; i < 32; i++)
        call_used_regs[i] = call_really_used_regs[i] = 0;
    }
  else
    {
      for (i = 18; i < 20; i++)
        call_used_regs[i] = call_really_used_regs[i] = 0;
    }

  if (TARGET_SOFT_FLOAT)
    {
      for (i = 16; i < 32; i++)
        call_used_regs[i] = fixed_regs[i] = 1;
    }
}
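
/* A hedged summary of the effects above: with -fpic the GOT pointer r12
   (PIC_OFFSET_TABLE_REGNUM) becomes fixed; on z/Architecture the literal
   pool base register (BASE_REGNUM) and return register (RETURN_REGNUM)
   become ordinary allocatable registers; the 64-bit ABI keeps f8-f15
   (hard regs 24-31) call-saved whereas the 31-bit ABI preserves only f4
   and f6 (hard regs 18-19); and -msoft-float withdraws all FPRs (hard
   regs 16-31) from register allocation.  */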

/* Corresponding function to eh_return expander.  */

static GTY(()) rtx s390_tpf_eh_return_symbol;
void
s390_emit_tpf_eh_return (rtx target)
{
  rtx insn, reg;

  if (!s390_tpf_eh_return_symbol)
    s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");

  reg = gen_rtx_REG (Pmode, 2);

  emit_move_insn (reg, target);
  insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
                         gen_rtx_REG (Pmode, RETURN_REGNUM));
  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);

  emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
}

/* Rework the prologue/epilogue to avoid saving/restoring
   registers unnecessarily.  */

static void
s390_optimize_prologue (void)
{
  rtx insn, new_insn, next_insn;

  /* Do a final recompute of the frame-related data.  */

  s390_update_frame_layout ();

  /* If all special registers are in fact used, there's nothing we
     can do, so no point in walking the insn list.  */

  if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
      && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
      && (TARGET_CPU_ZARCH
          || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
              && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
    return;

  /* Search for prologue/epilogue insns and replace them.  */

  for (insn = get_insns (); insn; insn = next_insn)
    {
      int first, last, off;
      rtx set, base, offset;

      next_insn = NEXT_INSN (insn);

      if (GET_CODE (insn) != INSN)
        continue;

      if (GET_CODE (PATTERN (insn)) == PARALLEL
          && store_multiple_operation (PATTERN (insn), VOIDmode))
        {
          set = XVECEXP (PATTERN (insn), 0, 0);
          first = REGNO (SET_SRC (set));
          last = first + XVECLEN (PATTERN (insn), 0) - 1;
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (cfun_frame_layout.first_save_gpr != -1
              && (cfun_frame_layout.first_save_gpr < first
                  || cfun_frame_layout.last_save_gpr > last))
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;
          if (first > BASE_REGNUM || last < BASE_REGNUM)
            continue;

          if (cfun_frame_layout.first_save_gpr != -1)
            {
              new_insn = save_gprs (base,
                                    off + (cfun_frame_layout.first_save_gpr
                                           - first) * UNITS_PER_WORD,
                                    cfun_frame_layout.first_save_gpr,
                                    cfun_frame_layout.last_save_gpr);
              new_insn = emit_insn_before (new_insn, insn);
              INSN_ADDRESSES_NEW (new_insn, -1);
            }

          remove_insn (insn);
          continue;
        }

      if (cfun_frame_layout.first_save_gpr == -1
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_SRC (PATTERN (insn))) == REG
          && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
              || (!TARGET_CPU_ZARCH
                  && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
          && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
        {
          set = PATTERN (insn);
          first = REGNO (SET_SRC (set));
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;

          remove_insn (insn);
          continue;
        }

      if (GET_CODE (PATTERN (insn)) == PARALLEL
          && load_multiple_operation (PATTERN (insn), VOIDmode))
        {
          set = XVECEXP (PATTERN (insn), 0, 0);
          first = REGNO (SET_DEST (set));
          last = first + XVECLEN (PATTERN (insn), 0) - 1;
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (cfun_frame_layout.first_restore_gpr != -1
              && (cfun_frame_layout.first_restore_gpr < first
                  || cfun_frame_layout.last_restore_gpr > last))
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;
          if (first > BASE_REGNUM || last < BASE_REGNUM)
            continue;

          if (cfun_frame_layout.first_restore_gpr != -1)
            {
              new_insn = restore_gprs (base,
                                       off + (cfun_frame_layout.first_restore_gpr
                                              - first) * UNITS_PER_WORD,
                                       cfun_frame_layout.first_restore_gpr,
                                       cfun_frame_layout.last_restore_gpr);
              new_insn = emit_insn_before (new_insn, insn);
              INSN_ADDRESSES_NEW (new_insn, -1);
            }

          remove_insn (insn);
          continue;
        }

      if (cfun_frame_layout.first_restore_gpr == -1
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_DEST (PATTERN (insn))) == REG
          && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
              || (!TARGET_CPU_ZARCH
                  && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
          && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
        {
          set = PATTERN (insn);
          first = REGNO (SET_DEST (set));
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;

          remove_insn (insn);
          continue;
        }
    }
}
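
/* Example of the intended effect (illustrative, assuming the 31-bit layout
   where the save slot of GPR n sits at offset 4 * n from the incoming stack
   pointer): if the prologue initially saved r6-r15 with

       stm  %r6,%r15,24(%r15)

   but the final frame layout shows r13 as the first register that really
   needs saving, the store-multiple is replaced by the narrower

       stm  %r13,%r15,52(%r15)

   since 13 * 4 = 52 while 6 * 4 = 24.  Epilogue load-multiples are narrowed
   the same way.  */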

/* Perform machine-dependent processing.  */

static void
s390_reorg (void)
{
  bool pool_overflow = false;

  /* Make sure all splits have been performed; splits after
     machine_dependent_reorg might confuse insn length counts.  */
  split_all_insns_noflow ();

  /* From here on decomposed literal pool addresses must be accepted.  */
  cfun->machine->decomposed_literal_pool_addresses_ok_p = true;

  /* Install the main literal pool and the associated base
     register load insns.

     In addition, there are two problematic situations we need
     to correct:

     - the literal pool might be > 4096 bytes in size, so that
       some of its elements cannot be directly accessed

     - a branch target might be > 64K away from the branch, so that
       it is not possible to use a PC-relative instruction.

     To fix those, we split the single literal pool into multiple
     pool chunks, reloading the pool base register at various
     points throughout the function to ensure it always points to
     the pool chunk the following code expects, and / or replace
     PC-relative branches by absolute branches.

     However, the two problems are interdependent: splitting the
     literal pool can move a branch further away from its target,
     causing the 64K limit to overflow, and on the other hand,
     replacing a PC-relative branch by an absolute branch means
     we need to put the branch target address into the literal
     pool, possibly causing it to overflow.

     So, we loop trying to fix up both problems until we manage
     to satisfy both conditions at the same time.  Note that the
     loop is guaranteed to terminate as every pass of the loop
     strictly decreases the total number of PC-relative branches
     in the function.  (This is not completely true as there
     might be branch-over-pool insns introduced by chunkify_start.
     Those never need to be split however.)  */
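
  /* For illustration (hypothetical scenario): suppose the pool itself fits
     in 4096 bytes, but one conditional branch lies more than 64K from its
     target.  s390_split_branches then turns it into an absolute branch,
     pushing the target address into the pool; if the pool overflows as a
     result, the next iteration of the loop below switches to chunked
     pools.  Each split permanently retires one PC-relative branch, which
     is what bounds the number of iterations.  */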

  for (;;)
    {
      struct constant_pool *pool = NULL;

      /* Collect the literal pool.  */
      if (!pool_overflow)
        {
          pool = s390_mainpool_start ();
          if (!pool)
            pool_overflow = true;
        }

      /* If literal pool overflowed, start to chunkify it.  */
      if (pool_overflow)
        pool = s390_chunkify_start ();

      /* Split out-of-range branches.  If this has created new
         literal pool entries, cancel current chunk list and
         recompute it.  zSeries machines have large branch
         instructions, so we never need to split a branch.  */
      if (!TARGET_CPU_ZARCH && s390_split_branches ())
        {
          if (pool_overflow)
            s390_chunkify_cancel (pool);
          else
            s390_mainpool_cancel (pool);

          continue;
        }

      /* If we made it up to here, both conditions are satisfied.
         Finish up literal pool related changes.  */
      if (pool_overflow)
        s390_chunkify_finish (pool);
      else
        s390_mainpool_finish (pool);

      /* We're done splitting branches.  */
      cfun->machine->split_branches_pending_p = false;
      break;
    }

  /* Generate out-of-pool execute target insns.  */
  if (TARGET_CPU_ZARCH)
    {
      rtx insn, label, target;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          label = s390_execute_label (insn);
          if (!label)
            continue;

          gcc_assert (label != const0_rtx);

          target = emit_label (XEXP (label, 0));
          INSN_ADDRESSES_NEW (target, -1);

          target = emit_insn (s390_execute_target (insn));
          INSN_ADDRESSES_NEW (target, -1);
        }
    }

  /* Try to optimize prologue and epilogue further.  */
  s390_optimize_prologue ();
}

/* Initialize GCC target structure.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER s390_assemble_integer

#undef TARGET_ASM_OPEN_PAREN
#define TARGET_ASM_OPEN_PAREN ""

#undef TARGET_ASM_CLOSE_PAREN
#define TARGET_ASM_CLOSE_PAREN ""

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_FUSED_MADD)
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION s390_handle_option

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO s390_encode_section_info

#ifdef HAVE_AS_TLS
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS true
#endif
#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY s390_return_in_memory

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS s390_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN s390_expand_builtin

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE s390_issue_rate
#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead

#undef TARGET_CANNOT_COPY_INSN_P
#define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS s390_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST s390_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG s390_reorg

#undef TARGET_VALID_POINTER_MODE
#define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg

#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE s390_pass_by_reference

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall

#undef TARGET_FIXED_CONDITION_CODE_REGS
#define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs

#undef TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE s390_cc_modes_compatible

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP hook_constcharptr_rtx_null

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL s390_output_dwarf_dtprel
#endif

#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE s390_mangle_type
#endif

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD s390_secondary_reload

#undef TARGET_LIBGCC_CMP_RETURN_MODE
#define TARGET_LIBGCC_CMP_RETURN_MODE s390_libgcc_cmp_return_mode

#undef TARGET_LIBGCC_SHIFT_COUNT_MODE
#define TARGET_LIBGCC_SHIFT_COUNT_MODE s390_libgcc_shift_count_mode

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-s390.h"