/* Subroutines used for code generation on IBM S/390 and zSeries
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
   2007, 2008 Free Software Foundation, Inc.
   Contributed by Hartmut Penner (hpenner@de.ibm.com) and
                  Ulrich Weigand (uweigand@de.ibm.com) and
                  Andreas Krebbel (Andreas.Krebbel@de.ibm.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "recog.h"
#include "expr.h"
#include "reload.h"
#include "toplev.h"
#include "basic-block.h"
#include "integrate.h"
#include "ggc.h"
#include "target.h"
#include "target-def.h"
#include "debug.h"
#include "langhooks.h"
#include "optabs.h"
#include "tree-gimple.h"
#include "df.h"

/* Define the specific costs for a given cpu.  */

struct processor_costs
{
  /* multiplication */
  const int m;        /* cost of an M instruction.  */
  const int mghi;     /* cost of an MGHI instruction.  */
  const int mh;       /* cost of an MH instruction.  */
  const int mhi;      /* cost of an MHI instruction.  */
  const int ml;       /* cost of an ML instruction.  */
  const int mr;       /* cost of an MR instruction.  */
  const int ms;       /* cost of an MS instruction.  */
  const int msg;      /* cost of an MSG instruction.  */
  const int msgf;     /* cost of an MSGF instruction.  */
  const int msgfr;    /* cost of an MSGFR instruction.  */
  const int msgr;     /* cost of an MSGR instruction.  */
  const int msr;      /* cost of an MSR instruction.  */
  const int mult_df;  /* cost of multiplication in DFmode.  */
  const int mxbr;     /* cost of an MXBR instruction.  */
  /* square root */
  const int sqxbr;    /* cost of square root in TFmode.  */
  const int sqdbr;    /* cost of square root in DFmode.  */
  const int sqebr;    /* cost of square root in SFmode.  */
  /* multiply and add */
  const int madbr;    /* cost of multiply and add in DFmode.  */
  const int maebr;    /* cost of multiply and add in SFmode.  */
  /* division */
  const int dxbr;     /* cost of a DXBR instruction.  */
  const int ddbr;     /* cost of a DDBR instruction.  */
  const int debr;     /* cost of a DEBR instruction.  */
  const int dlgr;     /* cost of a DLGR instruction.  */
  const int dlr;      /* cost of a DLR instruction.  */
  const int dr;       /* cost of a DR instruction.  */
  const int dsgfr;    /* cost of a DSGFR instruction.  */
  const int dsgr;     /* cost of a DSGR instruction.  */
};

const struct processor_costs *s390_cost;

static const
struct processor_costs z900_cost =
{
  COSTS_N_INSNS (5),     /* M     */
  COSTS_N_INSNS (10),    /* MGHI  */
  COSTS_N_INSNS (5),     /* MH    */
  COSTS_N_INSNS (4),     /* MHI   */
  COSTS_N_INSNS (5),     /* ML    */
  COSTS_N_INSNS (5),     /* MR    */
  COSTS_N_INSNS (4),     /* MS    */
  COSTS_N_INSNS (15),    /* MSG   */
  COSTS_N_INSNS (7),     /* MSGF  */
  COSTS_N_INSNS (7),     /* MSGFR */
  COSTS_N_INSNS (10),    /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (7),     /* multiplication in DFmode */
  COSTS_N_INSNS (13),    /* MXBR  */
  COSTS_N_INSNS (136),   /* SQXBR */
  COSTS_N_INSNS (44),    /* SQDBR */
  COSTS_N_INSNS (35),    /* SQEBR */
  COSTS_N_INSNS (18),    /* MADBR */
  COSTS_N_INSNS (13),    /* MAEBR */
  COSTS_N_INSNS (134),   /* DXBR  */
  COSTS_N_INSNS (30),    /* DDBR  */
  COSTS_N_INSNS (27),    /* DEBR  */
  COSTS_N_INSNS (220),   /* DLGR  */
  COSTS_N_INSNS (34),    /* DLR   */
  COSTS_N_INSNS (34),    /* DR    */
  COSTS_N_INSNS (32),    /* DSGFR */
  COSTS_N_INSNS (32),    /* DSGR  */
};

static const
struct processor_costs z990_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR  */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR  */
  COSTS_N_INSNS (40),    /* DDBR  */
  COSTS_N_INSNS (26),    /* DEBR  */
  COSTS_N_INSNS (176),   /* DLGR  */
  COSTS_N_INSNS (31),    /* DLR   */
  COSTS_N_INSNS (31),    /* DR    */
  COSTS_N_INSNS (31),    /* DSGFR */
  COSTS_N_INSNS (31),    /* DSGR  */
};

static const
struct processor_costs z9_109_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR  */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR  */
  COSTS_N_INSNS (40),    /* DDBR  */
  COSTS_N_INSNS (26),    /* DEBR  */
  COSTS_N_INSNS (30),    /* DLGR  */
  COSTS_N_INSNS (23),    /* DLR   */
  COSTS_N_INSNS (23),    /* DR    */
  COSTS_N_INSNS (24),    /* DSGFR */
  COSTS_N_INSNS (24),    /* DSGR  */
};

static const
struct processor_costs z10_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR  */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR  */
  COSTS_N_INSNS (40),    /* DDBR  */
  COSTS_N_INSNS (26),    /* DEBR  */
  COSTS_N_INSNS (30),    /* DLGR  */
  COSTS_N_INSNS (23),    /* DLR   */
  COSTS_N_INSNS (23),    /* DR    */
  COSTS_N_INSNS (24),    /* DSGFR */
  COSTS_N_INSNS (24),    /* DSGR  */
};

extern int reload_completed;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx s390_compare_op0, s390_compare_op1;

/* Save the result of a compare_and_swap until the branch or scc is
   emitted.  */
rtx s390_compare_emitted = NULL_RTX;

/* Structure used to hold the components of a S/390 memory
   address.  A legitimate address on S/390 is of the general
   form
          base + index + displacement
   where any of the components is optional.

   base and index are registers of the class ADDR_REGS,
   displacement is an unsigned 12-bit immediate constant.  */

struct s390_address
{
  rtx base;
  rtx indx;
  rtx disp;
  bool pointer;
  bool literal_pool;
};

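/* A sketch of how such an address decomposes (illustrative, not from
   the original sources): for the RTL address
     (plus (plus (reg %r2) (reg %r3)) (const_int 100))
   s390_decompose_address below yields indx = %r2, base = %r3 and
   disp = 100, corresponding to the assembler operand 100(%r2,%r3).  */
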
/* Which cpu are we tuning for.  */
enum processor_type s390_tune = PROCESSOR_max;
enum processor_flags s390_tune_flags;
/* Which instruction set architecture to use.  */
enum processor_type s390_arch;
enum processor_flags s390_arch_flags;

HOST_WIDE_INT s390_warn_framesize = 0;
HOST_WIDE_INT s390_stack_size = 0;
HOST_WIDE_INT s390_stack_guard = 0;

/* The following structure is embedded in the machine
   specific part of struct function.  */

struct s390_frame_layout GTY (())
{
  /* Offset within stack frame.  */
  HOST_WIDE_INT gprs_offset;
  HOST_WIDE_INT f0_offset;
  HOST_WIDE_INT f4_offset;
  HOST_WIDE_INT f8_offset;
  HOST_WIDE_INT backchain_offset;

  /* Numbers of the first and last gpr for which slots in the register
     save area are reserved.  */
  int first_save_gpr_slot;
  int last_save_gpr_slot;

  /* Numbers of the first and last gpr to be saved and restored.  */
  int first_save_gpr;
  int first_restore_gpr;
  int last_save_gpr;
  int last_restore_gpr;

  /* Bits standing for floating point registers.  Set, if the
     respective register has to be saved.  Starting with reg 16 (f0)
     at the rightmost bit.
     Bit 15 -  8  7  6  5  4  3  2  1  0
     fpr 15 -  8  7  5  3  1  6  4  2  0
     reg 31 - 24 23 22 21 20 19 18 17 16  */
  unsigned int fpr_bitmap;
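
  /* Worked example (illustrative): if f0 and f4 have to be saved,
     bits 0 and 2 are set, so fpr_bitmap == 0x5 and the accessor
     cfun_fpr_bit_p (2) defined below yields true.  */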

  /* Number of floating point registers f8-f15 which must be saved.  */
  int high_fprs;

  /* Set if return address needs to be saved.
     This flag is set by s390_return_addr_rtx if it could not use
     the initial value of r14 and therefore depends on r14 saved
     to the stack.  */
  bool save_return_addr_p;

  /* Size of stack frame.  */
  HOST_WIDE_INT frame_size;
};

/* Define the structure for the machine field in struct function.  */

struct machine_function GTY(())
{
  struct s390_frame_layout frame_layout;

  /* Literal pool base register.  */
  rtx base_reg;

  /* True if we may need to perform branch splitting.  */
  bool split_branches_pending_p;

  /* True during final stage of literal pool processing.  */
  bool decomposed_literal_pool_addresses_ok_p;

  /* Some local-dynamic TLS symbol name.  */
  const char *some_ld_name;

  bool has_landing_pad_p;
};

/* A few accessor macros for struct cfun->machine->s390_frame_layout.  */

#define cfun_frame_layout (cfun->machine->frame_layout)
#define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
#define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr_slot -    \
  cfun_frame_layout.first_save_gpr_slot + 1) * UNITS_PER_WORD)
#define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |=  \
  (1 << (BITNUM)))
#define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap &  \
  (1 << (BITNUM))))

/* Number of GPRs and FPRs used for argument passing.  */
#define GP_ARG_NUM_REG 5
#define FP_ARG_NUM_REG (TARGET_64BIT ? 4 : 2)

/* A couple of shortcuts.  */
#define CONST_OK_FOR_J(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'J', "J")
#define CONST_OK_FOR_K(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'K', "K")
#define CONST_OK_FOR_Os(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Os")
#define CONST_OK_FOR_Op(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Op")
#define CONST_OK_FOR_On(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "On")

#define REGNO_PAIR_OK(REGNO, MODE)                               \
  (HARD_REGNO_NREGS ((REGNO), (MODE)) == 1 || !((REGNO) & 1))

static enum machine_mode
s390_libgcc_cmp_return_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

static enum machine_mode
s390_libgcc_shift_count_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

/* Return true if the back end supports mode MODE.  */
static bool
s390_scalar_mode_supported_p (enum machine_mode mode)
{
  if (DECIMAL_FLOAT_MODE_P (mode))
    return true;
  else
    return default_scalar_mode_supported_p (mode);
}

/* Set the has_landing_pad_p flag in struct machine_function to VALUE.  */

void
s390_set_has_landing_pad_p (bool value)
{
  cfun->machine->has_landing_pad_p = value;
}

/* If two condition code modes are compatible, return a condition code
   mode which is compatible with both.  Otherwise, return
   VOIDmode.  */

static enum machine_mode
s390_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
{
  if (m1 == m2)
    return m1;

  switch (m1)
    {
    case CCZmode:
      if (m2 == CCUmode || m2 == CCTmode || m2 == CCZ1mode
          || m2 == CCSmode || m2 == CCSRmode || m2 == CCURmode)
        return m2;
      return VOIDmode;

    case CCSmode:
    case CCUmode:
    case CCTmode:
    case CCSRmode:
    case CCURmode:
    case CCZ1mode:
      if (m2 == CCZmode)
        return m1;

      return VOIDmode;

    default:
      return VOIDmode;
    }
  return VOIDmode;
}

/* Return true if SET either doesn't set the CC register, or else
   the source and destination have matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.  */

static bool
s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
{
  enum machine_mode set_mode;

  gcc_assert (GET_CODE (set) == SET);

  if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
    return 1;

  set_mode = GET_MODE (SET_DEST (set));
  switch (set_mode)
    {
    case CCSmode:
    case CCSRmode:
    case CCUmode:
    case CCURmode:
    case CCLmode:
    case CCL1mode:
    case CCL2mode:
    case CCL3mode:
    case CCT1mode:
    case CCT2mode:
    case CCT3mode:
      if (req_mode != set_mode)
        return 0;
      break;

    case CCZmode:
      if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
          && req_mode != CCSRmode && req_mode != CCURmode)
        return 0;
      break;

    case CCAPmode:
    case CCANmode:
      if (req_mode != CCAmode)
        return 0;
      break;

    default:
      gcc_unreachable ();
    }

  return (GET_MODE (SET_SRC (set)) == set_mode);
}

/* Return true if every SET in INSN that sets the CC register
   has source and destination with matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.
   If REQ_MODE is VOIDmode, always return false.  */

bool
s390_match_ccmode (rtx insn, enum machine_mode req_mode)
{
  int i;

  /* s390_tm_ccmode returns VOIDmode to indicate failure.  */
  if (req_mode == VOIDmode)
    return false;

  if (GET_CODE (PATTERN (insn)) == SET)
    return s390_match_ccmode_set (PATTERN (insn), req_mode);

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      {
        rtx set = XVECEXP (PATTERN (insn), 0, i);
        if (GET_CODE (set) == SET)
          if (!s390_match_ccmode_set (set, req_mode))
            return false;
      }

  return true;
}

/* If a test-under-mask instruction can be used to implement
   (compare (and ... OP1) OP2), return the CC mode required
   to do that.  Otherwise, return VOIDmode.
   MIXED is true if the instruction can distinguish between
   CC1 and CC2 for mixed selected bits (TMxx), it is false
   if the instruction cannot (TM).  */

enum machine_mode
s390_tm_ccmode (rtx op1, rtx op2, bool mixed)
{
  int bit0, bit1;

  /* ??? Fixme: should work on CONST_DOUBLE as well.  */
  if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
    return VOIDmode;

  /* Selected bits all zero: CC0.
     e.g.: int a; if ((a & (16 + 128)) == 0) */
  if (INTVAL (op2) == 0)
    return CCTmode;

  /* Selected bits all one: CC3.
     e.g.: int a; if ((a & (16 + 128)) == 16 + 128) */
  if (INTVAL (op2) == INTVAL (op1))
    return CCT3mode;

  /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. e.g.:
     int a;
     if ((a & (16 + 128)) == 16) -> CCT1
     if ((a & (16 + 128)) == 128) -> CCT2  */
  if (mixed)
    {
      bit1 = exact_log2 (INTVAL (op2));
      bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
      if (bit0 != -1 && bit1 != -1)
        return bit0 > bit1 ? CCT1mode : CCT2mode;
    }

  return VOIDmode;
}

/* Given a comparison code OP (EQ, NE, etc.) and the operands
   OP0 and OP1 of a COMPARE, return the mode to be used for the
   comparison.  */

enum machine_mode
s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
{
  switch (code)
    {
    case EQ:
    case NE:
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
        return CCAPmode;
      if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
           || GET_CODE (op1) == NEG)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCLmode;

      if (GET_CODE (op0) == AND)
        {
          /* Check whether we can potentially do it via TM.  */
          enum machine_mode ccmode;
          ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
          if (ccmode != VOIDmode)
            {
              /* Relax CCTmode to CCZmode to allow fall-back to AND
                 if that turns out to be beneficial.  */
              return ccmode == CCTmode ? CCZmode : ccmode;
            }
        }

      if (register_operand (op0, HImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
        return CCT3mode;
      if (register_operand (op0, QImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
        return CCT3mode;

      return CCZmode;

    case LE:
    case LT:
    case GE:
    case GT:
      /* The only overflow condition of NEG and ABS happens when
         -INT_MAX is used as parameter, which stays negative.  So
         we have an overflow from a positive value to a negative.
         Using CCAP mode the resulting cc can be used for comparisons.  */
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;

      /* If constants are involved in an add instruction it is possible to use
         the resulting cc for comparisons with zero.  Knowing the sign of the
         constant makes the overflow behavior predictable.  e.g.:
           int a, b; if ((b = a + c) > 0)
         with c as a constant value: c < 0 -> CCAN and c >= 0 -> CCAP  */
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
        {
          if (INTVAL (XEXP((op0), 1)) < 0)
            return CCANmode;
          else
            return CCAPmode;
        }
      /* Fall through.  */
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case UNLE:
    case UNLT:
    case UNGE:
    case UNGT:
    case LTGT:
      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCSRmode;
      return CCSmode;

    case LTU:
    case GEU:
      if (GET_CODE (op0) == PLUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL1mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    case LEU:
    case GTU:
      if (GET_CODE (op0) == MINUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL2mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    default:
      gcc_unreachable ();
    }
}

/* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
   that we can implement more efficiently.  */

void
s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
{
  /* Convert ZERO_EXTRACT back to AND to enable TM patterns.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == ZERO_EXTRACT
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && GET_CODE (XEXP (*op0, 2)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
      HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
      HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));

      if (len > 0 && len < modesize
          && pos >= 0 && pos + len <= modesize
          && modesize <= HOST_BITS_PER_WIDE_INT)
        {
          unsigned HOST_WIDE_INT block;
          block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
          block <<= modesize - pos - len;

          *op0 = gen_rtx_AND (GET_MODE (inner), inner,
                              gen_int_mode (block, GET_MODE (inner)));
        }
    }

  /* Narrow AND of memory against immediate to enable TM.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == AND
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      rtx mask = XEXP (*op0, 1);

      /* Ignore paradoxical SUBREGs if all extra bits are masked out.  */
      if (GET_CODE (inner) == SUBREG
          && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
          && (GET_MODE_SIZE (GET_MODE (inner))
              >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
          && ((INTVAL (mask)
               & GET_MODE_MASK (GET_MODE (inner))
               & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
              == 0))
        inner = SUBREG_REG (inner);

      /* Do not change volatile MEMs.  */
      if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
        {
          int part = s390_single_part (XEXP (*op0, 1),
                                       GET_MODE (inner), QImode, 0);
          if (part >= 0)
            {
              mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
              inner = adjust_address_nv (inner, QImode, part);
              *op0 = gen_rtx_AND (QImode, inner, mask);
            }
        }
    }

  /* Narrow comparisons against 0xffff to HImode if possible.  */
  if ((*code == EQ || *code == NE)
      && GET_CODE (*op1) == CONST_INT
      && INTVAL (*op1) == 0xffff
      && SCALAR_INT_MODE_P (GET_MODE (*op0))
      && (nonzero_bits (*op0, GET_MODE (*op0))
          & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
    {
      *op0 = gen_lowpart (HImode, *op0);
      *op1 = constm1_rtx;
    }
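
  /* Illustration of the narrowing above (not from the original sources):
     if x is an SImode value whose upper 16 bits are known to be zero,
     (x == 0xffff) becomes a HImode comparison of the low halfword
     against -1, which fits the compare-halfword-immediate patterns.  */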

  /* Remove redundant UNSPEC_CCU_TO_INT conversions if possible.  */
  if (GET_CODE (*op0) == UNSPEC
      && XINT (*op0, 1) == UNSPEC_CCU_TO_INT
      && XVECLEN (*op0, 0) == 1
      && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
      && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
      && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
      && *op1 == const0_rtx)
    {
      enum rtx_code new_code = UNKNOWN;
      switch (*code)
        {
        case EQ: new_code = EQ;  break;
        case NE: new_code = NE;  break;
        case LT: new_code = GTU; break;
        case GT: new_code = LTU; break;
        case LE: new_code = GEU; break;
        case GE: new_code = LEU; break;
        default: break;
        }

      if (new_code != UNKNOWN)
        {
          *op0 = XVECEXP (*op0, 0, 0);
          *code = new_code;
        }
    }

  /* Remove redundant UNSPEC_CCZ_TO_INT conversions if possible.  */
  if (GET_CODE (*op0) == UNSPEC
      && XINT (*op0, 1) == UNSPEC_CCZ_TO_INT
      && XVECLEN (*op0, 0) == 1
      && GET_MODE (XVECEXP (*op0, 0, 0)) == CCZmode
      && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
      && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
      && *op1 == const0_rtx)
    {
      enum rtx_code new_code = UNKNOWN;
      switch (*code)
        {
        case EQ: new_code = EQ; break;
        case NE: new_code = NE; break;
        default: break;
        }

      if (new_code != UNKNOWN)
        {
          *op0 = XVECEXP (*op0, 0, 0);
          *code = new_code;
        }
    }

  /* Simplify cascaded EQ, NE with const0_rtx.  */
  if ((*code == NE || *code == EQ)
      && (GET_CODE (*op0) == EQ || GET_CODE (*op0) == NE)
      && GET_MODE (*op0) == SImode
      && GET_MODE (XEXP (*op0, 0)) == CCZ1mode
      && REG_P (XEXP (*op0, 0))
      && XEXP (*op0, 1) == const0_rtx
      && *op1 == const0_rtx)
    {
      if ((*code == EQ && GET_CODE (*op0) == NE)
          || (*code == NE && GET_CODE (*op0) == EQ))
        *code = EQ;
      else
        *code = NE;
      *op0 = XEXP (*op0, 0);
    }

  /* Prefer register over memory as first operand.  */
  if (MEM_P (*op0) && REG_P (*op1))
    {
      rtx tem = *op0; *op0 = *op1; *op1 = tem;
      *code = swap_condition (*code);
    }
}

/* Emit a compare instruction suitable to implement the comparison
   OP0 CODE OP1.  Return the correct condition RTL to be placed in
   the IF_THEN_ELSE of the conditional branch testing the result.  */

rtx
s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
{
  enum machine_mode mode = s390_select_ccmode (code, op0, op1);
  rtx ret = NULL_RTX;

  /* Do not output a redundant compare instruction if a compare_and_swap
     pattern already computed the result and the machine modes are
     compatible.  */
  if (s390_compare_emitted
      && (s390_cc_modes_compatible (GET_MODE (s390_compare_emitted), mode)
          == GET_MODE (s390_compare_emitted)))
    ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx);
  else
    {
      rtx cc = gen_rtx_REG (mode, CC_REGNUM);

      emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
      ret = gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
    }
  s390_compare_emitted = NULL_RTX;
  return ret;
}

/* Emit a SImode compare and swap instruction setting MEM to NEW if OLD
   matches CMP.
   Return the correct condition RTL to be placed in the IF_THEN_ELSE of the
   conditional branch testing the result.  */

static rtx
s390_emit_compare_and_swap (enum rtx_code code, rtx old, rtx mem, rtx cmp, rtx new)
{
  rtx ret;

  emit_insn (gen_sync_compare_and_swap_ccsi (old, mem, cmp, new));
  ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx);

  s390_compare_emitted = NULL_RTX;

  return ret;
}

/* Emit a jump instruction to TARGET.  If COND is NULL_RTX, emit an
   unconditional jump, else a conditional jump under condition COND.  */

void
s390_emit_jump (rtx target, rtx cond)
{
  rtx insn;

  target = gen_rtx_LABEL_REF (VOIDmode, target);
  if (cond)
    target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);

  insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
  emit_jump_insn (insn);
}

/* Return branch condition mask to implement a branch
   specified by CODE.  Return -1 for invalid comparisons.  */

int
s390_branch_condition_mask (rtx code)
{
  const int CC0 = 1 << 3;
  const int CC1 = 1 << 2;
  const int CC2 = 1 << 1;
  const int CC3 = 1 << 0;

  gcc_assert (GET_CODE (XEXP (code, 0)) == REG);
  gcc_assert (REGNO (XEXP (code, 0)) == CC_REGNUM);
  gcc_assert (XEXP (code, 1) == const0_rtx);

  switch (GET_MODE (XEXP (code, 0)))
    {
    case CCZmode:
    case CCZ1mode:
      switch (GET_CODE (code))
        {
        case EQ: return CC0;
        case NE: return CC1 | CC2 | CC3;
        default: return -1;
        }
      break;

    case CCT1mode:
      switch (GET_CODE (code))
        {
        case EQ: return CC1;
        case NE: return CC0 | CC2 | CC3;
        default: return -1;
        }
      break;

    case CCT2mode:
      switch (GET_CODE (code))
        {
        case EQ: return CC2;
        case NE: return CC0 | CC1 | CC3;
        default: return -1;
        }
      break;

    case CCT3mode:
      switch (GET_CODE (code))
        {
        case EQ: return CC3;
        case NE: return CC0 | CC1 | CC2;
        default: return -1;
        }
      break;

    case CCLmode:
      switch (GET_CODE (code))
        {
        case EQ: return CC0 | CC2;
        case NE: return CC1 | CC3;
        default: return -1;
        }
      break;

    case CCL1mode:
      switch (GET_CODE (code))
        {
        case LTU: return CC2 | CC3;  /* carry */
        case GEU: return CC0 | CC1;  /* no carry */
        default: return -1;
        }
      break;

    case CCL2mode:
      switch (GET_CODE (code))
        {
        case GTU: return CC0 | CC1;  /* borrow */
        case LEU: return CC2 | CC3;  /* no borrow */
        default: return -1;
        }
      break;

    case CCL3mode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0 | CC2;
        case NE:  return CC1 | CC3;
        case LTU: return CC1;
        case GTU: return CC3;
        case LEU: return CC1 | CC2;
        case GEU: return CC2 | CC3;
        default: return -1;
        }

    case CCUmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC1 | CC2 | CC3;
        case LTU: return CC1;
        case GTU: return CC2;
        case LEU: return CC0 | CC1;
        case GEU: return CC0 | CC2;
        default: return -1;
        }
      break;

    case CCURmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC2 | CC1 | CC3;
        case LTU: return CC2;
        case GTU: return CC1;
        case LEU: return CC0 | CC2;
        case GEU: return CC0 | CC1;
        default: return -1;
        }
      break;

    case CCAPmode:
      switch (GET_CODE (code))
        {
        case EQ: return CC0;
        case NE: return CC1 | CC2 | CC3;
        case LT: return CC1 | CC3;
        case GT: return CC2;
        case LE: return CC0 | CC1 | CC3;
        case GE: return CC0 | CC2;
        default: return -1;
        }
      break;

    case CCANmode:
      switch (GET_CODE (code))
        {
        case EQ: return CC0;
        case NE: return CC1 | CC2 | CC3;
        case LT: return CC1;
        case GT: return CC2 | CC3;
        case LE: return CC0 | CC1;
        case GE: return CC0 | CC2 | CC3;
        default: return -1;
        }
      break;

    case CCSmode:
      switch (GET_CODE (code))
        {
        case EQ: return CC0;
        case NE: return CC1 | CC2 | CC3;
        case LT: return CC1;
        case GT: return CC2;
        case LE: return CC0 | CC1;
        case GE: return CC0 | CC2;
        case UNORDERED: return CC3;
        case ORDERED: return CC0 | CC1 | CC2;
        case UNEQ: return CC0 | CC3;
        case UNLT: return CC1 | CC3;
        case UNGT: return CC2 | CC3;
        case UNLE: return CC0 | CC1 | CC3;
        case UNGE: return CC0 | CC2 | CC3;
        case LTGT: return CC1 | CC2;
        default: return -1;
        }
      break;

    case CCSRmode:
      switch (GET_CODE (code))
        {
        case EQ: return CC0;
        case NE: return CC2 | CC1 | CC3;
        case LT: return CC2;
        case GT: return CC1;
        case LE: return CC0 | CC2;
        case GE: return CC0 | CC1;
        case UNORDERED: return CC3;
        case ORDERED: return CC0 | CC2 | CC1;
        case UNEQ: return CC0 | CC3;
        case UNLT: return CC2 | CC3;
        case UNGT: return CC1 | CC3;
        case UNLE: return CC0 | CC2 | CC3;
        case UNGE: return CC0 | CC1 | CC3;
        case LTGT: return CC2 | CC1;
        default: return -1;
        }
      break;

    default:
      return -1;
    }
}

AK
1065
1066/* Return branch condition mask to implement a compare and branch
1067 specified by CODE. Return -1 for invalid comparisons. */
1068
1069int
1070s390_compare_and_branch_condition_mask (rtx code)
1071{
1072 const int CC0 = 1 << 3;
1073 const int CC1 = 1 << 2;
1074 const int CC2 = 1 << 1;
1075
1076 switch (GET_CODE (code))
1077 {
1078 case EQ:
1079 return CC0;
1080 case NE:
1081 return CC1 | CC2;
1082 case LT:
1083 case LTU:
1084 return CC1;
1085 case GT:
1086 case GTU:
1087 return CC2;
1088 case LE:
1089 case LEU:
1090 return CC0 | CC1;
1091 case GE:
1092 case GEU:
1093 return CC0 | CC2;
1094 default:
1095 gcc_unreachable ();
1096 }
1097 return -1;
1098}
1099
/* If INV is false, return assembler mnemonic string to implement
   a branch specified by CODE.  If INV is true, return mnemonic
   for the corresponding inverted branch.  */

static const char *
s390_branch_condition_mnemonic (rtx code, int inv)
{
  int mask;

  static const char *const mnemonic[16] =
    {
      NULL, "o", "h", "nle",
      "l", "nhe", "lh", "ne",
      "e", "nlh", "he", "nl",
      "le", "nh", "no", NULL
    };

  if (GET_CODE (XEXP (code, 0)) == REG
      && REGNO (XEXP (code, 0)) == CC_REGNUM
      && XEXP (code, 1) == const0_rtx)
    mask = s390_branch_condition_mask (code);
  else
    mask = s390_compare_and_branch_condition_mask (code);

  gcc_assert (mask >= 0);

  if (inv)
    mask ^= 15;

  gcc_assert (mask >= 1 && mask <= 14);

  return mnemonic[mask];
}
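
/* Worked example (illustrative): for (ge (reg CC) (const_int 0)) in
   CCSmode, s390_branch_condition_mask returns CC0 | CC2 == 10 and
   mnemonic[10] is "he" (branch on high or equal); with INV set the
   mask becomes 5 and the mnemonic "nhe".  */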

/* Return the part of op which has a value different from def.
   The size of the part is determined by mode.
   Use this function only if you already know that op really
   contains such a part.  */

unsigned HOST_WIDE_INT
s390_extract_part (rtx op, enum machine_mode mode, int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
  int part_bits = GET_MODE_BITSIZE (mode);
  unsigned HOST_WIDE_INT part_mask
    = ((unsigned HOST_WIDE_INT)1 << part_bits) - 1;
  int i;

  for (i = 0; i < max_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= part_bits;

      if ((value & part_mask) != (def & part_mask))
        return value & part_mask;
    }

  gcc_unreachable ();
}
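
/* For instance (illustrative): with OP == (const_int 0x12340000),
   MODE == HImode and DEF == 0, the lowest halfword 0x0000 equals DEF,
   so the next halfword, 0x1234, is returned.  */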

/* If OP is an integer constant of mode MODE with exactly one
   part of mode PART_MODE unequal to DEF, return the number of that
   part.  Otherwise, return -1.  */

int
s390_single_part (rtx op,
                  enum machine_mode mode,
                  enum machine_mode part_mode,
                  int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
  unsigned HOST_WIDE_INT part_mask
    = ((unsigned HOST_WIDE_INT)1 << GET_MODE_BITSIZE (part_mode)) - 1;
  int i, part = -1;

  if (GET_CODE (op) != CONST_INT)
    return -1;

  for (i = 0; i < n_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= GET_MODE_BITSIZE (part_mode);

      if ((value & part_mask) != (def & part_mask))
        {
          if (part != -1)
            return -1;
          else
            part = i;
        }
    }
  return part == -1 ? -1 : n_parts - 1 - part;
}
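
/* For instance (illustrative): with OP == (const_int 0xffff0000),
   MODE == DImode, PART_MODE == HImode and DEF == 0, exactly one
   halfword differs from DEF and the function returns 2, parts being
   numbered from the most significant end.  */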

/* Return true if IN contains a contiguous bitfield in the lower SIZE
   bits and no other bits are set in IN.  POS and LENGTH can be used
   to obtain the start position and the length of the bitfield.

   POS gives the position of the first bit of the bitfield counting
   from the lowest order bit starting with zero.  In order to use this
   value for S/390 instructions this has to be converted to "bits big
   endian" style.  */

bool
s390_contiguous_bitmask_p (unsigned HOST_WIDE_INT in, int size,
                           int *pos, int *length)
{
  int tmp_pos = 0;
  int tmp_length = 0;
  int i;
  unsigned HOST_WIDE_INT mask = 1ULL;
  bool contiguous = false;

  for (i = 0; i < size; mask <<= 1, i++)
    {
      if (contiguous)
        {
          if (mask & in)
            tmp_length++;
          else
            break;
        }
      else
        {
          if (mask & in)
            {
              contiguous = true;
              tmp_length++;
            }
          else
            tmp_pos++;
        }
    }

  if (!tmp_length)
    return false;

  /* Calculate a mask for all bits beyond the contiguous bits.  */
  mask = (-1LL & ~(((1ULL << (tmp_length + tmp_pos - 1)) << 1) - 1));

  if (mask & in)
    return false;

  if (tmp_length + tmp_pos - 1 > size)
    return false;

  if (length)
    *length = tmp_length;

  if (pos)
    *pos = tmp_pos;

  return true;
}
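
/* Worked example (illustrative): IN == 0xf0 with SIZE == 32 yields
   *POS == 4, *LENGTH == 4 and a return value of true, whereas
   IN == 0x90 yields false because the set bits are not contiguous.  */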

/* Check whether we can (and want to) split a double-word
   move in mode MODE from SRC to DST into two single-word
   moves, moving the subword FIRST_SUBWORD first.  */

bool
s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
{
  /* Floating point registers cannot be split.  */
  if (FP_REG_P (src) || FP_REG_P (dst))
    return false;

  /* We don't need to split if operands are directly accessible.  */
  if (s_operand (src, mode) || s_operand (dst, mode))
    return false;

  /* Non-offsettable memory references cannot be split.  */
  if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
      || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
    return false;

  /* Moving the first subword must not clobber a register
     needed to move the second subword.  */
  if (register_operand (dst, mode))
    {
      rtx subreg = operand_subword (dst, first_subword, 0, mode);
      if (reg_overlap_mentioned_p (subreg, src))
        return false;
    }

  return true;
}

/* Return true if it can be proven that [MEM1, MEM1 + SIZE]
   and [MEM2, MEM2 + SIZE] do overlap and false
   otherwise.  */

bool
s390_overlap_p (rtx mem1, rtx mem2, HOST_WIDE_INT size)
{
  rtx addr1, addr2, addr_delta;
  HOST_WIDE_INT delta;

  if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
    return true;

  if (size == 0)
    return false;

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);

  /* This overlapping check is used by peepholes merging memory block
     operations.  Overlapping operations would otherwise be recognized by
     the S/390 hardware and would fall back to a slower implementation.
     Allowing overlapping operations would lead to slow code but not to
     wrong code.  Therefore we are somewhat optimistic if we cannot prove
     that the memory blocks are overlapping.
     That's why we return false here although this may accept operations on
     overlapping memory areas.  */
  if (!addr_delta || GET_CODE (addr_delta) != CONST_INT)
    return false;

  delta = INTVAL (addr_delta);

  if (delta == 0
      || (delta > 0 && delta < size)
      || (delta < 0 && -delta < size))
    return true;

  return false;
}
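
/* For instance (illustrative): two MEMs whose addresses are (reg X) and
   (plus (reg X) (const_int 4)), checked with SIZE == 8, give a provable
   delta of 4 < 8, so the blocks overlap and true is returned.  */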

/* Check whether the address of memory reference MEM2 equals exactly
   the address of memory reference MEM1 plus DELTA.  Return true if
   we can prove this to be the case, false otherwise.  */

bool
s390_offset_p (rtx mem1, rtx mem2, rtx delta)
{
  rtx addr1, addr2, addr_delta;

  if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
    return false;

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
  if (!addr_delta || !rtx_equal_p (addr_delta, delta))
    return false;

  return true;
}

/* Expand logical operator CODE in mode MODE with operands OPERANDS.  */

void
s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
                              rtx *operands)
{
  enum machine_mode wmode = mode;
  rtx dst = operands[0];
  rtx src1 = operands[1];
  rtx src2 = operands[2];
  rtx op, clob, tem;

  /* If we cannot handle the operation directly, use a temp register.  */
  if (!s390_logical_operator_ok_p (operands))
    dst = gen_reg_rtx (mode);

  /* QImode and HImode patterns make sense only if we have a destination
     in memory.  Otherwise perform the operation in SImode.  */
  if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
    wmode = SImode;

  /* Widen operands if required.  */
  if (mode != wmode)
    {
      if (GET_CODE (dst) == SUBREG
          && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
        dst = tem;
      else if (REG_P (dst))
        dst = gen_rtx_SUBREG (wmode, dst, 0);
      else
        dst = gen_reg_rtx (wmode);

      if (GET_CODE (src1) == SUBREG
          && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
        src1 = tem;
      else if (GET_MODE (src1) != VOIDmode)
        src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);

      if (GET_CODE (src2) == SUBREG
          && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
        src2 = tem;
      else if (GET_MODE (src2) != VOIDmode)
        src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
    }

  /* Emit the instruction.  */
  op = gen_rtx_SET (VOIDmode, dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
  clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
  emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));

  /* Fix up the destination if needed.  */
  if (dst != operands[0])
    emit_move_insn (operands[0], gen_lowpart (mode, dst));
}
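
/* For instance (illustrative): an XOR of two QImode registers is widened
   to an SImode XOR on SUBREGs of the operands, since the narrow patterns
   only make sense for memory destinations as noted above; the QImode
   lowpart of the result is copied back into operands[0] afterwards.  */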

/* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR).  */

bool
s390_logical_operator_ok_p (rtx *operands)
{
  /* If the destination operand is in memory, it needs to coincide
     with one of the source operands.  After reload, it has to be
     the first source operand.  */
  if (GET_CODE (operands[0]) == MEM)
    return rtx_equal_p (operands[0], operands[1])
           || (!reload_completed && rtx_equal_p (operands[0], operands[2]));

  return true;
}

/* Narrow logical operation CODE of memory operand MEMOP with immediate
   operand IMMOP to switch from SS to SI type instructions.  */

void
s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
{
  int def = code == AND ? -1 : 0;
  HOST_WIDE_INT mask;
  int part;

  gcc_assert (GET_CODE (*memop) == MEM);
  gcc_assert (!MEM_VOLATILE_P (*memop));

  mask = s390_extract_part (*immop, QImode, def);
  part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
  gcc_assert (part >= 0);

  *memop = adjust_address (*memop, QImode, part);
  *immop = gen_int_mode (mask, QImode);
}
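
/* For instance (illustrative): an SImode AND of a memory operand with
   0xffffff00 only changes its least significant byte, so it is rewritten
   as a QImode AND of byte 3 with 0x00 -- allowing a single SI-type
   instruction instead of a full SS-type one.  */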

/* How to allocate a 'struct machine_function'.  */

static struct machine_function *
s390_init_machine_status (void)
{
  return GGC_CNEW (struct machine_function);
}

/* Change optimizations to be performed, depending on the
   optimization level.

   LEVEL is the optimization level specified; 2 if `-O2' is
   specified, 1 if `-O' is specified, and 0 if neither is specified.

   SIZE is nonzero if `-Os' is specified and zero otherwise.  */

void
optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
{
  /* ??? There are apparently still problems with -fcaller-saves.  */
  flag_caller_saves = 0;

  /* By default, always emit DWARF-2 unwind info.  This allows debugging
     without maintaining a stack frame back-chain.  */
  flag_asynchronous_unwind_tables = 1;

  /* Use MVCLE instructions to decrease code size if requested.  */
  if (size != 0)
    target_flags |= MASK_MVCLE;
}

/* Return true if ARG is the name of a processor.  Set *TYPE and *FLAGS
   to the associated processor_type and processor_flags if so.  */

static bool
s390_handle_arch_option (const char *arg,
                         enum processor_type *type,
                         enum processor_flags *flags)
{
  static struct pta
    {
      const char *const name;		/* processor name or nickname.  */
      const enum processor_type processor;
      const enum processor_flags flags;
    }
  const processor_alias_table[] =
    {
      {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
      {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
      {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
      {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
                                    | PF_LONG_DISPLACEMENT},
      {"z9-109", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
                                        | PF_LONG_DISPLACEMENT | PF_EXTIMM},
      {"z9-ec", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
                                       | PF_LONG_DISPLACEMENT | PF_EXTIMM | PF_DFP },
      {"z10", PROCESSOR_2097_Z10, PF_IEEE_FLOAT | PF_ZARCH
                                  | PF_LONG_DISPLACEMENT | PF_EXTIMM | PF_DFP | PF_Z10},
    };
  size_t i;

  for (i = 0; i < ARRAY_SIZE (processor_alias_table); i++)
    if (strcmp (arg, processor_alias_table[i].name) == 0)
      {
        *type = processor_alias_table[i].processor;
        *flags = processor_alias_table[i].flags;
        return true;
      }
  return false;
}

/* Implement TARGET_HANDLE_OPTION.  */

static bool
s390_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case OPT_march_:
      return s390_handle_arch_option (arg, &s390_arch, &s390_arch_flags);

    case OPT_mstack_guard_:
      if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_guard) != 1)
        return false;
      if (exact_log2 (s390_stack_guard) == -1)
        error ("stack guard value must be an exact power of 2");
      return true;

    case OPT_mstack_size_:
      if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_size) != 1)
        return false;
      if (exact_log2 (s390_stack_size) == -1)
        error ("stack size must be an exact power of 2");
      return true;

    case OPT_mtune_:
      return s390_handle_arch_option (arg, &s390_tune, &s390_tune_flags);

    case OPT_mwarn_framesize_:
      return sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_warn_framesize) == 1;

    default:
      return true;
    }
}

void
override_options (void)
{
  /* Set up function hooks.  */
  init_machine_status = s390_init_machine_status;

  /* Architecture mode defaults according to ABI.  */
  if (!(target_flags_explicit & MASK_ZARCH))
    {
      if (TARGET_64BIT)
        target_flags |= MASK_ZARCH;
      else
        target_flags &= ~MASK_ZARCH;
    }

  /* Determine processor architectural level.  */
  if (!s390_arch_string)
    {
      s390_arch_string = TARGET_ZARCH ? "z900" : "g5";
      s390_handle_arch_option (s390_arch_string, &s390_arch, &s390_arch_flags);
    }

  /* Determine processor to tune for.  */
  if (s390_tune == PROCESSOR_max)
    {
      s390_tune = s390_arch;
      s390_tune_flags = s390_arch_flags;
    }

  /* Sanity checks.  */
  if (TARGET_ZARCH && !TARGET_CPU_ZARCH)
    error ("z/Architecture mode not supported on %s", s390_arch_string);
  if (TARGET_64BIT && !TARGET_ZARCH)
    error ("64-bit ABI not supported in ESA/390 mode");

  if (TARGET_HARD_DFP && !TARGET_DFP)
    {
      if (target_flags_explicit & MASK_HARD_DFP)
        {
          if (!TARGET_CPU_DFP)
            error ("Hardware decimal floating point instructions"
                   " not available on %s", s390_arch_string);
          if (!TARGET_ZARCH)
            error ("Hardware decimal floating point instructions"
                   " not available in ESA/390 mode");
        }
      else
        target_flags &= ~MASK_HARD_DFP;
    }

  if ((target_flags_explicit & MASK_SOFT_FLOAT) && TARGET_SOFT_FLOAT)
    {
      if ((target_flags_explicit & MASK_HARD_DFP) && TARGET_HARD_DFP)
        error ("-mhard-dfp can't be used in conjunction with -msoft-float");

      target_flags &= ~MASK_HARD_DFP;
    }

  /* Set processor cost function.  */
  switch (s390_tune)
    {
    case PROCESSOR_2084_Z990:
      s390_cost = &z990_cost;
      break;
    case PROCESSOR_2094_Z9_109:
      s390_cost = &z9_109_cost;
      break;
    case PROCESSOR_2097_Z10:
      s390_cost = &z10_cost;
      break;
    default:
      s390_cost = &z900_cost;
    }

  if (TARGET_BACKCHAIN && TARGET_PACKED_STACK && TARGET_HARD_FLOAT)
    error ("-mbackchain -mpacked-stack -mhard-float are not supported "
           "in combination");

  if (s390_stack_size)
    {
      if (s390_stack_guard >= s390_stack_size)
        error ("stack size must be greater than the stack guard value");
      else if (s390_stack_size > 1 << 16)
        error ("stack size must not be greater than 64k");
    }
  else if (s390_stack_guard)
    error ("-mstack-guard implies use of -mstack-size");

#ifdef TARGET_DEFAULT_LONG_DOUBLE_128
  if (!(target_flags_explicit & MASK_LONG_DOUBLE_128))
    target_flags |= MASK_LONG_DOUBLE_128;
#endif
}

/* Map for smallest class containing reg regno.  */

const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  ADDR_REGS,    CC_REGS,   ADDR_REGS, ADDR_REGS,
  ACCESS_REGS,  ACCESS_REGS
};

/* Return attribute type of insn.  */

static enum attr_type
s390_safe_attr_type (rtx insn)
{
  if (recog_memoized (insn) >= 0)
    return get_attr_type (insn);
  else
    return TYPE_NONE;
}

/* Return true if DISP is a valid short displacement.  */

static bool
s390_short_displacement (rtx disp)
{
  /* No displacement is OK.  */
  if (!disp)
    return true;

  /* Integer displacement in range.  */
  if (GET_CODE (disp) == CONST_INT)
    return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;

  /* GOT offset is not OK, the GOT can be large.  */
  if (GET_CODE (disp) == CONST
      && GET_CODE (XEXP (disp, 0)) == UNSPEC
      && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
          || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
    return false;

  /* All other symbolic constants are literal pool references,
     which are OK as the literal pool must be small.  */
  if (GET_CODE (disp) == CONST)
    return true;

  return false;
}

/* Decompose a RTL expression ADDR for a memory address into
   its components, returned in OUT.

   Returns false if ADDR is not a valid memory address, true
   otherwise.  If OUT is NULL, don't return the components,
   but check for validity only.

   Note: Only addresses in canonical form are recognized.
   LEGITIMIZE_ADDRESS should convert non-canonical forms to the
   canonical form so that they will be recognized.  */

static int
s390_decompose_address (rtx addr, struct s390_address *out)
{
  HOST_WIDE_INT offset = 0;
  rtx base = NULL_RTX;
  rtx indx = NULL_RTX;
  rtx disp = NULL_RTX;
  rtx orig_disp;
  bool pointer = false;
  bool base_ptr = false;
  bool indx_ptr = false;
  bool literal_pool = false;

  /* We may need to substitute the literal pool base register into the address
     below.  However, at this point we do not know which register is going to
     be used as base, so we substitute the arg pointer register.  This is going
     to be treated as holding a pointer below -- it shouldn't be used for any
     other purpose.  */
  rtx fake_pool_base = gen_rtx_REG (Pmode, ARG_POINTER_REGNUM);

  /* Decompose address into base + index + displacement.  */

  if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
    base = addr;

  else if (GET_CODE (addr) == PLUS)
    {
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == UNSPEC)
        {
          if (code1 == REG || code1 == UNSPEC)
            {
              indx = op0;	/* index + base */
              base = op1;
            }

          else
            {
              base = op0;	/* base + displacement */
              disp = op1;
            }
        }

      else if (code0 == PLUS)
        {
          indx = XEXP (op0, 0);	/* index + base + disp */
          base = XEXP (op0, 1);
          disp = op1;
        }

      else
        {
          return false;
        }
    }

  else
    disp = addr;		/* displacement */

  /* Extract integer part of displacement.  */
  orig_disp = disp;
  if (disp)
    {
      if (GET_CODE (disp) == CONST_INT)
        {
          offset = INTVAL (disp);
          disp = NULL_RTX;
        }
      else if (GET_CODE (disp) == CONST
               && GET_CODE (XEXP (disp, 0)) == PLUS
               && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
        {
          offset = INTVAL (XEXP (XEXP (disp, 0), 1));
          disp = XEXP (XEXP (disp, 0), 0);
        }
    }

  /* Strip off CONST here to avoid special case tests later.  */
  if (disp && GET_CODE (disp) == CONST)
    disp = XEXP (disp, 0);

  /* We can convert literal pool addresses to
     displacements by basing them off the base register.  */
  if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
    {
      /* Either base or index must be free to hold the base register.  */
      if (!base)
        base = fake_pool_base, literal_pool = true;
      else if (!indx)
        indx = fake_pool_base, literal_pool = true;
      else
        return false;

      /* Mark up the displacement.  */
      disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
                             UNSPEC_LTREL_OFFSET);
    }

  /* Validate base register.  */
  if (base)
    {
      if (GET_CODE (base) == UNSPEC)
        switch (XINT (base, 1))
          {
          case UNSPEC_LTREF:
            if (!disp)
              disp = gen_rtx_UNSPEC (Pmode,
                                     gen_rtvec (1, XVECEXP (base, 0, 0)),
                                     UNSPEC_LTREL_OFFSET);
            else
              return false;

            base = XVECEXP (base, 0, 1);
            break;

          case UNSPEC_LTREL_BASE:
            if (XVECLEN (base, 0) == 1)
              base = fake_pool_base, literal_pool = true;
            else
              base = XVECEXP (base, 0, 1);
            break;

          default:
            return false;
          }

      if (!REG_P (base)
          || (GET_MODE (base) != SImode
              && GET_MODE (base) != Pmode))
        return false;

      if (REGNO (base) == STACK_POINTER_REGNUM
          || REGNO (base) == FRAME_POINTER_REGNUM
          || ((reload_completed || reload_in_progress)
              && frame_pointer_needed
              && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
          || REGNO (base) == ARG_POINTER_REGNUM
          || (flag_pic
              && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
        pointer = base_ptr = true;

      if ((reload_completed || reload_in_progress)
          && base == cfun->machine->base_reg)
        pointer = base_ptr = literal_pool = true;
    }

  /* Validate index register.  */
  if (indx)
    {
      if (GET_CODE (indx) == UNSPEC)
        switch (XINT (indx, 1))
          {
          case UNSPEC_LTREF:
            if (!disp)
              disp = gen_rtx_UNSPEC (Pmode,
                                     gen_rtvec (1, XVECEXP (indx, 0, 0)),
                                     UNSPEC_LTREL_OFFSET);
            else
              return false;

            indx = XVECEXP (indx, 0, 1);
            break;

          case UNSPEC_LTREL_BASE:
            if (XVECLEN (indx, 0) == 1)
              indx = fake_pool_base, literal_pool = true;
            else
              indx = XVECEXP (indx, 0, 1);
            break;

          default:
            return false;
          }

      if (!REG_P (indx)
          || (GET_MODE (indx) != SImode
              && GET_MODE (indx) != Pmode))
        return false;

      if (REGNO (indx) == STACK_POINTER_REGNUM
          || REGNO (indx) == FRAME_POINTER_REGNUM
          || ((reload_completed || reload_in_progress)
              && frame_pointer_needed
              && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
          || REGNO (indx) == ARG_POINTER_REGNUM
1903 || (flag_pic
1904 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
3ed99cc9 1905 pointer = indx_ptr = true;
f01cf809
UW
1906
1907 if ((reload_completed || reload_in_progress)
1908 && indx == cfun->machine->base_reg)
1909 pointer = indx_ptr = literal_pool = true;
ab96de7e 1910 }
38899e29 1911
ab96de7e
AS
1912 /* Prefer to use pointer as base, not index. */
1913 if (base && indx && !base_ptr
1914 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
1915 {
1916 rtx tmp = base;
1917 base = indx;
1918 indx = tmp;
1919 }
f19a9af7 1920
ab96de7e
AS
1921 /* Validate displacement. */
1922 if (!disp)
1923 {
63296cb1
AK
1924 /* If virtual registers are involved, the displacement will change later
1925 anyway as the virtual registers get eliminated. This could make a
1926 valid displacement invalid, but it is more likely to make an invalid
1927 displacement valid, because we sometimes access the register save area
1928 via negative offsets to one of those registers.
ab96de7e
AS
1929 Thus we don't check the displacement for validity here. If after
1930 elimination the displacement turns out to be invalid after all,
1931 this is fixed up by reload in any case. */
1932 if (base != arg_pointer_rtx
1933 && indx != arg_pointer_rtx
1934 && base != return_address_pointer_rtx
63296cb1
AK
1935 && indx != return_address_pointer_rtx
1936 && base != frame_pointer_rtx
1937 && indx != frame_pointer_rtx
1938 && base != virtual_stack_vars_rtx
1939 && indx != virtual_stack_vars_rtx)
ab96de7e 1940 if (!DISP_IN_RANGE (offset))
3ed99cc9 1941 return false;
ab96de7e
AS
1942 }
1943 else
1944 {
1945 /* All the special cases are pointers. */
3ed99cc9 1946 pointer = true;
f19a9af7 1947
ab96de7e
AS
1948 /* In the small-PIC case, the linker converts @GOT
1949 and @GOTNTPOFF offsets to possible displacements. */
1950 if (GET_CODE (disp) == UNSPEC
1951 && (XINT (disp, 1) == UNSPEC_GOT
1952 || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
ab96de7e
AS
1953 && flag_pic == 1)
1954 {
1955 ;
1956 }
f19a9af7 1957
ab96de7e 1958 /* Accept chunkified literal pool symbol references. */
3cd045d1
UW
1959 else if (cfun && cfun->machine
1960 && cfun->machine->decomposed_literal_pool_addresses_ok_p
1961 && GET_CODE (disp) == MINUS
ab96de7e
AS
1962 && GET_CODE (XEXP (disp, 0)) == LABEL_REF
1963 && GET_CODE (XEXP (disp, 1)) == LABEL_REF)
1964 {
1965 ;
1966 }
f19a9af7 1967
ab96de7e
AS
1968 /* Accept literal pool references. */
1969 else if (GET_CODE (disp) == UNSPEC
1970 && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
1971 {
1972 orig_disp = gen_rtx_CONST (Pmode, disp);
1973 if (offset)
1974 {
1975 /* If we have an offset, make sure it does not
1976 exceed the size of the constant pool entry. */
1977 rtx sym = XVECEXP (disp, 0, 0);
1978 if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
3ed99cc9 1979 return false;
f19a9af7 1980
ab96de7e
AS
1981 orig_disp = plus_constant (orig_disp, offset);
1982 }
1983 }
1984
1985 else
3ed99cc9 1986 return false;
f19a9af7
AK
1987 }
1988
ab96de7e 1989 if (!base && !indx)
3ed99cc9 1990 pointer = true;
ab96de7e
AS
1991
1992 if (out)
1993 {
1994 out->base = base;
1995 out->indx = indx;
1996 out->disp = orig_disp;
1997 out->pointer = pointer;
f01cf809 1998 out->literal_pool = literal_pool;
ab96de7e
AS
1999 }
2000
3ed99cc9 2001 return true;
f19a9af7
AK
2002}
2003
d98ad410
UW
2004/* Decompose an RTL expression OP for a shift count into its components,
2005 and return the base register in BASE and the offset in OFFSET.
2006
d98ad410
UW
2007 Return true if OP is a valid shift count, false if not. */
2008
2009bool
4989e88a 2010s390_decompose_shift_count (rtx op, rtx *base, HOST_WIDE_INT *offset)
d98ad410
UW
2011{
2012 HOST_WIDE_INT off = 0;
2013
d98ad410
UW
2014 /* We can have an integer constant, an address register,
2015 or a sum of the two. */
2016 if (GET_CODE (op) == CONST_INT)
2017 {
2018 off = INTVAL (op);
2019 op = NULL_RTX;
2020 }
2021 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
2022 {
2023 off = INTVAL (XEXP (op, 1));
2024 op = XEXP (op, 0);
2025 }
2026 while (op && GET_CODE (op) == SUBREG)
2027 op = SUBREG_REG (op);
2028
2029 if (op && GET_CODE (op) != REG)
2030 return false;
2031
2032 if (offset)
2033 *offset = off;
2034 if (base)
2035 *base = op;
2036
2037 return true;
2038}
2039
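/* Illustrative aside, not part of s390.c: the shapes accepted by the
   shift-count decomposition above, restated over a toy expression
   type.  A valid count is CONST, REG, or PLUS (REG, CONST); the
   SUBREG peeling done by the real code is omitted, and all toy_
   names are hypothetical.  TOY_PLUS nodes have both operands set.  */

enum toy_code { TOY_CONST, TOY_REG, TOY_PLUS };

struct toy_expr
{
  enum toy_code code;
  long val;                      /* TOY_CONST */
  int regno;                     /* TOY_REG */
  struct toy_expr *op0, *op1;    /* TOY_PLUS */
};

static int
toy_decompose_shift_count (const struct toy_expr *op, int *base, long *offset)
{
  long off = 0;

  if (op->code == TOY_CONST)
    {
      off = op->val;
      op = 0;
    }
  else if (op->code == TOY_PLUS && op->op1->code == TOY_CONST)
    {
      off = op->op1->val;
      op = op->op0;
    }

  if (op && op->code != TOY_REG)
    return 0;                    /* anything else is not a shift count */

  *offset = off;
  *base = op ? op->regno : -1;   /* -1: no base register */
  return 1;
}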
2040
ab96de7e 2041/* Return true if OP is a valid address without index. */
3c50106f 2042
ab96de7e
AS
2043bool
2044s390_legitimate_address_without_index_p (rtx op)
2045{
2046 struct s390_address addr;
2047
2048 if (!s390_decompose_address (XEXP (op, 0), &addr))
2049 return false;
2050 if (addr.indx)
2051 return false;
2052
2053 return true;
2054}
2055
cd8dc1f9
WG
2056
2057/* Evaluates constraint strings described by the regular expression
2058 ([A|B](Q|R|S|T))|U|W and returns 1 if OP is a valid operand for the
2059 constraint given in STR, and 0 otherwise. */
ab96de7e
AS
2060
2061int
cd8dc1f9 2062s390_mem_constraint (const char *str, rtx op)
ab96de7e
AS
2063{
2064 struct s390_address addr;
cd8dc1f9 2065 char c = str[0];
ab96de7e
AS
2066
2067 /* Check for offsettable variants of memory constraints. */
2068 if (c == 'A')
2069 {
2070 /* Only accept non-volatile MEMs. */
2071 if (!MEM_P (op) || MEM_VOLATILE_P (op))
2072 return 0;
2073
2074 if ((reload_completed || reload_in_progress)
cd8dc1f9 2075 ? !offsettable_memref_p (op) : !offsettable_nonstrict_memref_p (op))
ab96de7e
AS
2076 return 0;
2077
2078 c = str[1];
2079 }
2080
2081 /* Check for non-literal-pool variants of memory constraints. */
2082 else if (c == 'B')
2083 {
2084 if (GET_CODE (op) != MEM)
2085 return 0;
2086 if (!s390_decompose_address (XEXP (op, 0), &addr))
2087 return 0;
f01cf809 2088 if (addr.literal_pool)
ab96de7e
AS
2089 return 0;
2090
2091 c = str[1];
2092 }
2093
2094 switch (c)
2095 {
2096 case 'Q':
2097 if (GET_CODE (op) != MEM)
2098 return 0;
2099 if (!s390_decompose_address (XEXP (op, 0), &addr))
2100 return 0;
2101 if (addr.indx)
2102 return 0;
2103
2104 if (TARGET_LONG_DISPLACEMENT)
2105 {
2106 if (!s390_short_displacement (addr.disp))
2107 return 0;
2108 }
2109 break;
2110
2111 case 'R':
2112 if (GET_CODE (op) != MEM)
2113 return 0;
2114
2115 if (TARGET_LONG_DISPLACEMENT)
2116 {
2117 if (!s390_decompose_address (XEXP (op, 0), &addr))
2118 return 0;
2119 if (!s390_short_displacement (addr.disp))
2120 return 0;
2121 }
2122 break;
2123
2124 case 'S':
2125 if (!TARGET_LONG_DISPLACEMENT)
2126 return 0;
2127 if (GET_CODE (op) != MEM)
2128 return 0;
2129 if (!s390_decompose_address (XEXP (op, 0), &addr))
2130 return 0;
2131 if (addr.indx)
2132 return 0;
2133 if (s390_short_displacement (addr.disp))
2134 return 0;
2135 break;
2136
2137 case 'T':
2138 if (!TARGET_LONG_DISPLACEMENT)
2139 return 0;
2140 if (GET_CODE (op) != MEM)
2141 return 0;
963fc8d0
AK
2142 if (!s390_decompose_address (XEXP (op, 0), &addr))
2143 return 0;
2144 if (s390_short_displacement (addr.disp))
ab96de7e
AS
2145 return 0;
2146 break;
2147
2148 case 'U':
2149 if (TARGET_LONG_DISPLACEMENT)
2150 {
2151 if (!s390_decompose_address (op, &addr))
2152 return 0;
2153 if (!s390_short_displacement (addr.disp))
2154 return 0;
2155 }
2156 break;
2157
2158 case 'W':
2159 if (!TARGET_LONG_DISPLACEMENT)
2160 return 0;
963fc8d0
AK
2161 if (!s390_decompose_address (op, &addr))
2162 return 0;
2163 if (s390_short_displacement (addr.disp))
ab96de7e
AS
2164 return 0;
2165 break;
2166
2167 case 'Y':
d98ad410
UW
2168 /* Simply check for the basic form of a shift count. Reload will
2169 take care of making sure we have a proper base register. */
4989e88a 2170 if (!s390_decompose_shift_count (op, NULL, NULL))
d98ad410
UW
2171 return 0;
2172 break;
ab96de7e
AS
2173
2174 default:
2175 return 0;
2176 }
2177
2178 return 1;
2179}
2180
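/* Illustrative aside, not part of s390.c: the Q/R/S/T dispatch above,
   collapsed into a table for the TARGET_LONG_DISPLACEMENT case.
   has_indx and short_disp stand for the results of the address
   decomposition and of s390_short_displacement; the toy_ name is
   hypothetical.  */

static int
toy_mem_constraint_qrst (char c, int has_indx, int short_disp)
{
  switch (c)
    {
    case 'Q': return !has_indx && short_disp;   /* b+d, short disp */
    case 'R': return short_disp;                /* b+i+d, short disp */
    case 'S': return !has_indx && !short_disp;  /* b+d, long disp */
    case 'T': return !short_disp;               /* b+i+d, long disp */
    default:  return 0;
    }
}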
cd8dc1f9
WG
2181
2182
2183/* Evaluates constraint strings starting with letter O. Input
2184 parameter C is the letter following the "O" in the constraint
2185 string. Returns 1 if VALUE meets the respective constraint and 0
2186 otherwise. */
ab96de7e 2187
d096725d 2188int
cd8dc1f9 2189s390_O_constraint_str (const char c, HOST_WIDE_INT value)
d096725d 2190{
cd8dc1f9
WG
2191 if (!TARGET_EXTIMM)
2192 return 0;
d096725d 2193
cd8dc1f9 2194 switch (c)
d096725d 2195 {
cd8dc1f9
WG
2196 case 's':
2197 return trunc_int_for_mode (value, SImode) == value;
2198
2199 case 'p':
2200 return value == 0
2201 || s390_single_part (GEN_INT (value), DImode, SImode, 0) == 1;
2202
2203 case 'n':
ee3f3449 2204 return s390_single_part (GEN_INT (value - 1), DImode, SImode, -1) == 1;
cd8dc1f9 2205
d096725d 2206 default:
cd8dc1f9 2207 gcc_unreachable ();
d096725d
AS
2208 }
2209}
2210
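/* Illustrative aside, not part of s390.c: the 'Os' test above,
   trunc_int_for_mode (value, SImode) == value, simply asks whether
   VALUE is already a sign-extended 32-bit quantity.  The toy_ name
   is hypothetical.  */

static int
toy_fits_signed_32 (long long v)
{
  return v == (long long) (int) v;
}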
cd8dc1f9
WG
2211
2212/* Evaluates constraint strings starting with letter N. Parameter STR
2213 contains the letters following letter "N" in the constraint string.
2214 Returns true if VALUE matches the constraint. */
d096725d 2215
ab96de7e 2216int
cd8dc1f9 2217s390_N_constraint_str (const char *str, HOST_WIDE_INT value)
ab96de7e
AS
2218{
2219 enum machine_mode mode, part_mode;
2220 int def;
2221 int part, part_goal;
2222
ab96de7e 2223
cd8dc1f9
WG
2224 if (str[0] == 'x')
2225 part_goal = -1;
2226 else
2227 part_goal = str[0] - '0';
ab96de7e 2228
cd8dc1f9
WG
2229 switch (str[1])
2230 {
2231 case 'Q':
2232 part_mode = QImode;
ab96de7e 2233 break;
cd8dc1f9
WG
2234 case 'H':
2235 part_mode = HImode;
ec24698e 2236 break;
cd8dc1f9
WG
2237 case 'S':
2238 part_mode = SImode;
2239 break;
2240 default:
2241 return 0;
2242 }
ec24698e 2243
cd8dc1f9
WG
2244 switch (str[2])
2245 {
2246 case 'H':
2247 mode = HImode;
2248 break;
2249 case 'S':
2250 mode = SImode;
2251 break;
2252 case 'D':
2253 mode = DImode;
2254 break;
2255 default:
2256 return 0;
2257 }
11598938 2258
cd8dc1f9
WG
2259 switch (str[3])
2260 {
2261 case '0':
2262 def = 0;
2263 break;
2264 case 'F':
2265 def = -1;
2266 break;
ab96de7e
AS
2267 default:
2268 return 0;
2269 }
2270
cd8dc1f9
WG
2271 if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
2272 return 0;
2273
2274 part = s390_single_part (GEN_INT (value), mode, part_mode, def);
2275 if (part < 0)
2276 return 0;
2277 if (part_goal != -1 && part_goal != part)
2278 return 0;
2279
ab96de7e
AS
2280 return 1;
2281}
2282
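/* Illustrative aside, not part of s390.c: the four-character layout
   parsed above.  str[0] selects which part may differ ('x' = any),
   str[1] the part width (Q/H/S), str[2] the containing mode (H/S/D),
   and str[3] the background value ('0' or 'F').  This sketch only
   decodes the sizes and enforces that the part is strictly narrower
   than the mode; the toy_ name is hypothetical.  */

static int
toy_decode_N_constraint (const char *str, int *part_goal,
                         int *part_bits, int *mode_bits, int *def)
{
  *part_goal = (str[0] == 'x') ? -1 : str[0] - '0';
  *part_bits = (str[1] == 'Q') ? 8 : (str[1] == 'H') ? 16
             : (str[1] == 'S') ? 32 : 0;
  *mode_bits = (str[2] == 'H') ? 16 : (str[2] == 'S') ? 32
             : (str[2] == 'D') ? 64 : 0;
  *def       = (str[3] == '0') ? 0 : (str[3] == 'F') ? -1 : 2;

  return *part_bits && *mode_bits && *def != 2
         && *part_bits < *mode_bits;
}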
cd8dc1f9
WG
2283
2284/* Returns true if the input parameter VALUE is a float zero. */
2285
2286int
2287s390_float_const_zero_p (rtx value)
2288{
2289 return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
2290 && value == CONST0_RTX (GET_MODE (value)));
2291}
2292
2293
ab96de7e
AS
2294/* Compute a (partial) cost for rtx X. Return true if the complete
2295 cost has been computed, and false if subexpressions should be
2296 scanned. In either case, *TOTAL contains the cost result.
2297 CODE contains GET_CODE (x), OUTER_CODE contains the code
2298 of the superexpression of x. */
2299
2300static bool
2301s390_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f
RH
2302{
2303 switch (code)
2304 {
2305 case CONST:
3c50106f 2306 case CONST_INT:
3c50106f
RH
2307 case LABEL_REF:
2308 case SYMBOL_REF:
2309 case CONST_DOUBLE:
6fa5b390 2310 case MEM:
3c50106f
RH
2311 *total = 0;
2312 return true;
2313
2314 case ASHIFT:
2315 case ASHIFTRT:
2316 case LSHIFTRT:
017e0eb9
MD
2317 case ROTATE:
2318 case ROTATERT:
3c50106f
RH
2319 case AND:
2320 case IOR:
2321 case XOR:
3c50106f
RH
2322 case NEG:
2323 case NOT:
2324 *total = COSTS_N_INSNS (1);
017e0eb9 2325 return false;
3c50106f 2326
2742a1ed
MD
2327 case PLUS:
2328 case MINUS:
2329 /* Check for multiply and add. */
b75d6bab 2330 if ((GET_MODE (x) == DFmode || GET_MODE (x) == SFmode)
2742a1ed 2331 && GET_CODE (XEXP (x, 0)) == MULT
142cd70f 2332 && TARGET_HARD_FLOAT && TARGET_FUSED_MADD)
2742a1ed
MD
2333 {
2334 /* This is the multiply and add case. */
b75d6bab
MD
2335 if (GET_MODE (x) == DFmode)
2336 *total = s390_cost->madbr;
2337 else
2338 *total = s390_cost->maebr;
2339 *total += rtx_cost (XEXP (XEXP (x, 0), 0), MULT)
2742a1ed
MD
2340 + rtx_cost (XEXP (XEXP (x, 0), 1), MULT)
2341 + rtx_cost (XEXP (x, 1), code);
2342 return true; /* Do not do an additional recursive descent. */
2343 }
2344 *total = COSTS_N_INSNS (1);
2345 return false;
2346
017e0eb9
MD
2347 case MULT:
2348 switch (GET_MODE (x))
2349 {
2350 case SImode:
2742a1ed 2351 {
017e0eb9
MD
2352 rtx left = XEXP (x, 0);
2353 rtx right = XEXP (x, 1);
2354 if (GET_CODE (right) == CONST_INT
b5c67a49 2355 && CONST_OK_FOR_K (INTVAL (right)))
017e0eb9
MD
2356 *total = s390_cost->mhi;
2357 else if (GET_CODE (left) == SIGN_EXTEND)
2358 *total = s390_cost->mh;
2359 else
2360 *total = s390_cost->ms; /* msr, ms, msy */
2361 break;
2362 }
2363 case DImode:
2364 {
2365 rtx left = XEXP (x, 0);
2366 rtx right = XEXP (x, 1);
2367 if (TARGET_64BIT)
2368 {
2369 if (GET_CODE (right) == CONST_INT
b5c67a49 2370 && CONST_OK_FOR_K (INTVAL (right)))
017e0eb9
MD
2371 *total = s390_cost->mghi;
2372 else if (GET_CODE (left) == SIGN_EXTEND)
2373 *total = s390_cost->msgf;
2374 else
2375 *total = s390_cost->msg; /* msgr, msg */
2376 }
2377 else /* TARGET_31BIT */
2378 {
2379 if (GET_CODE (left) == SIGN_EXTEND
2380 && GET_CODE (right) == SIGN_EXTEND)
2381 /* mulsidi case: mr, m */
2382 *total = s390_cost->m;
2742a1ed
MD
2383 else if (GET_CODE (left) == ZERO_EXTEND
2384 && GET_CODE (right) == ZERO_EXTEND
2385 && TARGET_CPU_ZARCH)
2386 /* umulsidi case: ml, mlr */
2387 *total = s390_cost->ml;
017e0eb9
MD
2388 else
2389 /* Complex calculation is required. */
2390 *total = COSTS_N_INSNS (40);
2391 }
2392 break;
2393 }
2394 case SFmode:
2395 case DFmode:
2396 *total = s390_cost->mult_df;
2397 break;
f61a2c7d
AK
2398 case TFmode:
2399 *total = s390_cost->mxbr;
2400 break;
017e0eb9
MD
2401 default:
2402 return false;
2403 }
2404 return false;
3c50106f 2405
6fa5b390
MD
2406 case UDIV:
2407 case UMOD:
2408 if (GET_MODE (x) == TImode) /* 128 bit division */
2409 *total = s390_cost->dlgr;
2410 else if (GET_MODE (x) == DImode)
2411 {
2412 rtx right = XEXP (x, 1);
2413 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2414 *total = s390_cost->dlr;
2415 else /* 64 by 64 bit division */
2416 *total = s390_cost->dlgr;
2417 }
2418 else if (GET_MODE (x) == SImode) /* 32 bit division */
2419 *total = s390_cost->dlr;
2420 return false;
2421
3c50106f 2422 case DIV:
6fa5b390
MD
2423 case MOD:
2424 if (GET_MODE (x) == DImode)
2425 {
2426 rtx right = XEXP (x, 1);
2427 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2428 if (TARGET_64BIT)
2429 *total = s390_cost->dsgfr;
2430 else
2431 *total = s390_cost->dr;
2432 else /* 64 by 64 bit division */
2433 *total = s390_cost->dsgr;
2434 }
2435 else if (GET_MODE (x) == SImode) /* 32 bit division */
2436 *total = s390_cost->dlr;
2437 else if (GET_MODE (x) == SFmode)
98fd0d70 2438 {
142cd70f 2439 *total = s390_cost->debr;
98fd0d70
MD
2440 }
2441 else if (GET_MODE (x) == DFmode)
2442 {
142cd70f 2443 *total = s390_cost->ddbr;
98fd0d70 2444 }
f61a2c7d
AK
2445 else if (GET_MODE (x) == TFmode)
2446 {
142cd70f 2447 *total = s390_cost->dxbr;
f61a2c7d 2448 }
017e0eb9
MD
2449 return false;
2450
2742a1ed
MD
2451 case SQRT:
2452 if (GET_MODE (x) == SFmode)
2453 *total = s390_cost->sqebr;
f61a2c7d 2454 else if (GET_MODE (x) == DFmode)
2742a1ed 2455 *total = s390_cost->sqdbr;
f61a2c7d
AK
2456 else /* TFmode */
2457 *total = s390_cost->sqxbr;
2742a1ed
MD
2458 return false;
2459
017e0eb9 2460 case SIGN_EXTEND:
2742a1ed 2461 case ZERO_EXTEND:
6fa5b390
MD
2462 if (outer_code == MULT || outer_code == DIV || outer_code == MOD
2463 || outer_code == PLUS || outer_code == MINUS
2464 || outer_code == COMPARE)
017e0eb9
MD
2465 *total = 0;
2466 return false;
3c50106f 2467
6fa5b390
MD
2468 case COMPARE:
2469 *total = COSTS_N_INSNS (1);
2470 if (GET_CODE (XEXP (x, 0)) == AND
2471 && GET_CODE (XEXP (x, 1)) == CONST_INT
2472 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2473 {
2474 rtx op0 = XEXP (XEXP (x, 0), 0);
2475 rtx op1 = XEXP (XEXP (x, 0), 1);
2476 rtx op2 = XEXP (x, 1);
2477
2478 if (memory_operand (op0, GET_MODE (op0))
2479 && s390_tm_ccmode (op1, op2, 0) != VOIDmode)
2480 return true;
2481 if (register_operand (op0, GET_MODE (op0))
2482 && s390_tm_ccmode (op1, op2, 1) != VOIDmode)
2483 return true;
2484 }
2485 return false;
2486
3c50106f
RH
2487 default:
2488 return false;
2489 }
2490}
2491
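/* Illustrative aside, not part of s390.c: the shape-driven cost
   selection used for SImode MULT above.  The numeric costs are
   placeholders rather than data from the processor cost tables,
   and the toy_ names are hypothetical.  */

enum toy_mul_shape { TOY_CONST16, TOY_SIGN_EXT, TOY_GENERIC };

static int
toy_simode_mult_cost (enum toy_mul_shape shape)
{
  switch (shape)
    {
    case TOY_CONST16:  return 2;   /* immediate fits 16 bits: mhi */
    case TOY_SIGN_EXT: return 3;   /* sign-extended operand: mh */
    default:           return 4;   /* msr, ms, msy */
    }
}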
dea09b1b
UW
2492/* Return the cost of an address rtx ADDR. */
2493
dcefdf67 2494static int
9c808aad 2495s390_address_cost (rtx addr)
dea09b1b
UW
2496{
2497 struct s390_address ad;
2498 if (!s390_decompose_address (addr, &ad))
2499 return 1000;
2500
2501 return ad.indx ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
2502}
2503
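/* Illustrative aside, not part of s390.c: the rule above in plain
   form.  Every decomposable address costs one instruction
   (COSTS_N_INSNS (1), which expands to 4 in GCC's cost scale), and
   an index register adds one extra unit; undecomposable addresses
   get the prohibitive cost 1000.  The toy_ name is hypothetical.  */

static int
toy_address_cost (int decomposable, int has_indx)
{
  if (!decomposable)
    return 1000;
  return has_indx ? 4 + 1 : 4;
}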
fd3cd001
UW
2504/* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
2505 otherwise return 0. */
2506
2507int
5d81b82b 2508tls_symbolic_operand (rtx op)
fd3cd001 2509{
fd3cd001
UW
2510 if (GET_CODE (op) != SYMBOL_REF)
2511 return 0;
114278e7 2512 return SYMBOL_REF_TLS_MODEL (op);
fd3cd001 2513}
9db1d521 2514\f
c5aa1d12
UW
2515/* Split DImode access register reference REG (on 64-bit) into its constituent
2516 low and high parts, and store them into LO and HI. Note that gen_lowpart/
2517 gen_highpart cannot be used as they assume all registers are word-sized,
2518 while our access registers have only half that size. */
2519
2520void
2521s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
2522{
2523 gcc_assert (TARGET_64BIT);
2524 gcc_assert (ACCESS_REG_P (reg));
2525 gcc_assert (GET_MODE (reg) == DImode);
2526 gcc_assert (!(REGNO (reg) & 1));
2527
2528 *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
2529 *hi = gen_rtx_REG (SImode, REGNO (reg));
2530}
9db1d521 2531
994fe660 2532/* Return true if OP contains a symbol reference. */
9db1d521 2533
3ed99cc9 2534bool
9c808aad 2535symbolic_reference_mentioned_p (rtx op)
9db1d521 2536{
5d81b82b
AS
2537 const char *fmt;
2538 int i;
9db1d521
HP
2539
2540 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
2541 return 1;
2542
2543 fmt = GET_RTX_FORMAT (GET_CODE (op));
2544 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2545 {
2546 if (fmt[i] == 'E')
2547 {
5d81b82b 2548 int j;
9db1d521
HP
2549
2550 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2551 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2552 return 1;
2553 }
2554
2555 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
2556 return 1;
2557 }
2558
2559 return 0;
2560}
2561
fd3cd001
UW
2562/* Return true if OP contains a reference to a thread-local symbol. */
2563
3ed99cc9 2564bool
9c808aad 2565tls_symbolic_reference_mentioned_p (rtx op)
fd3cd001 2566{
5d81b82b
AS
2567 const char *fmt;
2568 int i;
fd3cd001
UW
2569
2570 if (GET_CODE (op) == SYMBOL_REF)
2571 return tls_symbolic_operand (op);
2572
2573 fmt = GET_RTX_FORMAT (GET_CODE (op));
2574 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2575 {
2576 if (fmt[i] == 'E')
2577 {
5d81b82b 2578 int j;
fd3cd001
UW
2579
2580 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2581 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
3ed99cc9 2582 return true;
fd3cd001
UW
2583 }
2584
2585 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
3ed99cc9 2586 return true;
fd3cd001
UW
2587 }
2588
3ed99cc9 2589 return false;
fd3cd001
UW
2590}
2591
9db1d521 2592
c7453384
EC
2593/* Return true if OP is a legitimate general operand when
2594 generating PIC code. It is given that flag_pic is on
994fe660
UW
2595 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2596
9db1d521 2597int
5d81b82b 2598legitimate_pic_operand_p (rtx op)
9db1d521 2599{
4023fb28 2600 /* Accept all non-symbolic constants. */
9db1d521
HP
2601 if (!SYMBOLIC_CONST (op))
2602 return 1;
2603
c7453384 2604 /* Reject everything else; must be handled
fd3cd001 2605 via emit_symbolic_move. */
9db1d521
HP
2606 return 0;
2607}
2608
994fe660
UW
2609/* Returns true if the constant value OP is a legitimate general operand.
2610 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2611
9db1d521 2612int
5d81b82b 2613legitimate_constant_p (rtx op)
9db1d521 2614{
4023fb28 2615 /* Accept all non-symbolic constants. */
9db1d521
HP
2616 if (!SYMBOLIC_CONST (op))
2617 return 1;
2618
fd3cd001 2619 /* Accept immediate LARL operands. */
9e8327e3 2620 if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
fd3cd001
UW
2621 return 1;
2622
2623 /* Thread-local symbols are never legal constants. This is
2624 so that emit_call knows that computing such addresses
2625 might require a function call. */
2626 if (TLS_SYMBOLIC_CONST (op))
2627 return 0;
2628
9db1d521
HP
2629 /* In the PIC case, symbolic constants must *not* be
2630 forced into the literal pool. We accept them here,
fd3cd001 2631 so that they will be handled by emit_symbolic_move. */
9db1d521
HP
2632 if (flag_pic)
2633 return 1;
2634
9db1d521
HP
2635 /* All remaining non-PIC symbolic constants are
2636 forced into the literal pool. */
2637 return 0;
2638}
2639
fd3cd001
UW
2640/* Determine if it's legal to put X into the constant pool. This
2641 is not possible if X contains the address of a symbol that is
2642 not constant (TLS) or not known at final link time (PIC). */
2643
2644static bool
9c808aad 2645s390_cannot_force_const_mem (rtx x)
fd3cd001
UW
2646{
2647 switch (GET_CODE (x))
2648 {
2649 case CONST_INT:
2650 case CONST_DOUBLE:
2651 /* Accept all non-symbolic constants. */
2652 return false;
2653
2654 case LABEL_REF:
2655 /* Labels are OK iff we are non-PIC. */
2656 return flag_pic != 0;
2657
2658 case SYMBOL_REF:
2659 /* 'Naked' TLS symbol references are never OK,
2660 non-TLS symbols are OK iff we are non-PIC. */
2661 if (tls_symbolic_operand (x))
2662 return true;
2663 else
2664 return flag_pic != 0;
2665
2666 case CONST:
2667 return s390_cannot_force_const_mem (XEXP (x, 0));
2668 case PLUS:
2669 case MINUS:
2670 return s390_cannot_force_const_mem (XEXP (x, 0))
2671 || s390_cannot_force_const_mem (XEXP (x, 1));
2672
2673 case UNSPEC:
2674 switch (XINT (x, 1))
2675 {
2676 /* Only lt-relative or GOT-relative UNSPECs are OK. */
fd7643fb
UW
2677 case UNSPEC_LTREL_OFFSET:
2678 case UNSPEC_GOT:
2679 case UNSPEC_GOTOFF:
2680 case UNSPEC_PLTOFF:
fd3cd001
UW
2681 case UNSPEC_TLSGD:
2682 case UNSPEC_TLSLDM:
2683 case UNSPEC_NTPOFF:
2684 case UNSPEC_DTPOFF:
2685 case UNSPEC_GOTNTPOFF:
2686 case UNSPEC_INDNTPOFF:
2687 return false;
2688
9bb86f41
UW
2689 /* If the literal pool shares the code section, we put
2690 execute template placeholders into the pool as well. */
2691 case UNSPEC_INSN:
2692 return TARGET_CPU_ZARCH;
2693
fd3cd001
UW
2694 default:
2695 return true;
2696 }
2697 break;
2698
2699 default:
8d933e31 2700 gcc_unreachable ();
fd3cd001
UW
2701 }
2702}
2703
4023fb28 2704/* Returns true if the constant value OP is a legitimate general
c7453384 2705 operand during and after reload. The difference from
4023fb28
UW
2706 legitimate_constant_p is that this function will not accept
2707 a constant that would need to be forced to the literal pool
2708 before it can be used as an operand. */
2709
3ed99cc9 2710bool
5d81b82b 2711legitimate_reload_constant_p (rtx op)
4023fb28 2712{
d3632d41 2713 /* Accept la(y) operands. */
c7453384 2714 if (GET_CODE (op) == CONST_INT
d3632d41 2715 && DISP_IN_RANGE (INTVAL (op)))
3ed99cc9 2716 return true;
d3632d41 2717
ec24698e 2718 /* Accept l(g)hi/l(g)fi operands. */
4023fb28 2719 if (GET_CODE (op) == CONST_INT
ec24698e 2720 && (CONST_OK_FOR_K (INTVAL (op)) || CONST_OK_FOR_Os (INTVAL (op))))
3ed99cc9 2721 return true;
4023fb28
UW
2722
2723 /* Accept lliXX operands. */
9e8327e3 2724 if (TARGET_ZARCH
11598938
UW
2725 && GET_CODE (op) == CONST_INT
2726 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2727 && s390_single_part (op, word_mode, HImode, 0) >= 0)
3ed99cc9 2728 return true;
4023fb28 2729
ec24698e
UW
2730 if (TARGET_EXTIMM
2731 && GET_CODE (op) == CONST_INT
2732 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2733 && s390_single_part (op, word_mode, SImode, 0) >= 0)
2734 return true;
2735
4023fb28 2736 /* Accept larl operands. */
9e8327e3 2737 if (TARGET_CPU_ZARCH
4023fb28 2738 && larl_operand (op, VOIDmode))
3ed99cc9 2739 return true;
4023fb28 2740
d096725d
AS
2741 /* Accept lzXX operands. */
2742 if (GET_CODE (op) == CONST_DOUBLE
2743 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', "G"))
2744 return true;
2745
11598938
UW
2746 /* Accept double-word operands that can be split. */
2747 if (GET_CODE (op) == CONST_INT
2748 && trunc_int_for_mode (INTVAL (op), word_mode) != INTVAL (op))
2749 {
2750 enum machine_mode dword_mode = word_mode == SImode ? DImode : TImode;
2751 rtx hi = operand_subword (op, 0, 0, dword_mode);
2752 rtx lo = operand_subword (op, 1, 0, dword_mode);
2753 return legitimate_reload_constant_p (hi)
2754 && legitimate_reload_constant_p (lo);
2755 }
2756
4023fb28 2757 /* Everything else cannot be handled without reload. */
3ed99cc9 2758 return false;
4023fb28
UW
2759}
2760
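/* Illustrative aside, not part of s390.c: the double-word split
   above, shown for a 31-bit target where word_mode is SImode.  A
   64-bit constant is reload-legitimate iff both of its 32-bit words
   are; this sketch performs only the split.  The toy_ name is
   hypothetical.  */

static void
toy_split_dword (unsigned long long v, unsigned int *hi, unsigned int *lo)
{
  *hi = (unsigned int) (v >> 32);   /* word 0: operand_subword (op, 0, ...) */
  *lo = (unsigned int) v;           /* word 1: operand_subword (op, 1, ...) */
}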
2761/* Given an rtx OP being reloaded into a reg required to be in class CLASS,
2762 return the class of reg to actually use. */
2763
2764enum reg_class
9c808aad 2765s390_preferred_reload_class (rtx op, enum reg_class class)
4023fb28 2766{
4023fb28
UW
2767 switch (GET_CODE (op))
2768 {
2769 /* Constants we cannot reload must be forced into the
0796c16a
UW
2770 literal pool. */
2771
4023fb28
UW
2772 case CONST_DOUBLE:
2773 case CONST_INT:
0796c16a 2774 if (legitimate_reload_constant_p (op))
4023fb28 2775 return class;
0796c16a
UW
2776 else
2777 return NO_REGS;
4023fb28
UW
2778
2779 /* If a symbolic constant or a PLUS is reloaded,
14b3e8ef
UW
2780 it is most likely being used as an address, so
2781 prefer ADDR_REGS. If 'class' is not a superset
2782 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
4023fb28
UW
2783 case PLUS:
2784 case LABEL_REF:
2785 case SYMBOL_REF:
2786 case CONST:
14b3e8ef
UW
2787 if (reg_class_subset_p (ADDR_REGS, class))
2788 return ADDR_REGS;
2789 else
2790 return NO_REGS;
4023fb28
UW
2791
2792 default:
2793 break;
2794 }
2795
2796 return class;
2797}
9db1d521 2798
963fc8d0
AK
2799/* Return true if ADDR is of the form symbol_ref or symbol_ref + const_int
2800 and return these parts in SYMREF and ADDEND. You can pass NULL in
2801 SYMREF and/or ADDEND if you are not interested in these values. */
2802
2803static bool
2804s390_symref_operand_p (rtx addr, rtx *symref, HOST_WIDE_INT *addend)
2805{
2806 HOST_WIDE_INT tmpaddend = 0;
2807
2808 if (GET_CODE (addr) == CONST)
2809 addr = XEXP (addr, 0);
2810
2811 if (GET_CODE (addr) == PLUS)
2812 {
2813 if (GET_CODE (XEXP (addr, 0)) == SYMBOL_REF
2814 && CONST_INT_P (XEXP (addr, 1)))
2815 {
2816 tmpaddend = INTVAL (XEXP (addr, 1));
2817 addr = XEXP (addr, 0);
2818 }
2819 else
2820 return false;
2821 }
2822 else
2823 if (GET_CODE (addr) != SYMBOL_REF)
2824 return false;
2825
2826 if (symref)
2827 *symref = addr;
2828 if (addend)
2829 *addend = tmpaddend;
2830
2831 return true;
2832}
2833
2834/* Return true if ADDR is SYMBOL_REF + addend with addend being a
2835 multiple of ALIGNMENT and the SYMBOL_REF being naturally
2836 aligned. */
2837
2838bool
2839s390_check_symref_alignment (rtx addr, HOST_WIDE_INT alignment)
2840{
2841 HOST_WIDE_INT addend;
2842 rtx symref;
2843
2844 if (!s390_symref_operand_p (addr, &symref, &addend))
2845 return false;
2846
2847 return (!SYMBOL_REF_NOT_NATURALLY_ALIGNED_P (symref)
2848 && !(addend & (alignment - 1)));
2849}
2850
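/* Illustrative aside, not part of s390.c: the addend test above,
   !(addend & (alignment - 1)), is the usual power-of-two trick for
   "addend is a multiple of alignment".  ALIGNMENT is assumed to be
   a power of two, as the natural alignments checked here are; the
   toy_ name is hypothetical.  */

static int
toy_is_aligned (long addend, long alignment)
{
  return (addend & (alignment - 1)) == 0;
}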
2851/* ADDR is moved into REG using larl. If ADDR isn't a valid larl
2852 operand, SCRATCH is used to reload the even part of the address,
2853 and one is then added. */
2854
2855void
2856s390_reload_larl_operand (rtx reg, rtx addr, rtx scratch)
2857{
2858 HOST_WIDE_INT addend;
2859 rtx symref;
2860
2861 if (!s390_symref_operand_p (addr, &symref, &addend))
2862 gcc_unreachable ();
2863
2864 if (!(addend & 1))
2865 /* Easy case. The addend is even so larl will do fine. */
2866 emit_move_insn (reg, addr);
2867 else
2868 {
2869 /* We can leave the scratch register untouched if the target
2870 register is a valid base register. */
2871 if (REGNO (reg) < FIRST_PSEUDO_REGISTER
2872 && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS)
2873 scratch = reg;
2874
2875 gcc_assert (REGNO (scratch) < FIRST_PSEUDO_REGISTER);
2876 gcc_assert (REGNO_REG_CLASS (REGNO (scratch)) == ADDR_REGS);
2877
2878 if (addend != 1)
2879 emit_move_insn (scratch,
2880 gen_rtx_CONST (Pmode,
2881 gen_rtx_PLUS (Pmode, symref,
2882 GEN_INT (addend - 1))));
2883 else
2884 emit_move_insn (scratch, symref);
2885
2886 /* Increment the address using la in order to avoid clobbering cc. */
2887 emit_move_insn (reg, gen_rtx_PLUS (Pmode, scratch, const1_rtx));
2888 }
2889}
2890
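/* Illustrative aside, not part of s390.c: the even/odd split above
   in plain arithmetic.  larl can only materialize even offsets, so
   an odd addend A is loaded as A - 1 and then corrected by adding 1
   with la, which leaves the condition code untouched.  The toy_
   name is hypothetical.  */

static void
toy_split_odd_addend (long a, long *larl_part, long *la_part)
{
  *larl_part = (a & 1) ? a - 1 : a;   /* even, so valid for larl */
  *la_part = (a & 1) ? 1 : 0;         /* added separately via la */
}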
2891/* Generate what is necessary to move between REG and MEM using
2892 SCRATCH. The direction is given by TOMEM. */
2893
2894void
2895s390_reload_symref_address (rtx reg, rtx mem, rtx scratch, bool tomem)
2896{
2897 /* Reload might have pulled a constant out of the literal pool.
2898 Force it back in. */
2899 if (CONST_INT_P (mem) || GET_CODE (mem) == CONST_DOUBLE
2900 || GET_CODE (mem) == CONST)
2901 mem = force_const_mem (GET_MODE (reg), mem);
2902
2903 gcc_assert (MEM_P (mem));
2904
2905 /* For a load from memory we can leave the scratch register
2906 untouched if the target register is a valid base register. */
2907 if (!tomem
2908 && REGNO (reg) < FIRST_PSEUDO_REGISTER
2909 && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS
2910 && GET_MODE (reg) == GET_MODE (scratch))
2911 scratch = reg;
2912
2913 /* Load address into scratch register. Since we can't have a
2914 secondary reload for a secondary reload, we have to cover the case
2915 where larl would need a secondary reload here as well. */
2916 s390_reload_larl_operand (scratch, XEXP (mem, 0), scratch);
2917
2918 /* Now we can use a standard load/store to do the move. */
2919 if (tomem)
2920 emit_move_insn (replace_equiv_address (mem, scratch), reg);
2921 else
2922 emit_move_insn (reg, replace_equiv_address (mem, scratch));
2923}
2924
833cd70a
AK
2925/* Inform reload about cases where moving X with a mode MODE to a register in
2926 CLASS requires an extra scratch or immediate register. Return the class
2927 needed for the immediate register. */
f61a2c7d 2928
833cd70a
AK
2929static enum reg_class
2930s390_secondary_reload (bool in_p, rtx x, enum reg_class class,
2931 enum machine_mode mode, secondary_reload_info *sri)
2932{
2933 /* Intermediate register needed. */
02887425 2934 if (reg_classes_intersect_p (CC_REGS, class))
9dc62c00
AK
2935 return GENERAL_REGS;
2936
963fc8d0
AK
2937 if (TARGET_Z10)
2938 {
2939 /* On z10 several optimizer steps may generate larl operands with
2940 an odd addend. */
2941 if (in_p
2942 && s390_symref_operand_p (x, NULL, NULL)
2943 && mode == Pmode
2944 && !s390_check_symref_alignment (x, 2))
2945 sri->icode = ((mode == DImode) ? CODE_FOR_reloaddi_larl_odd_addend_z10
2946 : CODE_FOR_reloadsi_larl_odd_addend_z10);
2947
2948 /* On z10 we need a scratch register when moving QI, TI or floating
2949 point mode values from or to a memory location with a SYMBOL_REF
2950 or if the symref addend of a SI or DI move is not aligned to the
2951 width of the access. */
2952 if (MEM_P (x)
2953 && s390_symref_operand_p (XEXP (x, 0), NULL, NULL)
2954 && (mode == QImode || mode == TImode || FLOAT_MODE_P (mode)
2955 || (!TARGET_64BIT && mode == DImode)
2956 || ((mode == HImode || mode == SImode || mode == DImode)
2957 && (!s390_check_symref_alignment (XEXP (x, 0),
2958 GET_MODE_SIZE (mode))))))
2959 {
2960#define __SECONDARY_RELOAD_CASE(M,m) \
2961 case M##mode: \
2962 if (TARGET_64BIT) \
2963 sri->icode = in_p ? CODE_FOR_reload##m##di_toreg_z10 : \
2964 CODE_FOR_reload##m##di_tomem_z10; \
2965 else \
2966 sri->icode = in_p ? CODE_FOR_reload##m##si_toreg_z10 : \
2967 CODE_FOR_reload##m##si_tomem_z10; \
2968 break;
2969
2970 switch (GET_MODE (x))
2971 {
2972 __SECONDARY_RELOAD_CASE (QI, qi);
2973 __SECONDARY_RELOAD_CASE (HI, hi);
2974 __SECONDARY_RELOAD_CASE (SI, si);
2975 __SECONDARY_RELOAD_CASE (DI, di);
2976 __SECONDARY_RELOAD_CASE (TI, ti);
2977 __SECONDARY_RELOAD_CASE (SF, sf);
2978 __SECONDARY_RELOAD_CASE (DF, df);
2979 __SECONDARY_RELOAD_CASE (TF, tf);
2980 __SECONDARY_RELOAD_CASE (SD, sd);
2981 __SECONDARY_RELOAD_CASE (DD, dd);
2982 __SECONDARY_RELOAD_CASE (TD, td);
2983
2984 default:
2985 gcc_unreachable ();
2986 }
2987#undef __SECONDARY_RELOAD_CASE
2988 }
2989 }
2990
833cd70a
AK
2991 /* We need a scratch register when loading a PLUS expression which
2992 is not a legitimate operand of the LOAD ADDRESS instruction. */
2993 if (in_p && s390_plus_operand (x, mode))
2994 sri->icode = (TARGET_64BIT ?
2995 CODE_FOR_reloaddi_plus : CODE_FOR_reloadsi_plus);
2996
7fa7289d 2997 /* When performing a multiword move from or to memory, we have to make sure the
833cd70a
AK
2998 second chunk in memory is addressable without causing a displacement
2999 overflow. If that would be the case, we calculate the address in
3000 a scratch register. */
3001 if (MEM_P (x)
3002 && GET_CODE (XEXP (x, 0)) == PLUS
3003 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3004 && !DISP_IN_RANGE (INTVAL (XEXP (XEXP (x, 0), 1))
0ca89db7 3005 + GET_MODE_SIZE (mode) - 1))
833cd70a 3006 {
7fa7289d 3007 /* For GENERAL_REGS a displacement overflow is no problem if it occurs
833cd70a
AK
3008 in an s_operand address since we may fall back to lm/stm. So we only
3009 have to care about overflows in the b+i+d case. */
3010 if ((reg_classes_intersect_p (GENERAL_REGS, class)
3011 && s390_class_max_nregs (GENERAL_REGS, mode) > 1
3012 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS)
3013 /* For FP_REGS no lm/stm is available so this check is triggered
3014 for displacement overflows in b+i+d and b+d like addresses. */
3015 || (reg_classes_intersect_p (FP_REGS, class)
3016 && s390_class_max_nregs (FP_REGS, mode) > 1))
3017 {
3018 if (in_p)
3019 sri->icode = (TARGET_64BIT ?
3020 CODE_FOR_reloaddi_nonoffmem_in :
3021 CODE_FOR_reloadsi_nonoffmem_in);
3022 else
3023 sri->icode = (TARGET_64BIT ?
3024 CODE_FOR_reloaddi_nonoffmem_out :
3025 CODE_FOR_reloadsi_nonoffmem_out);
3026 }
3027 }
9dc62c00 3028
833cd70a 3029 /* Either scratch or no register needed. */
dc65c307
UW
3030 return NO_REGS;
3031}
3032
f3e9edff
UW
3033/* Generate code to load SRC, which is a PLUS that is not a
3034 legitimate operand for the LA instruction, into TARGET.
3035 SCRATCH may be used as a scratch register. */
3036
3037void
5d81b82b
AS
3038s390_expand_plus_operand (rtx target, rtx src,
3039 rtx scratch)
f3e9edff 3040{
7974fe63 3041 rtx sum1, sum2;
b808c04c 3042 struct s390_address ad;
6a4e49c1 3043
6a4e49c1 3044 /* src must be a PLUS; get its two operands. */
8d933e31
AS
3045 gcc_assert (GET_CODE (src) == PLUS);
3046 gcc_assert (GET_MODE (src) == Pmode);
f3e9edff 3047
7c82a1ed
UW
3048 /* Check if any of the two operands is already scheduled
3049 for replacement by reload. This can happen e.g. when
3050 float registers occur in an address. */
3051 sum1 = find_replacement (&XEXP (src, 0));
3052 sum2 = find_replacement (&XEXP (src, 1));
ccfc6cc8 3053 src = gen_rtx_PLUS (Pmode, sum1, sum2);
ccfc6cc8 3054
7974fe63
UW
3055 /* If the address is already strictly valid, there's nothing to do. */
3056 if (!s390_decompose_address (src, &ad)
93fa8428
AK
3057 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
3058 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
f3e9edff 3059 {
7974fe63
UW
3060 /* Otherwise, one of the operands cannot be an address register;
3061 we reload its value into the scratch register. */
3062 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
3063 {
3064 emit_move_insn (scratch, sum1);
3065 sum1 = scratch;
3066 }
3067 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
3068 {
3069 emit_move_insn (scratch, sum2);
3070 sum2 = scratch;
3071 }
f3e9edff 3072
7974fe63
UW
3073 /* According to the way these invalid addresses are generated
3074 in reload.c, it should never happen (at least on s390) that
3075 *neither* of the PLUS components, after find_replacements
3076 was applied, is an address register. */
3077 if (sum1 == scratch && sum2 == scratch)
3078 {
3079 debug_rtx (src);
8d933e31 3080 gcc_unreachable ();
7974fe63 3081 }
f3e9edff 3082
7974fe63 3083 src = gen_rtx_PLUS (Pmode, sum1, sum2);
f3e9edff
UW
3084 }
3085
3086 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
3087 is only ever performed on addresses, so we can mark the
3088 sum as legitimate for LA in any case. */
a41c6c53 3089 s390_load_address (target, src);
f3e9edff
UW
3090}
3091
3092
3ed99cc9 3093/* Return true if ADDR is a valid memory address.
ab96de7e 3094 STRICT specifies whether strict register checking applies. */
9db1d521 3095
3ed99cc9 3096bool
963fc8d0 3097legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
9db1d521 3098{
ab96de7e 3099 struct s390_address ad;
963fc8d0
AK
3100
3101 if (TARGET_Z10
3102 && larl_operand (addr, VOIDmode)
3103 && (mode == VOIDmode
3104 || s390_check_symref_alignment (addr, GET_MODE_SIZE (mode))))
3105 return true;
3106
ab96de7e 3107 if (!s390_decompose_address (addr, &ad))
3ed99cc9 3108 return false;
b808c04c
UW
3109
3110 if (strict)
3111 {
93fa8428 3112 if (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
3ed99cc9 3113 return false;
93fa8428
AK
3114
3115 if (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx)))
3ed99cc9 3116 return false;
b808c04c
UW
3117 }
3118 else
3119 {
93fa8428
AK
3120 if (ad.base
3121 && !(REGNO (ad.base) >= FIRST_PSEUDO_REGISTER
3122 || REGNO_REG_CLASS (REGNO (ad.base)) == ADDR_REGS))
3ed99cc9 3123 return false;
93fa8428
AK
3124
3125 if (ad.indx
3126 && !(REGNO (ad.indx) >= FIRST_PSEUDO_REGISTER
3127 || REGNO_REG_CLASS (REGNO (ad.indx)) == ADDR_REGS))
3128 return false;
b808c04c 3129 }
3ed99cc9 3130 return true;
9db1d521
HP
3131}
3132
3ed99cc9 3133/* Return true if OP is a valid operand for the LA instruction.
ba956982
UW
3134 In 31-bit, we need to prove that the result is used as an
3135 address, as LA performs only a 31-bit addition. */
3136
3ed99cc9 3137bool
5d81b82b 3138legitimate_la_operand_p (rtx op)
ba956982
UW
3139{
3140 struct s390_address addr;
b808c04c 3141 if (!s390_decompose_address (op, &addr))
3ed99cc9 3142 return false;
ba956982 3143
3ed99cc9 3144 return (TARGET_64BIT || addr.pointer);
f3e9edff 3145}
ba956982 3146
3ed99cc9 3147/* Return true if it is valid *and* preferable to use LA to
e1d5ee28 3148 compute the sum of OP1 and OP2. */
c7453384 3149
3ed99cc9 3150bool
e1d5ee28 3151preferred_la_operand_p (rtx op1, rtx op2)
100c7420
UW
3152{
3153 struct s390_address addr;
e1d5ee28
UW
3154
3155 if (op2 != const0_rtx)
3156 op1 = gen_rtx_PLUS (Pmode, op1, op2);
3157
3158 if (!s390_decompose_address (op1, &addr))
3ed99cc9 3159 return false;
93fa8428 3160 if (addr.base && !REGNO_OK_FOR_BASE_P (REGNO (addr.base)))
3ed99cc9 3161 return false;
93fa8428 3162 if (addr.indx && !REGNO_OK_FOR_INDEX_P (REGNO (addr.indx)))
3ed99cc9 3163 return false;
100c7420
UW
3164
3165 if (!TARGET_64BIT && !addr.pointer)
3ed99cc9 3166 return false;
100c7420
UW
3167
3168 if (addr.pointer)
3ed99cc9 3169 return true;
100c7420 3170
4888ec5d
UW
3171 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
3172 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
3ed99cc9 3173 return true;
100c7420 3174
3ed99cc9 3175 return false;
100c7420
UW
3176}
3177
a41c6c53
UW
3178/* Emit a forced load-address operation to load SRC into DST.
3179 This will use the LOAD ADDRESS instruction even in situations
3180 where legitimate_la_operand_p (SRC) returns false. */
ba956982 3181
a41c6c53 3182void
9c808aad 3183s390_load_address (rtx dst, rtx src)
f3e9edff 3184{
a41c6c53
UW
3185 if (TARGET_64BIT)
3186 emit_move_insn (dst, src);
3187 else
3188 emit_insn (gen_force_la_31 (dst, src));
ba956982
UW
3189}
3190
9db1d521
HP
3191/* Return a legitimate reference for ORIG (an address) using the
3192 register REG. If REG is 0, a new pseudo is generated.
3193
3194 There are two types of references that must be handled:
3195
3196 1. Global data references must load the address from the GOT, via
3197 the PIC reg. An insn is emitted to do this load, and the reg is
3198 returned.
3199
3200 2. Static data references, constant pool addresses, and code labels
3201 compute the address as an offset from the GOT, whose base is in
114278e7 3202 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
9db1d521
HP
3203 differentiate them from global data objects. The returned
3204 address is the PIC reg + an unspec constant.
3205
3206 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
3207 reg also appears in the address. */
3208
3209rtx
9c808aad 3210legitimize_pic_address (rtx orig, rtx reg)
9db1d521
HP
3211{
3212 rtx addr = orig;
3213 rtx new = orig;
3214 rtx base;
3215
cf9d7618
ANM
3216 gcc_assert (!TLS_SYMBOLIC_CONST (addr));
3217
9db1d521 3218 if (GET_CODE (addr) == LABEL_REF
114278e7 3219 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
9db1d521
HP
3220 {
3221 /* This is a local symbol. */
9e8327e3 3222 if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
9db1d521 3223 {
c7453384
EC
3224 /* Access local symbols PC-relative via LARL.
3225 This is the same as in the non-PIC case, so it is
d65f7478 3226 handled automatically ... */
9db1d521
HP
3227 }
3228 else
3229 {
fd7643fb 3230 /* Access local symbols relative to the GOT. */
9db1d521
HP
3231
3232 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3233
fd7643fb 3234 if (reload_in_progress || reload_completed)
6fb5fa3c 3235 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
fd7643fb
UW
3236
3237 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
e23795ea
UW
3238 addr = gen_rtx_CONST (Pmode, addr);
3239 addr = force_const_mem (Pmode, addr);
9db1d521
HP
3240 emit_move_insn (temp, addr);
3241
fd7643fb 3242 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
9db1d521
HP
3243 if (reg != 0)
3244 {
6aea2f6d 3245 s390_load_address (reg, new);
9db1d521
HP
3246 new = reg;
3247 }
3248 }
3249 }
3250 else if (GET_CODE (addr) == SYMBOL_REF)
3251 {
3252 if (reg == 0)
3253 reg = gen_reg_rtx (Pmode);
3254
3255 if (flag_pic == 1)
3256 {
3257 /* Assume GOT offset < 4k. This is handled the same way
fd7643fb 3258 in both 31- and 64-bit code (@GOT). */
9db1d521 3259
c3cc6b78 3260 if (reload_in_progress || reload_completed)
6fb5fa3c 3261 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
9db1d521 3262
fd7643fb 3263 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
9db1d521
HP
3264 new = gen_rtx_CONST (Pmode, new);
3265 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
542a8afa 3266 new = gen_const_mem (Pmode, new);
9db1d521
HP
3267 emit_move_insn (reg, new);
3268 new = reg;
3269 }
9e8327e3 3270 else if (TARGET_CPU_ZARCH)
9db1d521
HP
3271 {
3272 /* If the GOT offset might be >= 4k, we determine the position
3273 of the GOT entry via a PC-relative LARL (@GOTENT). */
3274
3275 rtx temp = gen_reg_rtx (Pmode);
3276
fd7643fb 3277 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
9db1d521
HP
3278 new = gen_rtx_CONST (Pmode, new);
3279 emit_move_insn (temp, new);
3280
542a8afa 3281 new = gen_const_mem (Pmode, temp);
9db1d521
HP
3282 emit_move_insn (reg, new);
3283 new = reg;
3284 }
3285 else
3286 {
c7453384 3287 /* If the GOT offset might be >= 4k, we have to load it
9db1d521
HP
3288 from the literal pool (@GOT). */
3289
3290 rtx temp = gen_reg_rtx (Pmode);
3291
c3cc6b78 3292 if (reload_in_progress || reload_completed)
6fb5fa3c 3293 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
9db1d521 3294
fd7643fb 3295 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
e23795ea
UW
3296 addr = gen_rtx_CONST (Pmode, addr);
3297 addr = force_const_mem (Pmode, addr);
9db1d521
HP
3298 emit_move_insn (temp, addr);
3299
3300 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
542a8afa 3301 new = gen_const_mem (Pmode, new);
9db1d521
HP
3302 emit_move_insn (reg, new);
3303 new = reg;
3304 }
c7453384 3305 }
9db1d521
HP
3306 else
3307 {
3308 if (GET_CODE (addr) == CONST)
3309 {
3310 addr = XEXP (addr, 0);
3311 if (GET_CODE (addr) == UNSPEC)
3312 {
8d933e31 3313 gcc_assert (XVECLEN (addr, 0) == 1);
9db1d521
HP
3314 switch (XINT (addr, 1))
3315 {
fd7643fb 3316 /* If someone moved a GOT-relative UNSPEC
9db1d521 3317 out of the literal pool, force them back in. */
fd7643fb
UW
3318 case UNSPEC_GOTOFF:
3319 case UNSPEC_PLTOFF:
e23795ea 3320 new = force_const_mem (Pmode, orig);
9db1d521
HP
3321 break;
3322
fd7643fb
UW
3323 /* @GOT is OK as is if small. */
3324 case UNSPEC_GOT:
3325 if (flag_pic == 2)
3326 new = force_const_mem (Pmode, orig);
3327 break;
3328
9db1d521 3329 /* @GOTENT is OK as is. */
fd7643fb 3330 case UNSPEC_GOTENT:
9db1d521
HP
3331 break;
3332
3333 /* @PLT is OK as is on 64-bit, must be converted to
fd7643fb
UW
3334 GOT-relative @PLTOFF on 31-bit. */
3335 case UNSPEC_PLT:
9e8327e3 3336 if (!TARGET_CPU_ZARCH)
9db1d521
HP
3337 {
3338 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3339
fd7643fb 3340 if (reload_in_progress || reload_completed)
6fb5fa3c 3341 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
fd7643fb 3342
9db1d521 3343 addr = XVECEXP (addr, 0, 0);
c7453384 3344 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
fd7643fb 3345 UNSPEC_PLTOFF);
e23795ea
UW
3346 addr = gen_rtx_CONST (Pmode, addr);
3347 addr = force_const_mem (Pmode, addr);
9db1d521
HP
3348 emit_move_insn (temp, addr);
3349
fd7643fb 3350 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
9db1d521
HP
3351 if (reg != 0)
3352 {
6aea2f6d 3353 s390_load_address (reg, new);
9db1d521
HP
3354 new = reg;
3355 }
3356 }
3357 break;
3358
3359 /* Everything else cannot happen. */
3360 default:
8d933e31 3361 gcc_unreachable ();
9db1d521
HP
3362 }
3363 }
8d933e31
AS
3364 else
3365 gcc_assert (GET_CODE (addr) == PLUS);
9db1d521
HP
3366 }
3367 if (GET_CODE (addr) == PLUS)
3368 {
3369 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
cf9d7618
ANM
3370
3371 gcc_assert (!TLS_SYMBOLIC_CONST (op0));
3372 gcc_assert (!TLS_SYMBOLIC_CONST (op1));
3373
c7453384 3374 /* Check first to see if this is a constant offset
9db1d521
HP
3375 from a local symbol reference. */
3376 if ((GET_CODE (op0) == LABEL_REF
114278e7 3377 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
9db1d521
HP
3378 && GET_CODE (op1) == CONST_INT)
3379 {
e064939e
MM
3380 if (TARGET_CPU_ZARCH
3381 && larl_operand (op0, VOIDmode)
3382 && INTVAL (op1) < (HOST_WIDE_INT)1 << 31
3383 && INTVAL (op1) >= -((HOST_WIDE_INT)1 << 31))
9db1d521
HP
3384 {
3385 if (INTVAL (op1) & 1)
3386 {
c7453384 3387 /* LARL can't handle odd offsets, so emit a
9db1d521
HP
3388 pair of LARL and LA. */
3389 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3390
d3632d41 3391 if (!DISP_IN_RANGE (INTVAL (op1)))
9db1d521 3392 {
e064939e 3393 HOST_WIDE_INT even = INTVAL (op1) - 1;
9db1d521 3394 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
b30d2115 3395 op0 = gen_rtx_CONST (Pmode, op0);
a556fd39 3396 op1 = const1_rtx;
9db1d521
HP
3397 }
3398
3399 emit_move_insn (temp, op0);
3400 new = gen_rtx_PLUS (Pmode, temp, op1);
3401
3402 if (reg != 0)
3403 {
6aea2f6d 3404 s390_load_address (reg, new);
9db1d521
HP
3405 new = reg;
3406 }
3407 }
3408 else
3409 {
3410 /* If the offset is even, we can just use LARL.
3411 This will happen automatically. */
3412 }
3413 }
3414 else
3415 {
fd7643fb 3416 /* Access local symbols relative to the GOT. */
9db1d521
HP
3417
3418 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3419
fd7643fb 3420 if (reload_in_progress || reload_completed)
6fb5fa3c 3421 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
fd7643fb 3422
c7453384 3423 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
fd7643fb 3424 UNSPEC_GOTOFF);
e23795ea
UW
3425 addr = gen_rtx_PLUS (Pmode, addr, op1);
3426 addr = gen_rtx_CONST (Pmode, addr);
3427 addr = force_const_mem (Pmode, addr);
cfbab41c 3428 emit_move_insn (temp, addr);
9db1d521 3429
fd7643fb 3430 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
9db1d521
HP
3431 if (reg != 0)
3432 {
6aea2f6d 3433 s390_load_address (reg, new);
9db1d521
HP
3434 new = reg;
3435 }
3436 }
3437 }
3438
fd7643fb 3439 /* Now, check whether it is a GOT relative symbol plus offset
9db1d521
HP
3440 that was pulled out of the literal pool. Force it back in. */
3441
3442 else if (GET_CODE (op0) == UNSPEC
cfbab41c
JJ
3443 && GET_CODE (op1) == CONST_INT
3444 && XINT (op0, 1) == UNSPEC_GOTOFF)
9db1d521 3445 {
8d933e31 3446 gcc_assert (XVECLEN (op0, 0) == 1);
9db1d521 3447
e23795ea 3448 new = force_const_mem (Pmode, orig);
9db1d521
HP
3449 }
3450
3451 /* Otherwise, compute the sum. */
3452 else
3453 {
3454 base = legitimize_pic_address (XEXP (addr, 0), reg);
3455 new = legitimize_pic_address (XEXP (addr, 1),
3456 base == reg ? NULL_RTX : reg);
3457 if (GET_CODE (new) == CONST_INT)
3458 new = plus_constant (base, INTVAL (new));
3459 else
3460 {
3461 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
3462 {
3463 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
3464 new = XEXP (new, 1);
3465 }
3466 new = gen_rtx_PLUS (Pmode, base, new);
3467 }
3468
3469 if (GET_CODE (new) == CONST)
3470 new = XEXP (new, 0);
3471 new = force_operand (new, 0);
3472 }
3473 }
3474 }
3475 return new;
3476}
3477
fd3cd001
UW
3478/* Load the thread pointer into a register. */
3479
7b8acc34
AK
3480rtx
3481s390_get_thread_pointer (void)
fd3cd001 3482{
c5aa1d12 3483 rtx tp = gen_reg_rtx (Pmode);
fd3cd001 3484
c5aa1d12 3485 emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
fd3cd001
UW
3486 mark_reg_pointer (tp, BITS_PER_WORD);
3487
3488 return tp;
3489}
3490
ed9676cf
AK
3491/* Emit a tls call insn. The call target is the SYMBOL_REF stored
3492 in s390_tls_symbol, which always refers to __tls_get_offset.
3493 The returned offset is written to RESULT_REG and a USE rtx is
3494 generated for TLS_CALL. */
fd3cd001
UW
3495
3496static GTY(()) rtx s390_tls_symbol;
ed9676cf
AK
3497
3498static void
3499s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
fd3cd001 3500{
ed9676cf 3501 rtx insn;
38899e29 3502
8d933e31 3503 gcc_assert (flag_pic);
ed9676cf 3504
fd3cd001
UW
3505 if (!s390_tls_symbol)
3506 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
3507
38899e29
EC
3508 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
3509 gen_rtx_REG (Pmode, RETURN_REGNUM));
ed9676cf
AK
3510
3511 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
becfd6e5 3512 RTL_CONST_CALL_P (insn) = 1;
fd3cd001
UW
3513}
3514
3515/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3516 this (thread-local) address. REG may be used as a temporary. */
3517
3518static rtx
9c808aad 3519legitimize_tls_address (rtx addr, rtx reg)
fd3cd001
UW
3520{
3521 rtx new, tls_call, temp, base, r2, insn;
3522
3523 if (GET_CODE (addr) == SYMBOL_REF)
3524 switch (tls_symbolic_operand (addr))
3525 {
3526 case TLS_MODEL_GLOBAL_DYNAMIC:
3527 start_sequence ();
3528 r2 = gen_rtx_REG (Pmode, 2);
3529 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
3530 new = gen_rtx_CONST (Pmode, tls_call);
3531 new = force_const_mem (Pmode, new);
3532 emit_move_insn (r2, new);
ed9676cf 3533 s390_emit_tls_call_insn (r2, tls_call);
fd3cd001
UW
3534 insn = get_insns ();
3535 end_sequence ();
3536
3537 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3538 temp = gen_reg_rtx (Pmode);
3539 emit_libcall_block (insn, temp, r2, new);
3540
7b8acc34 3541 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
fd3cd001
UW
3542 if (reg != 0)
3543 {
3544 s390_load_address (reg, new);
3545 new = reg;
3546 }
3547 break;
3548
3549 case TLS_MODEL_LOCAL_DYNAMIC:
3550 start_sequence ();
3551 r2 = gen_rtx_REG (Pmode, 2);
3552 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
3553 new = gen_rtx_CONST (Pmode, tls_call);
3554 new = force_const_mem (Pmode, new);
3555 emit_move_insn (r2, new);
ed9676cf 3556 s390_emit_tls_call_insn (r2, tls_call);
fd3cd001
UW
3557 insn = get_insns ();
3558 end_sequence ();
3559
3560 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
3561 temp = gen_reg_rtx (Pmode);
3562 emit_libcall_block (insn, temp, r2, new);
3563
7b8acc34 3564 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
fd3cd001
UW
3565 base = gen_reg_rtx (Pmode);
3566 s390_load_address (base, new);
3567
3568 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
3569 new = gen_rtx_CONST (Pmode, new);
3570 new = force_const_mem (Pmode, new);
3571 temp = gen_reg_rtx (Pmode);
3572 emit_move_insn (temp, new);
3573
3574 new = gen_rtx_PLUS (Pmode, base, temp);
3575 if (reg != 0)
3576 {
3577 s390_load_address (reg, new);
3578 new = reg;
3579 }
3580 break;
3581
3582 case TLS_MODEL_INITIAL_EXEC:
3583 if (flag_pic == 1)
3584 {
3585 /* Assume GOT offset < 4k. This is handled the same way
3586 in both 31- and 64-bit code. */
3587
3588 if (reload_in_progress || reload_completed)
6fb5fa3c 3589 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
fd3cd001
UW
3590
3591 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3592 new = gen_rtx_CONST (Pmode, new);
3593 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
542a8afa 3594 new = gen_const_mem (Pmode, new);
fd3cd001
UW
3595 temp = gen_reg_rtx (Pmode);
3596 emit_move_insn (temp, new);
3597 }
9e8327e3 3598 else if (TARGET_CPU_ZARCH)
fd3cd001
UW
3599 {
3600 /* If the GOT offset might be >= 4k, we determine the position
3601 of the GOT entry via a PC-relative LARL. */
3602
3603 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3604 new = gen_rtx_CONST (Pmode, new);
3605 temp = gen_reg_rtx (Pmode);
3606 emit_move_insn (temp, new);
3607
542a8afa 3608 new = gen_const_mem (Pmode, temp);
fd3cd001
UW
3609 temp = gen_reg_rtx (Pmode);
3610 emit_move_insn (temp, new);
3611 }
3612 else if (flag_pic)
3613 {
3614 /* If the GOT offset might be >= 4k, we have to load it
3615 from the literal pool. */
3616
3617 if (reload_in_progress || reload_completed)
3618 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3619
3620 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3621 new = gen_rtx_CONST (Pmode, new);
3622 new = force_const_mem (Pmode, new);
3623 temp = gen_reg_rtx (Pmode);
3624 emit_move_insn (temp, new);
3625
3626 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3627 new = gen_const_mem (Pmode, new);
3628
3629 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3630 temp = gen_reg_rtx (Pmode);
3631 emit_insn (gen_rtx_SET (Pmode, temp, new));
3632 }
3633 else
3634 {
3635 /* In position-dependent code, load the absolute address of
3636 the GOT entry from the literal pool. */
3637
3638 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3639 new = gen_rtx_CONST (Pmode, new);
3640 new = force_const_mem (Pmode, new);
3641 temp = gen_reg_rtx (Pmode);
3642 emit_move_insn (temp, new);
3643
3644 new = temp;
3645 new = gen_const_mem (Pmode, new);
3646 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3647 temp = gen_reg_rtx (Pmode);
3648 emit_insn (gen_rtx_SET (Pmode, temp, new));
3649 }
3650
3651 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3652 if (reg != 0)
3653 {
3654 s390_load_address (reg, new);
3655 new = reg;
3656 }
3657 break;
3658
3659 case TLS_MODEL_LOCAL_EXEC:
3660 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3661 new = gen_rtx_CONST (Pmode, new);
3662 new = force_const_mem (Pmode, new);
3663 temp = gen_reg_rtx (Pmode);
3664 emit_move_insn (temp, new);
3665
3666 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3667 if (reg != 0)
3668 {
3669 s390_load_address (reg, new);
3670 new = reg;
3671 }
3672 break;
3673
3674 default:
3675 gcc_unreachable ();
3676 }
3677
3678 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
3679 {
3680 switch (XINT (XEXP (addr, 0), 1))
3681 {
3682 case UNSPEC_INDNTPOFF:
3683 gcc_assert (TARGET_CPU_ZARCH);
3684 new = addr;
3685 break;
3686
3687 default:
3688 gcc_unreachable ();
3689 }
3690 }
3691
3692 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3693 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3694 {
3695 new = XEXP (XEXP (addr, 0), 0);
3696 if (GET_CODE (new) != SYMBOL_REF)
3697 new = gen_rtx_CONST (Pmode, new);
3698
3699 new = legitimize_tls_address (new, reg);
3700 new = plus_constant (new, INTVAL (XEXP (XEXP (addr, 0), 1)));
3701 new = force_operand (new, 0);
3702 }
3703
3704 else
3705 gcc_unreachable (); /* for now ... */
3706
3707 return new;
3708}
3709
3710/* Emit insns to move operands[1] into operands[0]. */
3711
3712void
3713 emit_symbolic_move (rtx *operands)
3714 {
3715 rtx temp = !can_create_pseudo_p () ? operands[0] : gen_reg_rtx (Pmode);
3716
3717 if (GET_CODE (operands[0]) == MEM)
3718 operands[1] = force_reg (Pmode, operands[1]);
3719 else if (TLS_SYMBOLIC_CONST (operands[1]))
3720 operands[1] = legitimize_tls_address (operands[1], temp);
3721 else if (flag_pic)
3722 operands[1] = legitimize_pic_address (operands[1], temp);
3723}
3724
3725 /* Try machine-dependent ways of modifying an illegitimate address X
3726 to be legitimate. If we find one, return the new, valid address.
3727
3728 OLDX is the address as it was before break_out_memory_refs was called.
3729 In some cases it is useful to look at this to decide what needs to be done.
3730
3731 MODE is the mode of the operand pointed to by X.
3732
3733 When -fpic is used, special handling is needed for symbolic references.
3734 See comments by legitimize_pic_address for details. */
3735
3736rtx
3737 legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3738 enum machine_mode mode ATTRIBUTE_UNUSED)
3739 {
3740 rtx constant_term = const0_rtx;
3741
3742 if (TLS_SYMBOLIC_CONST (x))
3743 {
3744 x = legitimize_tls_address (x, 0);
3745
3746 if (legitimate_address_p (mode, x, FALSE))
3747 return x;
3748 }
3749 else if (GET_CODE (x) == PLUS
3750 && (TLS_SYMBOLIC_CONST (XEXP (x, 0))
3751 || TLS_SYMBOLIC_CONST (XEXP (x, 1))))
3752 {
3753 return x;
3754 }
3755 else if (flag_pic)
3756 {
3757 if (SYMBOLIC_CONST (x)
3758 || (GET_CODE (x) == PLUS
3759 && (SYMBOLIC_CONST (XEXP (x, 0))
3760 || SYMBOLIC_CONST (XEXP (x, 1)))))
3761 x = legitimize_pic_address (x, 0);
3762
3763 if (legitimate_address_p (mode, x, FALSE))
3764 return x;
3765 }
3766
3767 x = eliminate_constant_term (x, &constant_term);
3768
3769 /* Optimize loading of large displacements by splitting them
3770 into the multiple of 4K and the rest; this allows the
3771 former to be CSE'd if possible.
3772
3773 Don't do this if the displacement is added to a register
3774 pointing into the stack frame, as the offsets will
3775 change later anyway. */
3776
3777 if (GET_CODE (constant_term) == CONST_INT
3778 && !TARGET_LONG_DISPLACEMENT
3779 && !DISP_IN_RANGE (INTVAL (constant_term))
3780 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
3781 {
3782 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
3783 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
3784
3785 rtx temp = gen_reg_rtx (Pmode);
3786 rtx val = force_operand (GEN_INT (upper), temp);
3787 if (val != temp)
3788 emit_move_insn (temp, val);
3789
3790 x = gen_rtx_PLUS (Pmode, x, temp);
3791 constant_term = GEN_INT (lower);
3792 }
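/* For illustration (a sketch derived from the code above): a constant
   term of 0x12345 yields LOWER = 0x345 and UPPER = 0x12000, so the
   address becomes (x + 0x12000) + 0x345; the UPPER part lives in a
   register and can be CSE'd across references, while LOWER fits the
   12-bit displacement field.  */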
3793
3794 if (GET_CODE (x) == PLUS)
3795 {
3796 if (GET_CODE (XEXP (x, 0)) == REG)
3797 {
3798 rtx temp = gen_reg_rtx (Pmode);
3799 rtx val = force_operand (XEXP (x, 1), temp);
3800 if (val != temp)
3801 emit_move_insn (temp, val);
3802
3803 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
3804 }
3805
3806 else if (GET_CODE (XEXP (x, 1)) == REG)
3807 {
3808 rtx temp = gen_reg_rtx (Pmode);
3809 rtx val = force_operand (XEXP (x, 0), temp);
3810 if (val != temp)
3811 emit_move_insn (temp, val);
3812
3813 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
3814 }
3815 }
3816
3817 if (constant_term != const0_rtx)
3818 x = gen_rtx_PLUS (Pmode, x, constant_term);
3819
3820 return x;
3821}
3822
3823/* Try a machine-dependent way of reloading an illegitimate address AD
3824 operand. If we find one, push the reload and return the new address.
3825
3826 MODE is the mode of the enclosing MEM. OPNUM is the operand number
3827 and TYPE is the reload type of the current reload. */
3828
3829rtx
3830legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
3831 int opnum, int type)
3832{
3833 if (!optimize || TARGET_LONG_DISPLACEMENT)
3834 return NULL_RTX;
3835
3836 if (GET_CODE (ad) == PLUS)
3837 {
3838 rtx tem = simplify_binary_operation (PLUS, Pmode,
3839 XEXP (ad, 0), XEXP (ad, 1));
3840 if (tem)
3841 ad = tem;
3842 }
3843
3844 if (GET_CODE (ad) == PLUS
3845 && GET_CODE (XEXP (ad, 0)) == REG
3846 && GET_CODE (XEXP (ad, 1)) == CONST_INT
3847 && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
3848 {
3849 HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
3850 HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
3851 rtx cst, tem, new;
3852
3853 cst = GEN_INT (upper);
3854 if (!legitimate_reload_constant_p (cst))
3855 cst = force_const_mem (Pmode, cst);
3856
3857 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
3858 new = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));
3859
3860 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
3861 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3862 opnum, (enum reload_type) type);
3863 return new;
3864 }
3865
3866 return NULL_RTX;
3867}
3868
3869 /* Emit code to move LEN bytes from SRC to DST. */
3870
3871void
3872 s390_expand_movmem (rtx dst, rtx src, rtx len)
3873 {
3874 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3875 {
3876 if (INTVAL (len) > 0)
3877 emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
3878 }
3879
3880 else if (TARGET_MVCLE)
3881 {
3882 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
3883 }
3884
3885 else
3886 {
3887 rtx dst_addr, src_addr, count, blocks, temp;
3888 rtx loop_start_label = gen_label_rtx ();
3889 rtx loop_end_label = gen_label_rtx ();
3890 rtx end_label = gen_label_rtx ();
3891 enum machine_mode mode;
3892
3893 mode = GET_MODE (len);
3894 if (mode == VOIDmode)
3895 mode = Pmode;
3896
3897 dst_addr = gen_reg_rtx (Pmode);
3898 src_addr = gen_reg_rtx (Pmode);
3899 count = gen_reg_rtx (mode);
3900 blocks = gen_reg_rtx (mode);
3901
3902 convert_move (count, len, 1);
3903 emit_cmp_and_jump_insns (count, const0_rtx,
3904 EQ, NULL_RTX, mode, 1, end_label);
3905
3906 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3907 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
3908 dst = change_address (dst, VOIDmode, dst_addr);
3909 src = change_address (src, VOIDmode, src_addr);
3910
3911 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3912 if (temp != count)
3913 emit_move_insn (count, temp);
3914
3915 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
3916 if (temp != blocks)
3917 emit_move_insn (blocks, temp);
3918
3919 emit_cmp_and_jump_insns (blocks, const0_rtx,
3920 EQ, NULL_RTX, mode, 1, loop_end_label);
3921
3922 emit_label (loop_start_label);
3923
3924 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
3925 s390_load_address (dst_addr,
3926 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3927 s390_load_address (src_addr,
3928 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
3929
3930 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3931 if (temp != blocks)
3932 emit_move_insn (blocks, temp);
3933
3934 emit_cmp_and_jump_insns (blocks, const0_rtx,
3935 EQ, NULL_RTX, mode, 1, loop_end_label);
3936
3937 emit_jump (loop_start_label);
3938 emit_label (loop_end_label);
3939
3940 emit_insn (gen_movmem_short (dst, src,
3941 convert_to_mode (Pmode, count, 1)));
3942 emit_label (end_label);
3943 }
3944}
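/* For illustration (a sketch, assuming movmem_short uses the same
   length - 1 encoding with a register operand as it does with the
   constant above): a 600-byte move through the loop sets count = 599
   and blocks = 599 >> 8 = 2, so two 256-byte block moves are emitted
   while both addresses advance by 256 each round; the final
   movmem_short then consumes COUNT, whose low byte 599 & 0xff = 87
   encodes the remaining 88 bytes.  */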
3945
3946/* Emit code to set LEN bytes at DST to VAL.
3947 Make use of clrmem if VAL is zero. */
3948
3949void
3950 s390_expand_setmem (rtx dst, rtx len, rtx val)
3951 {
3952 if (GET_CODE (len) == CONST_INT && INTVAL (len) == 0)
3953 return;
3954
3955 gcc_assert (GET_CODE (val) == CONST_INT || GET_MODE (val) == QImode);
3956
3957 if (GET_CODE (len) == CONST_INT && INTVAL (len) > 0 && INTVAL (len) <= 257)
3958 {
3959 if (val == const0_rtx && INTVAL (len) <= 256)
3960 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
3961 else
3962 {
3963 /* Initialize memory by storing the first byte. */
3964 emit_move_insn (adjust_address (dst, QImode, 0), val);
3965
3966 if (INTVAL (len) > 1)
3967 {
3968 /* Initiate 1 byte overlap move.
3969 The first byte of DST is propagated through DSTP1.
3970 Prepare a movmem for: DST+1 = DST (length = LEN - 1).
3971 DST is set to size 1 so the rest of the memory location
3972 does not count as source operand. */
3973 rtx dstp1 = adjust_address (dst, VOIDmode, 1);
3974 set_mem_size (dst, const1_rtx);
3975
3976 emit_insn (gen_movmem_short (dstp1, dst,
3977 GEN_INT (INTVAL (len) - 2)));
3978 }
3979 }
3980 }
3981
3982 else if (TARGET_MVCLE)
3983 {
3984 val = force_not_mem (convert_modes (Pmode, QImode, val, 1));
3985 emit_insn (gen_setmem_long (dst, convert_to_mode (Pmode, len, 1), val));
3986 }
3987
3988 else
3989 {
3990 rtx dst_addr, src_addr, count, blocks, temp, dstp1 = NULL_RTX;
3991 rtx loop_start_label = gen_label_rtx ();
3992 rtx loop_end_label = gen_label_rtx ();
3993 rtx end_label = gen_label_rtx ();
3994 enum machine_mode mode;
3995
3996 mode = GET_MODE (len);
3997 if (mode == VOIDmode)
3998 mode = Pmode;
3999
4000 dst_addr = gen_reg_rtx (Pmode);
4001 src_addr = gen_reg_rtx (Pmode);
4002 count = gen_reg_rtx (mode);
4003 blocks = gen_reg_rtx (mode);
4004
4005 convert_move (count, len, 1);
4006 emit_cmp_and_jump_insns (count, const0_rtx,
4007 EQ, NULL_RTX, mode, 1, end_label);
4008
4009 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
4010 dst = change_address (dst, VOIDmode, dst_addr);
4011
4012 if (val == const0_rtx)
4013 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
4014 else
4015 {
4016 dstp1 = adjust_address (dst, VOIDmode, 1);
4017 set_mem_size (dst, const1_rtx);
4018
4019 /* Initialize memory by storing the first byte. */
4020 emit_move_insn (adjust_address (dst, QImode, 0), val);
4021
4022 /* If count is 1 we are done. */
4023 emit_cmp_and_jump_insns (count, const1_rtx,
4024 EQ, NULL_RTX, mode, 1, end_label);
4025
4026 temp = expand_binop (mode, add_optab, count, GEN_INT (-2), count, 1, 0);
4027 }
4028 if (temp != count)
4029 emit_move_insn (count, temp);
4030
4031 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
4032 if (temp != blocks)
4033 emit_move_insn (blocks, temp);
4034
4035 emit_cmp_and_jump_insns (blocks, const0_rtx,
4036 EQ, NULL_RTX, mode, 1, loop_end_label);
4037
4038 emit_label (loop_start_label);
4039
4040 if (val == const0_rtx)
4041 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
4042 else
4043 emit_insn (gen_movmem_short (dstp1, dst, GEN_INT (255)));
4044 s390_load_address (dst_addr,
4045 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
4046
4047 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
4048 if (temp != blocks)
4049 emit_move_insn (blocks, temp);
4050
4051 emit_cmp_and_jump_insns (blocks, const0_rtx,
4052 EQ, NULL_RTX, mode, 1, loop_end_label);
4053
4054 emit_jump (loop_start_label);
4055 emit_label (loop_end_label);
4056
4057 if (val == const0_rtx)
4058 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
4059 else
4060 emit_insn (gen_movmem_short (dstp1, dst, convert_to_mode (Pmode, count, 1)));
4061 emit_label (end_label);
4062 }
4063}
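/* For illustration of the overlap trick used above: to set five bytes,
   VAL is first stored at DST[0] and a 4-byte overlapping move
   DST+1 = DST follows.  Since the move proceeds byte by byte from the
   left, each copied byte reads the one written just before it, so VAL
   ripples through DST[1..4].  */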
4064
4065/* Emit code to compare LEN bytes at OP0 with those at OP1,
4066 and return the result in TARGET. */
4067
4068void
4069 s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
4070 {
4071 rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
4072 rtx tmp;
4073
4074 /* As the result of CMPINT is inverted compared to what we need,
4075 we have to swap the operands. */
4076 tmp = op0; op0 = op1; op1 = tmp;
4077
4078 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
4079 {
4080 if (INTVAL (len) > 0)
4081 {
4082 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
4083 emit_insn (gen_cmpint (target, ccreg));
4084 }
4085 else
4086 emit_move_insn (target, const0_rtx);
4087 }
4088 else if (TARGET_MVCLE)
4089 {
4090 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
4091 emit_insn (gen_cmpint (target, ccreg));
4092 }
4093 else
4094 {
4095 rtx addr0, addr1, count, blocks, temp;
4096 rtx loop_start_label = gen_label_rtx ();
4097 rtx loop_end_label = gen_label_rtx ();
4098 rtx end_label = gen_label_rtx ();
4099 enum machine_mode mode;
4100
4101 mode = GET_MODE (len);
4102 if (mode == VOIDmode)
4103 mode = Pmode;
4104
4105 addr0 = gen_reg_rtx (Pmode);
4106 addr1 = gen_reg_rtx (Pmode);
4107 count = gen_reg_rtx (mode);
4108 blocks = gen_reg_rtx (mode);
4109
4110 convert_move (count, len, 1);
4111 emit_cmp_and_jump_insns (count, const0_rtx,
4112 EQ, NULL_RTX, mode, 1, end_label);
4113
4114 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
4115 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
4116 op0 = change_address (op0, VOIDmode, addr0);
4117 op1 = change_address (op1, VOIDmode, addr1);
4118
4119 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
4120 if (temp != count)
4121 emit_move_insn (count, temp);
4122
4123 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
4124 if (temp != blocks)
4125 emit_move_insn (blocks, temp);
4126
4127 emit_cmp_and_jump_insns (blocks, const0_rtx,
4128 EQ, NULL_RTX, mode, 1, loop_end_label);
4129
4130 emit_label (loop_start_label);
4131
4132 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
4133 temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
4134 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
4135 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
4136 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
4137 emit_jump_insn (temp);
4138
4139 s390_load_address (addr0,
4140 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
4141 s390_load_address (addr1,
4142 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
4143
4144 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
4145 if (temp != blocks)
4146 emit_move_insn (blocks, temp);
4147
4148 emit_cmp_and_jump_insns (blocks, const0_rtx,
4149 EQ, NULL_RTX, mode, 1, loop_end_label);
4150
4151 emit_jump (loop_start_label);
4152 emit_label (loop_end_label);
4153
4154 emit_insn (gen_cmpmem_short (op0, op1,
4155 convert_to_mode (Pmode, count, 1)));
4156 emit_label (end_label);
4157
4158 emit_insn (gen_cmpint (target, ccreg));
4159 }
4160}
4161
4162
4163/* Expand conditional increment or decrement using alc/slb instructions.
4164 Should generate code setting DST to either SRC or SRC + INCREMENT,
4165 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
4166 Returns true if successful, false otherwise.
4167
4168 That makes it possible to implement some if-constructs without jumps e.g.:
4169 (borrow = CC0 | CC1 and carry = CC2 | CC3)
4170 unsigned int a, b, c;
4171 if (a < b) c++; -> CCU b > a -> CC2; c += carry;
4172 if (a < b) c--; -> CCL3 a - b -> borrow; c -= borrow;
4173 if (a <= b) c++; -> CCL3 b - a -> borrow; c += carry;
4174 if (a <= b) c--; -> CCU a <= b -> borrow; c -= borrow;
4175
4176 Checks for EQ and NE with a nonzero value need an additional xor e.g.:
4177 if (a == b) c++; -> CCL3 a ^= b; 0 - a -> borrow; c += carry;
4178 if (a == b) c--; -> CCU a ^= b; a <= 0 -> CC0 | CC1; c -= borrow;
4179 if (a != b) c++; -> CCU a ^= b; a > 0 -> CC2; c += carry;
4180 if (a != b) c--; -> CCL3 a ^= b; 0 - a -> borrow; c -= borrow; */
4181
4182bool
4183s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
4184 rtx dst, rtx src, rtx increment)
4185{
4186 enum machine_mode cmp_mode;
4187 enum machine_mode cc_mode;
4188 rtx op_res;
4189 rtx insn;
4190 rtvec p;
4191 int ret;
4192
4193 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
4194 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
4195 cmp_mode = SImode;
4196 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
4197 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
4198 cmp_mode = DImode;
4199 else
4200 return false;
4201
4202 /* Try ADD LOGICAL WITH CARRY. */
4203 if (increment == const1_rtx)
4204 {
4205 /* Determine CC mode to use. */
4206 if (cmp_code == EQ || cmp_code == NE)
4207 {
4208 if (cmp_op1 != const0_rtx)
4209 {
4210 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
4211 NULL_RTX, 0, OPTAB_WIDEN);
4212 cmp_op1 = const0_rtx;
4213 }
4214
4215 cmp_code = cmp_code == EQ ? LEU : GTU;
4216 }
4217
4218 if (cmp_code == LTU || cmp_code == LEU)
4219 {
4220 rtx tem = cmp_op0;
4221 cmp_op0 = cmp_op1;
4222 cmp_op1 = tem;
4223 cmp_code = swap_condition (cmp_code);
4224 }
4225
4226 switch (cmp_code)
4227 {
4228 case GTU:
4229 cc_mode = CCUmode;
4230 break;
4231
4232 case GEU:
4233 cc_mode = CCL3mode;
4234 break;
4235
4236 default:
4237 return false;
4238 }
4239
4240 /* Emit comparison instruction pattern. */
4241 if (!register_operand (cmp_op0, cmp_mode))
4242 cmp_op0 = force_reg (cmp_mode, cmp_op0);
4243
4244 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
4245 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
4246 /* We use insn_invalid_p here to add clobbers if required. */
4247 ret = insn_invalid_p (emit_insn (insn));
4248 gcc_assert (!ret);
4249
4250 /* Emit ALC instruction pattern. */
4251 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
4252 gen_rtx_REG (cc_mode, CC_REGNUM),
4253 const0_rtx);
4254
4255 if (src != const0_rtx)
4256 {
4257 if (!register_operand (src, GET_MODE (dst)))
4258 src = force_reg (GET_MODE (dst), src);
4259
4260 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, src);
4261 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, const0_rtx);
4262 }
4263
4264 p = rtvec_alloc (2);
4265 RTVEC_ELT (p, 0) =
4266 gen_rtx_SET (VOIDmode, dst, op_res);
4267 RTVEC_ELT (p, 1) =
4268 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
4269 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
4270
4271 return true;
4272 }
4273
4274 /* Try SUBTRACT LOGICAL WITH BORROW. */
4275 if (increment == constm1_rtx)
4276 {
4277 /* Determine CC mode to use. */
4278 if (cmp_code == EQ || cmp_code == NE)
4279 {
4280 if (cmp_op1 != const0_rtx)
4281 {
4282 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
4283 NULL_RTX, 0, OPTAB_WIDEN);
4284 cmp_op1 = const0_rtx;
4285 }
4286
4287 cmp_code = cmp_code == EQ ? LEU : GTU;
4288 }
4289
4290 if (cmp_code == GTU || cmp_code == GEU)
4291 {
4292 rtx tem = cmp_op0;
4293 cmp_op0 = cmp_op1;
4294 cmp_op1 = tem;
4295 cmp_code = swap_condition (cmp_code);
4296 }
4297
4298 switch (cmp_code)
4299 {
4300 case LEU:
4301 cc_mode = CCUmode;
4302 break;
4303
4304 case LTU:
4305 cc_mode = CCL3mode;
4306 break;
4307
4308 default:
4309 return false;
4310 }
4311
4312 /* Emit comparison instruction pattern. */
4313 if (!register_operand (cmp_op0, cmp_mode))
4314 cmp_op0 = force_reg (cmp_mode, cmp_op0);
4315
4316 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
4317 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
4318 /* We use insn_invalid_p here to add clobbers if required. */
4319 ret = insn_invalid_p (emit_insn (insn));
4320 gcc_assert (!ret);
4321
4322 /* Emit SLB instruction pattern. */
4323 if (!register_operand (src, GET_MODE (dst)))
4324 src = force_reg (GET_MODE (dst), src);
4325
4326 op_res = gen_rtx_MINUS (GET_MODE (dst),
4327 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
4328 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
4329 gen_rtx_REG (cc_mode, CC_REGNUM),
4330 const0_rtx));
4331 p = rtvec_alloc (2);
4332 RTVEC_ELT (p, 0) =
4333 gen_rtx_SET (VOIDmode, dst, op_res);
4334 RTVEC_ELT (p, 1) =
4335 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
4336 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
4337
4338 return true;
4339 }
4340
4341 return false;
4342}
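/* For illustration (a sketch of the ALC path above): with unsigned
   SImode operands, "if (a < b) c++;" comes out of this function as a
   CCUmode compare of b against a followed by a single
   add-logical-with-carry style pattern that adds the resulting carry
   to C, so no branch is needed.  */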
4343
4344 /* Expand code for the insv template. Return true if successful. */
4345
4346 bool
4347s390_expand_insv (rtx dest, rtx op1, rtx op2, rtx src)
4348{
4349 int bitsize = INTVAL (op1);
4350 int bitpos = INTVAL (op2);
4351
4352 /* On z10 we can use the risbg instruction to implement insv. */
4353 if (TARGET_Z10
4354 && ((GET_MODE (dest) == DImode && GET_MODE (src) == DImode)
4355 || (GET_MODE (dest) == SImode && GET_MODE (src) == SImode)))
4356 {
4357 rtx op;
4358 rtx clobber;
4359
4360 op = gen_rtx_SET (GET_MODE(src),
4361 gen_rtx_ZERO_EXTRACT (GET_MODE (dest), dest, op1, op2),
4362 src);
4363 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
4364 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clobber)));
4365
4366 return true;
4367 }
4368
4369 /* We need byte alignment. */
4370 if (bitsize % BITS_PER_UNIT)
4371 return false;
4372
4373 if (bitpos == 0
4374 && memory_operand (dest, VOIDmode)
4375 && (register_operand (src, word_mode)
4376 || const_int_operand (src, VOIDmode)))
4377 {
4378 /* Emit standard pattern if possible. */
4379 enum machine_mode mode = smallest_mode_for_size (bitsize, MODE_INT);
4380 if (GET_MODE_BITSIZE (mode) == bitsize)
4381 emit_move_insn (adjust_address (dest, mode, 0), gen_lowpart (mode, src));
4382
4383 /* (set (ze (mem)) (const_int)). */
4384 else if (const_int_operand (src, VOIDmode))
4385 {
4386 int size = bitsize / BITS_PER_UNIT;
4387 rtx src_mem = adjust_address (force_const_mem (word_mode, src), BLKmode,
4388 GET_MODE_SIZE (word_mode) - size);
4389
4390 dest = adjust_address (dest, BLKmode, 0);
4391 set_mem_size (dest, GEN_INT (size));
4392 s390_expand_movmem (dest, src_mem, GEN_INT (size));
4393 }
4394
4395 /* (set (ze (mem)) (reg)). */
4396 else if (register_operand (src, word_mode))
4397 {
4398 if (bitsize <= GET_MODE_BITSIZE (SImode))
4399 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, op1,
4400 const0_rtx), src);
4401 else
4402 {
4403 /* Emit st,stcmh sequence. */
4404 int stcmh_width = bitsize - GET_MODE_BITSIZE (SImode);
4405 int size = stcmh_width / BITS_PER_UNIT;
4406
4407 emit_move_insn (adjust_address (dest, SImode, size),
4408 gen_lowpart (SImode, src));
4409 set_mem_size (dest, GEN_INT (size));
4410 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, GEN_INT
4411 (stcmh_width), const0_rtx),
4412 gen_rtx_LSHIFTRT (word_mode, src, GEN_INT
4413 (GET_MODE_BITSIZE (SImode))));
4414 }
4415 }
4416 else
4417 return false;
4418
4419 return true;
4420 }
4421
4422 /* (set (ze (reg)) (const_int)). */
4423 if (TARGET_ZARCH
4424 && register_operand (dest, word_mode)
4425 && (bitpos % 16) == 0
4426 && (bitsize % 16) == 0
4427 && const_int_operand (src, VOIDmode))
4428 {
4429 HOST_WIDE_INT val = INTVAL (src);
4430 int regpos = bitpos + bitsize;
4431
4432 while (regpos > bitpos)
4433 {
4434 enum machine_mode putmode;
4435 int putsize;
4436
4437 if (TARGET_EXTIMM && (regpos % 32 == 0) && (regpos >= bitpos + 32))
4438 putmode = SImode;
4439 else
4440 putmode = HImode;
4441
4442 putsize = GET_MODE_BITSIZE (putmode);
4443 regpos -= putsize;
4444 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
4445 GEN_INT (putsize),
4446 GEN_INT (regpos)),
4447 gen_int_mode (val, putmode));
4448 val >>= putsize;
4449 }
4450 gcc_assert (regpos == bitpos);
4451 return true;
4452 }
4453
4454 return false;
4455}
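/* For illustration of the register/constant case above (bit positions
   counted from the most significant bit, as the loop assumes):
   inserting 0x1234abcd into a 32-bit field at bit position 0 takes a
   single SImode insertion when TARGET_EXTIMM is set; otherwise the
   loop splits it into two HImode insertions, 0xabcd at position 16 and
   then 0x1234 at position 0.  */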
4456
4457/* A subroutine of s390_expand_cs_hqi and s390_expand_atomic which returns a
4458 register that holds VAL of mode MODE shifted by COUNT bits. */
4459
4460static inline rtx
4461s390_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
4462{
4463 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
4464 NULL_RTX, 1, OPTAB_DIRECT);
4465 return expand_simple_binop (SImode, ASHIFT, val, count,
4466 NULL_RTX, 1, OPTAB_DIRECT);
4467}
4468
4469/* Structure to hold the initial parameters for a compare_and_swap operation
4470 in HImode and QImode. */
4471
4472struct alignment_context
4473{
4474 rtx memsi; /* SI aligned memory location. */
4475 rtx shift; /* Bit offset with regard to lsb. */
4476 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */
4477 rtx modemaski; /* ~modemask */
4478 bool aligned; /* True if memory is aligned, false otherwise. */
4479};
4480
4481/* A subroutine of s390_expand_cs_hqi and s390_expand_atomic to initialize
4482 structure AC for transparent simplifying, if the memory alignment is known
4483 to be at least 32bit. MEM is the memory location for the actual operation
4484 and MODE its mode. */
4485
4486static void
4487init_alignment_context (struct alignment_context *ac, rtx mem,
4488 enum machine_mode mode)
4489{
4490 ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
4491 ac->aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
4492
4493 if (ac->aligned)
4494 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */
4495 else
4496 {
4497 /* Alignment is unknown. */
4498 rtx byteoffset, addr, align;
4499
4500 /* Force the address into a register. */
4501 addr = force_reg (Pmode, XEXP (mem, 0));
4502
4503 /* Align it to SImode. */
4504 align = expand_simple_binop (Pmode, AND, addr,
4505 GEN_INT (-GET_MODE_SIZE (SImode)),
4506 NULL_RTX, 1, OPTAB_DIRECT);
4507 /* Generate MEM. */
4508 ac->memsi = gen_rtx_MEM (SImode, align);
4509 MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
4510 set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
4511 set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));
4512
4513 /* Calculate shiftcount. */
4514 byteoffset = expand_simple_binop (Pmode, AND, addr,
4515 GEN_INT (GET_MODE_SIZE (SImode) - 1),
4516 NULL_RTX, 1, OPTAB_DIRECT);
4517 /* As we already have some offset, evaluate the remaining distance. */
4518 ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
4519 NULL_RTX, 1, OPTAB_DIRECT);
4520
4521 }
4522 /* Shift is the byte count, but we need the bitcount. */
4523 ac->shift = expand_simple_binop (SImode, MULT, ac->shift, GEN_INT (BITS_PER_UNIT),
4524 NULL_RTX, 1, OPTAB_DIRECT);
4525 /* Calculate masks. */
4526 ac->modemask = expand_simple_binop (SImode, ASHIFT,
4527 GEN_INT (GET_MODE_MASK (mode)), ac->shift,
4528 NULL_RTX, 1, OPTAB_DIRECT);
4529 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1);
4530}
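/* For illustration (derived from the computations above, on this
   big-endian target): for a HImode access at an address with
   ADDR & 3 == 1 and unknown alignment, MEMSI covers ADDR & -4,
   BYTEOFFSET is 1, and SHIFT becomes (2 - 1) * 8 = 8, so the halfword
   sits in bits 8..23 of the loaded word (counting from the least
   significant bit) and MODEMASK is 0xffff << 8.  */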
4531
4532/* Expand an atomic compare and swap operation for HImode and QImode. MEM is
4533 the memory location, CMP the old value to compare MEM with and NEW the value
4534 to set if CMP == MEM.
4535 CMP is never in memory for compare_and_swap_cc because
4536 expand_bool_compare_and_swap puts it into a register for later compare. */
4537
4538void
4539s390_expand_cs_hqi (enum machine_mode mode, rtx target, rtx mem, rtx cmp, rtx new)
4540{
4541 struct alignment_context ac;
4542 rtx cmpv, newv, val, resv, cc;
4543 rtx res = gen_reg_rtx (SImode);
4544 rtx csloop = gen_label_rtx ();
4545 rtx csend = gen_label_rtx ();
4546
4547 gcc_assert (register_operand (target, VOIDmode));
4548 gcc_assert (MEM_P (mem));
4549
4550 init_alignment_context (&ac, mem, mode);
4551
4552 /* Shift the values to the correct bit positions. */
4553 if (!(ac.aligned && MEM_P (cmp)))
4554 cmp = s390_expand_mask_and_shift (cmp, mode, ac.shift);
4555 if (!(ac.aligned && MEM_P (new)))
4556 new = s390_expand_mask_and_shift (new, mode, ac.shift);
4557
4558 /* Load full word. Subsequent loads are performed by CS. */
4559 val = expand_simple_binop (SImode, AND, ac.memsi, ac.modemaski,
4560 NULL_RTX, 1, OPTAB_DIRECT);
4561
4562 /* Start CS loop. */
4563 emit_label (csloop);
4564 /* val = "<mem>00..0<mem>"
4565 * cmp = "00..0<cmp>00..0"
4566 * new = "00..0<new>00..0"
4567 */
4568
4569 /* Patch cmp and new with val at correct position. */
4570 if (ac.aligned && MEM_P (cmp))
4571 {
4572 cmpv = force_reg (SImode, val);
4573 store_bit_field (cmpv, GET_MODE_BITSIZE (mode), 0, SImode, cmp);
4574 }
4575 else
4576 cmpv = force_reg (SImode, expand_simple_binop (SImode, IOR, cmp, val,
4577 NULL_RTX, 1, OPTAB_DIRECT));
4578 if (ac.aligned && MEM_P (new))
4579 {
4580 newv = force_reg (SImode, val);
4581 store_bit_field (newv, GET_MODE_BITSIZE (mode), 0, SImode, new);
4582 }
4583 else
4584 newv = force_reg (SImode, expand_simple_binop (SImode, IOR, new, val,
4585 NULL_RTX, 1, OPTAB_DIRECT));
4586
4587 /* Jump to end if we're done (likely?). */
4588 s390_emit_jump (csend, s390_emit_compare_and_swap (EQ, res, ac.memsi,
4589 cmpv, newv));
4590
4591 /* Check for changes outside mode. */
4592 resv = expand_simple_binop (SImode, AND, res, ac.modemaski,
4593 NULL_RTX, 1, OPTAB_DIRECT);
4594 cc = s390_emit_compare (NE, resv, val);
4595 emit_move_insn (val, resv);
4596 /* Loop internal if so. */
4597 s390_emit_jump (csloop, cc);
4598
4599 emit_label (csend);
4600
4601 /* Return the correct part of the bitfield. */
4602 convert_move (target, expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
4603 NULL_RTX, 1, OPTAB_DIRECT), 1);
4604}
4605
4606 /* Expand an atomic operation CODE of mode MODE. MEM is the memory location
4607 and VAL the value to play with. If AFTER is true then store the value
4608 MEM holds after the operation, if AFTER is false then store the value MEM
4609 holds before the operation. If TARGET is zero then discard that value, else
4610 store it to TARGET. */
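/* For illustration (a sketch of the expansion below): an atomic add on
   a halfword keeps the containing word in CMP, computes NEW as
   CMP plus the shifted addend masked back into the halfword's bit
   positions, and retries the compare-and-swap until the word was
   unchanged; the caller then gets either CMP or NEW shifted back down,
   depending on AFTER.  */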
4611
4612void
4613s390_expand_atomic (enum machine_mode mode, enum rtx_code code,
4614 rtx target, rtx mem, rtx val, bool after)
4615{
4616 struct alignment_context ac;
4617 rtx cmp;
4618 rtx new = gen_reg_rtx (SImode);
4619 rtx orig = gen_reg_rtx (SImode);
4620 rtx csloop = gen_label_rtx ();
4621
4622 gcc_assert (!target || register_operand (target, VOIDmode));
4623 gcc_assert (MEM_P (mem));
4624
4625 init_alignment_context (&ac, mem, mode);
4626
4627 /* Shift val to the correct bit positions.
4628 Preserve "icm", but prevent "ex icm". */
4629 if (!(ac.aligned && code == SET && MEM_P (val)))
4630 val = s390_expand_mask_and_shift (val, mode, ac.shift);
4631
4632 /* Further preparation insns. */
4633 if (code == PLUS || code == MINUS)
4634 emit_move_insn (orig, val);
4635 else if (code == MULT || code == AND) /* val = "11..1<val>11..1" */
4636 val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
4637 NULL_RTX, 1, OPTAB_DIRECT);
4638
4639 /* Load full word. Subsequent loads are performed by CS. */
4640 cmp = force_reg (SImode, ac.memsi);
4641
4642 /* Start CS loop. */
4643 emit_label (csloop);
4644 emit_move_insn (new, cmp);
4645
4646 /* Patch new with val at correct position. */
4647 switch (code)
4648 {
4649 case PLUS:
4650 case MINUS:
4651 val = expand_simple_binop (SImode, code, new, orig,
4652 NULL_RTX, 1, OPTAB_DIRECT);
4653 val = expand_simple_binop (SImode, AND, val, ac.modemask,
4654 NULL_RTX, 1, OPTAB_DIRECT);
4655 /* FALLTHRU */
4656 case SET:
4657 if (ac.aligned && MEM_P (val))
4658 store_bit_field (new, GET_MODE_BITSIZE (mode), 0, SImode, val);
4659 else
4660 {
4661 new = expand_simple_binop (SImode, AND, new, ac.modemaski,
4662 NULL_RTX, 1, OPTAB_DIRECT);
4663 new = expand_simple_binop (SImode, IOR, new, val,
4664 NULL_RTX, 1, OPTAB_DIRECT);
4665 }
4666 break;
4667 case AND:
4668 case IOR:
4669 case XOR:
4670 new = expand_simple_binop (SImode, code, new, val,
4671 NULL_RTX, 1, OPTAB_DIRECT);
4672 break;
4673 case MULT: /* NAND */
4674 new = expand_simple_binop (SImode, XOR, new, ac.modemask,
4675 NULL_RTX, 1, OPTAB_DIRECT);
4676 new = expand_simple_binop (SImode, AND, new, val,
4677 NULL_RTX, 1, OPTAB_DIRECT);
4678 break;
4679 default:
4680 gcc_unreachable ();
4681 }
4682
4683 s390_emit_jump (csloop, s390_emit_compare_and_swap (NE, cmp,
4684 ac.memsi, cmp, new));
4685
4686 /* Return the correct part of the bitfield. */
4687 if (target)
4688 convert_move (target, expand_simple_binop (SImode, LSHIFTRT,
4689 after ? new : cmp, ac.shift,
4690 NULL_RTX, 1, OPTAB_DIRECT), 1);
4691}
4692
4693 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
4694 We need to emit DTP-relative relocations. */
4695
4696static void s390_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
4697
4698static void
4699 s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
4700{
4701 switch (size)
4702 {
4703 case 4:
4704 fputs ("\t.long\t", file);
4705 break;
4706 case 8:
4707 fputs ("\t.quad\t", file);
4708 break;
4709 default:
4710 gcc_unreachable ();
4711 }
4712 output_addr_const (file, x);
4713 fputs ("@DTPOFF", file);
4714}
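/* E.g. for SIZE == 4 and X a SYMBOL_REF for `foo', this emits
       .long   foo@DTPOFF
   i.e. foo's offset relative to its module's TLS block.  */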
4715
4716 #ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
4717 /* Implement TARGET_MANGLE_TYPE. */
4718
4719static const char *
4720 s390_mangle_type (const_tree type)
4721{
4722 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
4723 && TARGET_LONG_DOUBLE_128)
4724 return "g";
4725
4726 /* For all other types, use normal C++ mangling. */
4727 return NULL;
4728}
4729#endif
4730
4731 /* In the name of slightly smaller debug output, and to cater to
4732 general assembler lossage, recognize various UNSPEC sequences
4733 and turn them back into a direct symbol reference. */
4734
4735 static rtx
4736 s390_delegitimize_address (rtx orig_x)
4737{
4738 rtx x = orig_x, y;
4739
4740 if (GET_CODE (x) != MEM)
4741 return orig_x;
4742
4743 x = XEXP (x, 0);
4744 if (GET_CODE (x) == PLUS
4745 && GET_CODE (XEXP (x, 1)) == CONST
4746 && GET_CODE (XEXP (x, 0)) == REG
4747 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
4748 {
4749 y = XEXP (XEXP (x, 1), 0);
4750 if (GET_CODE (y) == UNSPEC
4751 && XINT (y, 1) == UNSPEC_GOT)
4752 return XVECEXP (y, 0, 0);
4753 return orig_x;
4754 }
4755
4756 if (GET_CODE (x) == CONST)
4757 {
4758 y = XEXP (x, 0);
4759 if (GET_CODE (y) == UNSPEC
4760 && XINT (y, 1) == UNSPEC_GOTENT)
4761 return XVECEXP (y, 0, 0);
4762 return orig_x;
4763 }
4764
4765 return orig_x;
4766}
4767
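/* E.g. a (mem (const (unspec [(symbol_ref "sym")] UNSPEC_GOTENT))) is
   folded back to plain (symbol_ref "sym"), so dumps and debug info
   show the symbol rather than the GOT indirection.  */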
4768/* Output operand OP to stdio stream FILE.
4769 OP is an address (register + offset) which is not used to address data;
4770 instead the rightmost bits are interpreted as the value. */
4771
4772static void
4773print_shift_count_operand (FILE *file, rtx op)
4774{
4775 HOST_WIDE_INT offset;
4776 rtx base;
4777
4778 /* Extract base register and offset. */
4779 if (!s390_decompose_shift_count (op, &base, &offset))
4780 gcc_unreachable ();
4781
4782 /* Sanity check. */
4783 if (base)
4784 {
4785 gcc_assert (GET_CODE (base) == REG);
4786 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
4787 gcc_assert (REGNO_REG_CLASS (REGNO (base)) == ADDR_REGS);
4788 }
4789
4790 /* Offsets are restricted to twelve bits. */
4791 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & ((1 << 12) - 1));
4792 if (base)
4793 fprintf (file, "(%s)", reg_names[REGNO (base)]);
4794}
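/* E.g. an operand (plus (reg %r2) (const_int 4101)) prints as
   "5(%r2)", since only the low twelve bits of the offset
   (4101 & 4095 = 5) can be encoded in the displacement field.  */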
4795
4796 /* See 'get_some_local_dynamic_name'. */
4797
4798static int
4799 get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
4800{
4801 rtx x = *px;
4802
4803 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
4804 {
4805 x = get_pool_constant (x);
4806 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
4807 }
4808
4809 if (GET_CODE (x) == SYMBOL_REF
4810 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
4811 {
4812 cfun->machine->some_ld_name = XSTR (x, 0);
4813 return 1;
4814 }
4815
4816 return 0;
4817}
4818
4819/* Locate some local-dynamic symbol still in use by this function
4820 so that we can print its name in local-dynamic base patterns. */
4821
4822static const char *
4823get_some_local_dynamic_name (void)
4824{
4825 rtx insn;
4826
4827 if (cfun->machine->some_ld_name)
4828 return cfun->machine->some_ld_name;
4829
4830 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
4831 if (INSN_P (insn)
4832 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
4833 return cfun->machine->some_ld_name;
4834
4835 gcc_unreachable ();
4836}
4837
4838 /* Output machine-dependent UNSPECs occurring in address constant X
4839 in assembler syntax to stdio stream FILE. Returns true if the
4840 constant X could be recognized, false otherwise. */
4841
4842bool
4843s390_output_addr_const_extra (FILE *file, rtx x)
4844 {
4845 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
4846 switch (XINT (x, 1))
4847 {
4848 case UNSPEC_GOTENT:
4849 output_addr_const (file, XVECEXP (x, 0, 0));
4850 fprintf (file, "@GOTENT");
4851 return true;
4852 case UNSPEC_GOT:
4853 output_addr_const (file, XVECEXP (x, 0, 0));
4854 fprintf (file, "@GOT");
4855 return true;
4856 case UNSPEC_GOTOFF:
4857 output_addr_const (file, XVECEXP (x, 0, 0));
4858 fprintf (file, "@GOTOFF");
4859 return true;
4860 case UNSPEC_PLT:
4861 output_addr_const (file, XVECEXP (x, 0, 0));
4862 fprintf (file, "@PLT");
4863 return true;
4864 case UNSPEC_PLTOFF:
4865 output_addr_const (file, XVECEXP (x, 0, 0));
4866 fprintf (file, "@PLTOFF");
4867 return true;
4868 case UNSPEC_TLSGD:
4869 output_addr_const (file, XVECEXP (x, 0, 0));
4870 fprintf (file, "@TLSGD");
4871 return true;
4872 case UNSPEC_TLSLDM:
4873 assemble_name (file, get_some_local_dynamic_name ());
4874 fprintf (file, "@TLSLDM");
4875 return true;
4876 case UNSPEC_DTPOFF:
4877 output_addr_const (file, XVECEXP (x, 0, 0));
4878 fprintf (file, "@DTPOFF");
4879 return true;
4880 case UNSPEC_NTPOFF:
4881 output_addr_const (file, XVECEXP (x, 0, 0));
4882 fprintf (file, "@NTPOFF");
4883 return true;
4884 case UNSPEC_GOTNTPOFF:
4885 output_addr_const (file, XVECEXP (x, 0, 0));
4886 fprintf (file, "@GOTNTPOFF");
4887 return true;
4888 case UNSPEC_INDNTPOFF:
4889 output_addr_const (file, XVECEXP (x, 0, 0));
4890 fprintf (file, "@INDNTPOFF");
4891 return true;
4892 }
4893
4894 return false;
4895}
4896
4897 /* Output address operand ADDR in assembler syntax to
4898 stdio stream FILE. */
4899
4900void
4901 print_operand_address (FILE *file, rtx addr)
4902{
4903 struct s390_address ad;
4904
4905 if (s390_symref_operand_p (addr, NULL, NULL))
4906 {
4907 gcc_assert (TARGET_Z10);
4908 output_addr_const (file, addr);
4909 return;
4910 }
4911
4912 if (!s390_decompose_address (addr, &ad)
4913 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
4914 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
4915 output_operand_lossage ("cannot decompose address");
4916
4917 if (ad.disp)
4918 output_addr_const (file, ad.disp);
4919 else
4920 fprintf (file, "0");
4921
4922 if (ad.base && ad.indx)
4923 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
4924 reg_names[REGNO (ad.base)]);
4925 else if (ad.base)
4926 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4927}
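/* E.g. an address (plus (reg %r1) (const_int 100)) prints as
   "100(%r1)", and a base-plus-index address prints as
   "disp(index,base)" with the index register first, as assembler
   syntax requires.  */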
4928
4929/* Output operand X in assembler syntax to stdio stream FILE.
4930 CODE specified the format flag. The following format flags
4931 are recognized:
4932
4933 'C': print opcode suffix for branch condition.
4934 'D': print opcode suffix for inverse branch condition.
4935 'J': print tls_load/tls_gdcall/tls_ldcall suffix
4936 'G': print the size of the operand in bytes.
4937 'O': print only the displacement of a memory reference.
4938 'R': print only the base register of a memory reference.
4939 'S': print S-type memory reference (base+displacement).
4940 'N': print the second word of a DImode operand.
4941 'M': print the second word of a TImode operand.
4942 'Y': print shift count operand.
994fe660 4943
4944 'b': print integer X as if it's an unsigned byte.
4945 'c': print integer X as if it's a signed byte.
4946 'x': print integer X as if it's an unsigned halfword.
4947 'h': print integer X as if it's a signed halfword.
4948 'i': print the first nonzero HImode part of X.
4949 'j': print the first HImode part unequal to -1 of X.
4950 'k': print the first nonzero SImode part of X.
4951 'm': print the first SImode part unequal to -1 of X.
4952 'o': print integer X as if it's an unsigned 32bit word. */
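/* For illustration with X = (const_int 0x12348001): 'x' prints 32769
   (the low halfword zero-extended), 'h' prints -32767 (the same
   halfword sign-extended), 'b' prints 1 (the low byte), and 'o'
   prints 305430529 (the low 32 bits).  */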
4953
4954void
4955 print_operand (FILE *file, rtx x, int code)
4956{
4957 switch (code)
4958 {
4959 case 'C':
4960 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
4961 return;
4962
4963 case 'D':
4964 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
4965 return;
4966
4967 case 'J':
4968 if (GET_CODE (x) == SYMBOL_REF)
4969 {
4970 fprintf (file, "%s", ":tls_load:");
4971 output_addr_const (file, x);
4972 }
4973 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
4974 {
4975 fprintf (file, "%s", ":tls_gdcall:");
4976 output_addr_const (file, XVECEXP (x, 0, 0));
4977 }
4978 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
4979 {
4980 fprintf (file, "%s", ":tls_ldcall:");
4981 assemble_name (file, get_some_local_dynamic_name ());
4982 }
4983 else
4984 gcc_unreachable ();
4985 return;
4986
4987 case 'G':
4988 fprintf (file, "%u", GET_MODE_SIZE (GET_MODE (x)));
4989 return;
4990
4991 case 'O':
4992 {
4993 struct s390_address ad;
4994 int ret;
4995
4996 gcc_assert (GET_CODE (x) == MEM);
4997 ret = s390_decompose_address (XEXP (x, 0), &ad);
4998 gcc_assert (ret);
4999 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
5000 gcc_assert (!ad.indx);
5001
5002 if (ad.disp)
5003 output_addr_const (file, ad.disp);
5004 else
5005 fprintf (file, "0");
5006 }
5007 return;
5008
5009 case 'R':
5010 {
5011 struct s390_address ad;
5012 int ret;
5013
5014 gcc_assert (GET_CODE (x) == MEM);
5015 ret = s390_decompose_address (XEXP (x, 0), &ad);
5016 gcc_assert (ret);
5017 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
5018 gcc_assert (!ad.indx);
5019
5020 if (ad.base)
5021 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
5022 else
5023 fprintf (file, "0");
5024 }
5025 return;
5026
5027 case 'S':
5028 {
5029 struct s390_address ad;
5030 int ret;
5031
5032 gcc_assert (GET_CODE (x) == MEM);
5033 ret = s390_decompose_address (XEXP (x, 0), &ad);
5034 gcc_assert (ret);
93fa8428 5035 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
8d933e31 5036 gcc_assert (!ad.indx);
fc0ea003
UW
5037
5038 if (ad.disp)
5039 output_addr_const (file, ad.disp);
5040 else
5041 fprintf (file, "0");
5042
5043 if (ad.base)
5044 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
5045 }
5046 return;
5047
5048 case 'N':
5049 if (GET_CODE (x) == REG)
5050 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
5051 else if (GET_CODE (x) == MEM)
5052 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
5053 else
5054 gcc_unreachable ();
5055 break;
5056
5057 case 'M':
5058 if (GET_CODE (x) == REG)
5059 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
5060 else if (GET_CODE (x) == MEM)
5061 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
5062 else
5063 gcc_unreachable ();
5064 break;
5065
5066 case 'Y':
5067 print_shift_count_operand (file, x);
5068 return;
5069 }
5070
5071 switch (GET_CODE (x))
5072 {
5073 case REG:
5074 fprintf (file, "%s", reg_names[REGNO (x)]);
5075 break;
5076
5077 case MEM:
5078 output_address (XEXP (x, 0));
5079 break;
5080
5081 case CONST:
5082 case CODE_LABEL:
5083 case LABEL_REF:
5084 case SYMBOL_REF:
5085 output_addr_const (file, x);
5086 break;
5087
5088 case CONST_INT:
5089 if (code == 'b')
5090 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
5091 else if (code == 'c')
5092 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xff) ^ 0x80) - 0x80);
5093 else if (code == 'x')
5094 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
5095 else if (code == 'h')
5096 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
5097 else if (code == 'i')
5098 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5099 s390_extract_part (x, HImode, 0));
5100 else if (code == 'j')
5101 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5102 s390_extract_part (x, HImode, -1));
5103 else if (code == 'k')
5104 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5105 s390_extract_part (x, SImode, 0));
5106 else if (code == 'm')
5107 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5108 s390_extract_part (x, SImode, -1));
5109 else if (code == 'o')
5110 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffffffff);
5111 else
5112 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
5113 break;
5114
5115 case CONST_DOUBLE:
5116 gcc_assert (GET_MODE (x) == VOIDmode);
5117 if (code == 'b')
5118 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
5119 else if (code == 'x')
5120 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
5121 else if (code == 'h')
5122 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
5123 else
5124 gcc_unreachable ();
5125 break;
5126
5127 default:
5128 fatal_insn ("UNKNOWN in print_operand !?", x);
5129 break;
5130 }
5131}
5132
5133/* Target hook for assembling integer objects. We need to define it
5134 here to work around a bug in some versions of GAS, which couldn't
5135 handle values smaller than INT_MIN when printed in decimal. */
5136
5137static bool
5138 s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
5139{
5140 if (size == 8 && aligned_p
5141 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
5142 {
5143 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
5144 INTVAL (x));
5145 return true;
5146 }
5147 return default_assemble_integer (x, size, aligned_p);
5148}
5149
5150 /* Returns true if register REGNO is used for forming
5151 a memory address in expression X. */
5152
5153 static bool
5154 reg_used_in_mem_p (int regno, rtx x)
5155{
5156 enum rtx_code code = GET_CODE (x);
5157 int i, j;
5158 const char *fmt;
5159
5160 if (code == MEM)
5161 {
5162 if (refers_to_regno_p (regno, regno+1,
5163 XEXP (x, 0), 0))
5164 return true;
5165 }
5166 else if (code == SET
5167 && GET_CODE (SET_DEST (x)) == PC)
5168 {
5169 if (refers_to_regno_p (regno, regno+1,
5170 SET_SRC (x), 0))
5171 return true;
5172 }
5173
5174 fmt = GET_RTX_FORMAT (code);
5175 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5176 {
5177 if (fmt[i] == 'e'
5178 && reg_used_in_mem_p (regno, XEXP (x, i)))
5179 return true;
5180
5181 else if (fmt[i] == 'E')
5182 for (j = 0; j < XVECLEN (x, i); j++)
5183 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
5184 return true;
5185 }
5186 return false;
5187}
5188
5189 /* Returns true if expression DEP_RTX sets an address register
5190 used by instruction INSN to address memory. */
5191
5192 static bool
5193 addr_generation_dependency_p (rtx dep_rtx, rtx insn)
5194 {
5195 rtx target, pat;
5196
5197 if (GET_CODE (dep_rtx) == INSN)
5198 dep_rtx = PATTERN (dep_rtx);
5199
9db1d521
HP
5200 if (GET_CODE (dep_rtx) == SET)
5201 {
5202 target = SET_DEST (dep_rtx);
5203 if (GET_CODE (target) == STRICT_LOW_PART)
5204 target = XEXP (target, 0);
5205 while (GET_CODE (target) == SUBREG)
5206 target = SUBREG_REG (target);
5207
5208 if (GET_CODE (target) == REG)
5209 {
5210 int regno = REGNO (target);
5211
5212 if (s390_safe_attr_type (insn) == TYPE_LA)
5213 {
5214 pat = PATTERN (insn);
5215 if (GET_CODE (pat) == PARALLEL)
5216 {
5217 gcc_assert (XVECLEN (pat, 0) == 2);
5218 pat = XVECEXP (pat, 0, 0);
5219 }
5220 gcc_assert (GET_CODE (pat) == SET);
5221 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
5222 }
5223 else if (get_attr_atype (insn) == ATYPE_AGEN)
5224 return reg_used_in_mem_p (regno, PATTERN (insn));
5225 }
5226 }
5227 return false;
5228}
5229
5230/* Return 1, if dep_insn sets register used in insn in the agen unit. */
5231
5232 int
5233 s390_agen_dep_p (rtx dep_insn, rtx insn)
5234 {
5235 rtx dep_rtx = PATTERN (dep_insn);
5236 int i;
5237
5238 if (GET_CODE (dep_rtx) == SET
5239 && addr_generation_dependency_p (dep_rtx, insn))
5240 return 1;
5241 else if (GET_CODE (dep_rtx) == PARALLEL)
5242 {
5243 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
5244 {
5245 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
5246 return 1;
5247 }
5248 }
5249 return 0;
5250}
5251
5252/* A C statement (sans semicolon) to update the integer scheduling priority
5253 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
5254 reduce the priority to execute INSN later. Do not define this macro if
5255 you do not need to adjust the scheduling priorities of insns.
5256
5257 A STD instruction should be scheduled earlier,
5258 in order to use the bypass. */
5259
5260static int
5261 s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
5262{
5263 if (! INSN_P (insn))
5264 return priority;
5265
5266 if (s390_tune != PROCESSOR_2084_Z990
5267 && s390_tune != PROCESSOR_2094_Z9_109)
5268 return priority;
5269
5270 switch (s390_safe_attr_type (insn))
5271 {
5272 case TYPE_FSTOREDF:
5273 case TYPE_FSTORESF:
5274 priority = priority << 3;
5275 break;
5276 case TYPE_STORE:
5277 case TYPE_STM:
5278 priority = priority << 1;
5279 break;
5280 default:
5281 break;
5282 }
5283 return priority;
5284}
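/* E.g. an FP store entering this hook with priority 2 leaves with
   priority 16 (2 << 3), nudging the scheduler to issue it early enough
   for the store bypass to pay off.  */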
5285
5286 /* The number of instructions that can be issued per cycle. */
5287
5288 static int
5289 s390_issue_rate (void)
5290 {
5291 switch (s390_tune)
5292 {
5293 case PROCESSOR_2084_Z990:
5294 case PROCESSOR_2094_Z9_109:
5295 return 3;
5296 case PROCESSOR_2097_Z10:
5297 return 2;
5298 default:
5299 return 1;
5300 }
5301 }
5302
5303 static int
5304 s390_first_cycle_multipass_dfa_lookahead (void)
5305 {
5306 return 4;
5307}
5308
5309
5310/* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
5311 Fix up MEMs as required. */
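/* E.g. (mem (symbol_ref LC0)) with CONSTANT_POOL_ADDRESS_P set becomes
   (mem (unspec [(symbol_ref LC0) (base)] UNSPEC_LTREF)), making the
   implicit use of the literal-pool base register explicit in the
   RTL.  */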
5312
5313static void
5314annotate_constant_pool_refs (rtx *x)
5315{
5316 int i, j;
5317 const char *fmt;
5318
5319 gcc_assert (GET_CODE (*x) != SYMBOL_REF
5320 || !CONSTANT_POOL_ADDRESS_P (*x));
5321
5322 /* Literal pool references can only occur inside a MEM ... */
5323 if (GET_CODE (*x) == MEM)
5324 {
5325 rtx memref = XEXP (*x, 0);
5326
5327 if (GET_CODE (memref) == SYMBOL_REF
5328 && CONSTANT_POOL_ADDRESS_P (memref))
5329 {
5330 rtx base = cfun->machine->base_reg;
5331 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
5332 UNSPEC_LTREF);
5333
5334 *x = replace_equiv_address (*x, addr);
5335 return;
5336 }
5337
5338 if (GET_CODE (memref) == CONST
5339 && GET_CODE (XEXP (memref, 0)) == PLUS
5340 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
5341 && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
5342 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
5343 {
5344 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
5345 rtx sym = XEXP (XEXP (memref, 0), 0);
5346 rtx base = cfun->machine->base_reg;
5347 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
5348 UNSPEC_LTREF);
5349
5350 *x = replace_equiv_address (*x, plus_constant (addr, off));
5351 return;
5352 }
5353 }
5354
5355 /* ... or a load-address type pattern. */
5356 if (GET_CODE (*x) == SET)
5357 {
5358 rtx addrref = SET_SRC (*x);
5359
5360 if (GET_CODE (addrref) == SYMBOL_REF
5361 && CONSTANT_POOL_ADDRESS_P (addrref))
5362 {
5363 rtx base = cfun->machine->base_reg;
5364 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
5365 UNSPEC_LTREF);
5366
5367 SET_SRC (*x) = addr;
5368 return;
5369 }
5370
5371 if (GET_CODE (addrref) == CONST
5372 && GET_CODE (XEXP (addrref, 0)) == PLUS
5373 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
5374 && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
5375 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
5376 {
5377 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
5378 rtx sym = XEXP (XEXP (addrref, 0), 0);
5379 rtx base = cfun->machine->base_reg;
5380 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
5381 UNSPEC_LTREF);
5382
5383 SET_SRC (*x) = plus_constant (addr, off);
5384 return;
5385 }
5386 }
5387
5388 /* Annotate LTREL_BASE as well. */
5389 if (GET_CODE (*x) == UNSPEC
5390 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
5391 {
5392 rtx base = cfun->machine->base_reg;
5393 *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
5394 UNSPEC_LTREL_BASE);
5395 return;
5396 }
5397
5398 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5399 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5400 {
5401 if (fmt[i] == 'e')
5402 {
5403 annotate_constant_pool_refs (&XEXP (*x, i));
5404 }
5405 else if (fmt[i] == 'E')
5406 {
5407 for (j = 0; j < XVECLEN (*x, i); j++)
5408 annotate_constant_pool_refs (&XVECEXP (*x, i, j));
5409 }
5410 }
5411}
5412
ab96de7e
AS
5413/* Split all branches that exceed the maximum distance.
5414 Returns true if this created a new literal pool entry. */
5415
5416static int
5417s390_split_branches (void)
5418{
5419 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
8d933e31 5420 int new_literal = 0, ret;
ab96de7e
AS
5421 rtx insn, pat, tmp, target;
5422 rtx *label;
5423
5424 /* We need correct insn addresses. */
5425
5426 shorten_branches (get_insns ());
5427
5428 /* Find all branches whose target is more than 64KB away, and split them. */
5429
5430 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5431 {
5432 if (GET_CODE (insn) != JUMP_INSN)
5433 continue;
5434
5435 pat = PATTERN (insn);
5436 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
5437 pat = XVECEXP (pat, 0, 0);
5438 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
5439 continue;
5440
5441 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
5442 {
5443 label = &SET_SRC (pat);
5444 }
5445 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
5446 {
5447 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
5448 label = &XEXP (SET_SRC (pat), 1);
5449 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
5450 label = &XEXP (SET_SRC (pat), 2);
5451 else
5452 continue;
5453 }
5454 else
5455 continue;
5456
5457 if (get_attr_length (insn) <= 4)
5458 continue;
5459
e2df5c1d
UW
5460 /* We are going to use the return register as a scratch register;
5461 make sure it will be saved/restored by the prologue/epilogue. */
5462 cfun_frame_layout.save_return_addr_p = 1;
5463
ab96de7e
AS
5464 if (!flag_pic)
5465 {
5466 new_literal = 1;
5467 tmp = force_const_mem (Pmode, *label);
5468 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
5469 INSN_ADDRESSES_NEW (tmp, -1);
5470 annotate_constant_pool_refs (&PATTERN (tmp));
5471
5472 target = temp_reg;
5473 }
5474 else
5475 {
5476 new_literal = 1;
5477 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
5478 UNSPEC_LTREL_OFFSET);
5479 target = gen_rtx_CONST (Pmode, target);
5480 target = force_const_mem (Pmode, target);
5481 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
5482 INSN_ADDRESSES_NEW (tmp, -1);
5483 annotate_constant_pool_refs (&PATTERN (tmp));
5484
5485 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
5486 cfun->machine->base_reg),
5487 UNSPEC_LTREL_BASE);
5488 target = gen_rtx_PLUS (Pmode, temp_reg, target);
5489 }
5490
8d933e31
AS
5491 ret = validate_change (insn, label, target, 0);
5492 gcc_assert (ret);
ab96de7e
AS
5493 }
5494
5495 return new_literal;
5496}
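/* Sketch of the result in the non-PIC case: instead of branching to the
   out-of-range label L directly, the stream now loads L's address from the
   literal pool into the return register and branches through it:
     (set (reg 14) (mem <pool entry holding L>))
     (set (pc) (if_then_else ... (reg 14) ...))
   In the PIC case the pool holds an UNSPEC_LTREL_OFFSET instead, and the
   target is rebuilt as the base register plus that offset. */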
5497
b2ccb744 5498
585539a1
UW
5499/* Find an annotated literal pool symbol referenced in RTX X,
5500 and store it at REF. Will abort if X contains references to
5501 more than one such pool symbol; multiple references to the same
5502 symbol are allowed, however.
b2ccb744 5503
c7453384 5504 The rtx pointed to by REF must be initialized to NULL_RTX
b2ccb744
UW
5505 by the caller before calling this routine. */
5506
5507static void
9c808aad 5508find_constant_pool_ref (rtx x, rtx *ref)
b2ccb744
UW
5509{
5510 int i, j;
5511 const char *fmt;
5512
fd7643fb
UW
5513 /* Ignore LTREL_BASE references. */
5514 if (GET_CODE (x) == UNSPEC
5515 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5516 return;
5af2f3d3
UW
5517 /* Likewise POOL_ENTRY insns. */
5518 if (GET_CODE (x) == UNSPEC_VOLATILE
5519 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
5520 return;
fd7643fb 5521
8d933e31
AS
5522 gcc_assert (GET_CODE (x) != SYMBOL_REF
5523 || !CONSTANT_POOL_ADDRESS_P (x));
585539a1
UW
5524
5525 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
b2ccb744 5526 {
585539a1 5527 rtx sym = XVECEXP (x, 0, 0);
8d933e31
AS
5528 gcc_assert (GET_CODE (sym) == SYMBOL_REF
5529 && CONSTANT_POOL_ADDRESS_P (sym));
585539a1 5530
b2ccb744 5531 if (*ref == NULL_RTX)
585539a1 5532 *ref = sym;
8d933e31
AS
5533 else
5534 gcc_assert (*ref == sym);
585539a1
UW
5535
5536 return;
b2ccb744
UW
5537 }
5538
5539 fmt = GET_RTX_FORMAT (GET_CODE (x));
5540 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5541 {
5542 if (fmt[i] == 'e')
5543 {
5544 find_constant_pool_ref (XEXP (x, i), ref);
5545 }
5546 else if (fmt[i] == 'E')
5547 {
5548 for (j = 0; j < XVECLEN (x, i); j++)
5549 find_constant_pool_ref (XVECEXP (x, i, j), ref);
5550 }
5551 }
5552}
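/* The recursive helpers in this file all share the same traversal idiom:
   look up the RTX format string and recurse into 'e' (subexpression) and
   'E' (expression vector) slots. As a stand-alone sketch (the name
   walk_rtx is hypothetical):

   static void
   walk_rtx (rtx x)
   {
     const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
     int i, j;

     for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
       if (fmt[i] == 'e')
         walk_rtx (XEXP (x, i));
       else if (fmt[i] == 'E')
         for (j = 0; j < XVECLEN (x, i); j++)
           walk_rtx (XVECEXP (x, i, j));
   }
*/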
5553
585539a1
UW
5554/* Replace every reference to the annotated literal pool
5555 symbol REF in X by its base plus OFFSET. */
b2ccb744
UW
5556
5557static void
585539a1 5558replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
b2ccb744
UW
5559{
5560 int i, j;
5561 const char *fmt;
5562
8d933e31 5563 gcc_assert (*x != ref);
b2ccb744 5564
585539a1
UW
5565 if (GET_CODE (*x) == UNSPEC
5566 && XINT (*x, 1) == UNSPEC_LTREF
5567 && XVECEXP (*x, 0, 0) == ref)
b2ccb744 5568 {
585539a1
UW
5569 *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
5570 return;
b2ccb744
UW
5571 }
5572
585539a1
UW
5573 if (GET_CODE (*x) == PLUS
5574 && GET_CODE (XEXP (*x, 1)) == CONST_INT
5575 && GET_CODE (XEXP (*x, 0)) == UNSPEC
5576 && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
5577 && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
b2ccb744 5578 {
585539a1
UW
5579 rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
5580 *x = plus_constant (addr, INTVAL (XEXP (*x, 1)));
5581 return;
b2ccb744
UW
5582 }
5583
5584 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5585 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5586 {
5587 if (fmt[i] == 'e')
5588 {
585539a1 5589 replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
b2ccb744
UW
5590 }
5591 else if (fmt[i] == 'E')
5592 {
5593 for (j = 0; j < XVECLEN (*x, i); j++)
585539a1 5594 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
b2ccb744
UW
5595 }
5596 }
5597}
5598
c7453384 5599/* Check whether X contains an UNSPEC_LTREL_BASE.
fd7643fb 5600 Return its constant pool symbol if found, NULL_RTX otherwise. */
aee4e0db 5601
fd7643fb 5602static rtx
9c808aad 5603find_ltrel_base (rtx x)
aee4e0db 5604{
aee4e0db
UW
5605 int i, j;
5606 const char *fmt;
5607
fd7643fb
UW
5608 if (GET_CODE (x) == UNSPEC
5609 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5610 return XVECEXP (x, 0, 0);
aee4e0db
UW
5611
5612 fmt = GET_RTX_FORMAT (GET_CODE (x));
5613 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5614 {
5615 if (fmt[i] == 'e')
5616 {
fd7643fb
UW
5617 rtx fnd = find_ltrel_base (XEXP (x, i));
5618 if (fnd)
5619 return fnd;
aee4e0db
UW
5620 }
5621 else if (fmt[i] == 'E')
5622 {
5623 for (j = 0; j < XVECLEN (x, i); j++)
fd7643fb
UW
5624 {
5625 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
5626 if (fnd)
5627 return fnd;
5628 }
aee4e0db
UW
5629 }
5630 }
5631
fd7643fb 5632 return NULL_RTX;
aee4e0db
UW
5633}
5634
585539a1 5635/* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
aee4e0db
UW
5636
5637static void
585539a1 5638replace_ltrel_base (rtx *x)
aee4e0db 5639{
fd7643fb 5640 int i, j;
aee4e0db
UW
5641 const char *fmt;
5642
fd7643fb
UW
5643 if (GET_CODE (*x) == UNSPEC
5644 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
aee4e0db 5645 {
585539a1 5646 *x = XVECEXP (*x, 0, 1);
fd7643fb 5647 return;
aee4e0db
UW
5648 }
5649
5650 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5651 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5652 {
5653 if (fmt[i] == 'e')
5654 {
585539a1 5655 replace_ltrel_base (&XEXP (*x, i));
aee4e0db
UW
5656 }
5657 else if (fmt[i] == 'E')
5658 {
5659 for (j = 0; j < XVECLEN (*x, i); j++)
585539a1 5660 replace_ltrel_base (&XVECEXP (*x, i, j));
aee4e0db
UW
5661 }
5662 }
5663}
5664
5665
fd7643fb 5666/* We keep a list of constants which we have to add to internal
b2ccb744
UW
5667 constant tables in the middle of large functions. */
5668
4dc19cc0 5669#define NR_C_MODES 11
c7453384 5670enum machine_mode constant_modes[NR_C_MODES] =
b2ccb744 5671{
4dc19cc0
AK
5672 TFmode, TImode, TDmode,
5673 DFmode, DImode, DDmode,
5674 SFmode, SImode, SDmode,
b2ccb744
UW
5675 HImode,
5676 QImode
5677};
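/* Note the ordering: constant_modes[] runs from the largest, most strictly
   aligned modes down to QImode. s390_dump_pool below walks the table in
   order, which is what ensures each pool entry is emitted with sufficient
   alignment. */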
5678
b2ccb744
UW
5679struct constant
5680{
5681 struct constant *next;
5682 rtx value;
5683 rtx label;
5684};
5685
5686struct constant_pool
5687{
5688 struct constant_pool *next;
5689 rtx first_insn;
aee4e0db
UW
5690 rtx pool_insn;
5691 bitmap insns;
03870a04 5692 rtx emit_pool_after;
b2ccb744
UW
5693
5694 struct constant *constants[NR_C_MODES];
9bb86f41 5695 struct constant *execute;
b2ccb744
UW
5696 rtx label;
5697 int size;
5698};
5699
ab96de7e
AS
5700/* Allocate new constant_pool structure. */
5701
5702static struct constant_pool *
5703s390_alloc_pool (void)
5704{
5705 struct constant_pool *pool;
5706 int i;
5707
5708 pool = (struct constant_pool *) xmalloc (sizeof *pool);
5709 pool->next = NULL;
5710 for (i = 0; i < NR_C_MODES; i++)
5711 pool->constants[i] = NULL;
5712
5713 pool->execute = NULL;
5714 pool->label = gen_label_rtx ();
5715 pool->first_insn = NULL_RTX;
5716 pool->pool_insn = NULL_RTX;
5717 pool->insns = BITMAP_ALLOC (NULL);
5718 pool->size = 0;
03870a04 5719 pool->emit_pool_after = NULL_RTX;
ab96de7e
AS
5720
5721 return pool;
5722}
b2ccb744
UW
5723
5724/* Create new constant pool covering instructions starting at INSN
5725 and chain it to the end of POOL_LIST. */
5726
5727static struct constant_pool *
9c808aad 5728s390_start_pool (struct constant_pool **pool_list, rtx insn)
b2ccb744
UW
5729{
5730 struct constant_pool *pool, **prev;
b2ccb744 5731
5af2f3d3 5732 pool = s390_alloc_pool ();
b2ccb744 5733 pool->first_insn = insn;
aee4e0db 5734
b2ccb744
UW
5735 for (prev = pool_list; *prev; prev = &(*prev)->next)
5736 ;
5737 *prev = pool;
5738
5739 return pool;
5740}
5741
aee4e0db
UW
5742/* End the range of instructions covered by POOL at INSN and emit
5743 a placeholder insn representing the pool. */
b2ccb744
UW
5744
5745static void
9c808aad 5746s390_end_pool (struct constant_pool *pool, rtx insn)
b2ccb744 5747{
aee4e0db
UW
5748 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
5749
5750 if (!insn)
5751 insn = get_last_insn ();
5752
5753 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
5754 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5755}
5756
5757/* Add INSN to the list of insns covered by POOL. */
5758
5759static void
9c808aad 5760s390_add_pool_insn (struct constant_pool *pool, rtx insn)
aee4e0db
UW
5761{
5762 bitmap_set_bit (pool->insns, INSN_UID (insn));
b2ccb744
UW
5763}
5764
5765/* Return pool out of POOL_LIST that covers INSN. */
5766
5767static struct constant_pool *
9c808aad 5768s390_find_pool (struct constant_pool *pool_list, rtx insn)
b2ccb744 5769{
b2ccb744
UW
5770 struct constant_pool *pool;
5771
b2ccb744 5772 for (pool = pool_list; pool; pool = pool->next)
aee4e0db 5773 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
b2ccb744
UW
5774 break;
5775
5776 return pool;
5777}
5778
aee4e0db 5779/* Add constant VAL of mode MODE to the constant pool POOL. */
b2ccb744 5780
aee4e0db 5781static void
9c808aad 5782s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
b2ccb744
UW
5783{
5784 struct constant *c;
b2ccb744
UW
5785 int i;
5786
5787 for (i = 0; i < NR_C_MODES; i++)
5788 if (constant_modes[i] == mode)
5789 break;
8d933e31 5790 gcc_assert (i != NR_C_MODES);
b2ccb744
UW
5791
5792 for (c = pool->constants[i]; c != NULL; c = c->next)
5793 if (rtx_equal_p (val, c->value))
5794 break;
5795
5796 if (c == NULL)
5797 {
5798 c = (struct constant *) xmalloc (sizeof *c);
5799 c->value = val;
5800 c->label = gen_label_rtx ();
5801 c->next = pool->constants[i];
5802 pool->constants[i] = c;
5803 pool->size += GET_MODE_SIZE (mode);
5804 }
aee4e0db 5805}
b2ccb744 5806
aee4e0db
UW
5807/* Find constant VAL of mode MODE in the constant pool POOL.
5808 Return an RTX describing the distance from the start of
5809 the pool to the location of the constant. */
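/* The RTX returned below has the shape
     (const (minus (label_ref C_LABEL) (label_ref POOL_LABEL)))
   i.e. the byte offset of the constant's label from the pool base label. */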
c7453384 5810
aee4e0db 5811static rtx
9c808aad
AJ
5812s390_find_constant (struct constant_pool *pool, rtx val,
5813 enum machine_mode mode)
aee4e0db
UW
5814{
5815 struct constant *c;
5816 rtx offset;
5817 int i;
c7453384 5818
aee4e0db
UW
5819 for (i = 0; i < NR_C_MODES; i++)
5820 if (constant_modes[i] == mode)
5821 break;
8d933e31 5822 gcc_assert (i != NR_C_MODES);
c7453384 5823
aee4e0db
UW
5824 for (c = pool->constants[i]; c != NULL; c = c->next)
5825 if (rtx_equal_p (val, c->value))
5826 break;
c7453384 5827
8d933e31 5828 gcc_assert (c);
c7453384 5829
aee4e0db
UW
5830 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
5831 gen_rtx_LABEL_REF (Pmode, pool->label));
b2ccb744
UW
5832 offset = gen_rtx_CONST (Pmode, offset);
5833 return offset;
5834}
5835
ab96de7e
AS
5836/* Check whether INSN is an execute. Return the label_ref to its
5837 execute target template if so, NULL_RTX otherwise. */
5838
5839static rtx
5840s390_execute_label (rtx insn)
5841{
5842 if (GET_CODE (insn) == INSN
5843 && GET_CODE (PATTERN (insn)) == PARALLEL
5844 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
5845 && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
5846 return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);
5847
5848 return NULL_RTX;
5849}
5850
9bb86f41
UW
5851/* Add execute target for INSN to the constant pool POOL. */
5852
5853static void
5854s390_add_execute (struct constant_pool *pool, rtx insn)
5855{
5856 struct constant *c;
5857
5858 for (c = pool->execute; c != NULL; c = c->next)
5859 if (INSN_UID (insn) == INSN_UID (c->value))
5860 break;
5861
5862 if (c == NULL)
5863 {
9bb86f41
UW
5864 c = (struct constant *) xmalloc (sizeof *c);
5865 c->value = insn;
d24959df 5866 c->label = gen_label_rtx ();
9bb86f41
UW
5867 c->next = pool->execute;
5868 pool->execute = c;
d24959df 5869 pool->size += 6;
9bb86f41
UW
5870 }
5871}
5872
5873/* Find execute target for INSN in the constant pool POOL.
5874 Return an RTX describing the distance from the start of
5875 the pool to the location of the execute target. */
5876
5877static rtx
5878s390_find_execute (struct constant_pool *pool, rtx insn)
5879{
5880 struct constant *c;
5881 rtx offset;
5882
5883 for (c = pool->execute; c != NULL; c = c->next)
5884 if (INSN_UID (insn) == INSN_UID (c->value))
5885 break;
5886
8d933e31 5887 gcc_assert (c);
9bb86f41
UW
5888
5889 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
5890 gen_rtx_LABEL_REF (Pmode, pool->label));
5891 offset = gen_rtx_CONST (Pmode, offset);
5892 return offset;
5893}
5894
ab96de7e 5895/* For an execute INSN, extract the execute target template. */
9bb86f41
UW
5896
5897static rtx
ab96de7e 5898s390_execute_target (rtx insn)
9bb86f41 5899{
ab96de7e
AS
5900 rtx pattern = PATTERN (insn);
5901 gcc_assert (s390_execute_label (insn));
9bb86f41
UW
5902
5903 if (XVECLEN (pattern, 0) == 2)
5904 {
5905 pattern = copy_rtx (XVECEXP (pattern, 0, 1));
5906 }
5907 else
5908 {
5909 rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
5910 int i;
5911
5912 for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
5913 RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));
5914
5915 pattern = gen_rtx_PARALLEL (VOIDmode, vec);
5916 }
5917
5918 return pattern;
5919}
5920
5921/* Indicate that INSN cannot be duplicated. This is the case for
5922 execute insns that carry a unique label. */
5923
5924static bool
5925s390_cannot_copy_insn_p (rtx insn)
5926{
5927 rtx label = s390_execute_label (insn);
5928 return label && label != const0_rtx;
5929}
5930
5af2f3d3
UW
5931/* Dump out the constants in POOL. If REMOTE_LABEL is true,
5932 do not emit the pool base label. */
b2ccb744 5933
9bb86f41 5934static void
5af2f3d3 5935s390_dump_pool (struct constant_pool *pool, bool remote_label)
b2ccb744
UW
5936{
5937 struct constant *c;
9bb86f41 5938 rtx insn = pool->pool_insn;
b2ccb744
UW
5939 int i;
5940
9bb86f41
UW
5941 /* Switch to rodata section. */
5942 if (TARGET_CPU_ZARCH)
5943 {
5944 insn = emit_insn_after (gen_pool_section_start (), insn);
5945 INSN_ADDRESSES_NEW (insn, -1);
5946 }
5947
5948 /* Ensure minimum pool alignment. */
9e8327e3 5949 if (TARGET_CPU_ZARCH)
9bb86f41 5950 insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
b2ccb744 5951 else
9bb86f41 5952 insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
b2ccb744
UW
5953 INSN_ADDRESSES_NEW (insn, -1);
5954
9bb86f41 5955 /* Emit pool base label. */
5af2f3d3
UW
5956 if (!remote_label)
5957 {
5958 insn = emit_label_after (pool->label, insn);
5959 INSN_ADDRESSES_NEW (insn, -1);
5960 }
b2ccb744
UW
5961
5962 /* Dump constants in descending alignment requirement order,
5963 ensuring proper alignment for every constant. */
5964 for (i = 0; i < NR_C_MODES; i++)
5965 for (c = pool->constants[i]; c; c = c->next)
5966 {
fd7643fb 5967 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
77340500 5968 rtx value = copy_rtx (c->value);
aee4e0db
UW
5969 if (GET_CODE (value) == CONST
5970 && GET_CODE (XEXP (value, 0)) == UNSPEC
fd7643fb 5971 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
aee4e0db
UW
5972 && XVECLEN (XEXP (value, 0), 0) == 1)
5973 {
5974 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
9c808aad 5975 gen_rtx_LABEL_REF (VOIDmode, pool->label));
aee4e0db
UW
5976 value = gen_rtx_CONST (VOIDmode, value);
5977 }
5978
b2ccb744
UW
5979 insn = emit_label_after (c->label, insn);
5980 INSN_ADDRESSES_NEW (insn, -1);
416cf582 5981
38899e29 5982 value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
416cf582
UW
5983 gen_rtvec (1, value),
5984 UNSPECV_POOL_ENTRY);
5985 insn = emit_insn_after (value, insn);
b2ccb744
UW
5986 INSN_ADDRESSES_NEW (insn, -1);
5987 }
5988
9bb86f41
UW
5989 /* Ensure minimum alignment for instructions. */
5990 insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
b2ccb744
UW
5991 INSN_ADDRESSES_NEW (insn, -1);
5992
9bb86f41
UW
5993 /* Output in-pool execute template insns. */
5994 for (c = pool->execute; c; c = c->next)
5995 {
9bb86f41
UW
5996 insn = emit_label_after (c->label, insn);
5997 INSN_ADDRESSES_NEW (insn, -1);
5998
5999 insn = emit_insn_after (s390_execute_target (c->value), insn);
6000 INSN_ADDRESSES_NEW (insn, -1);
6001 }
6002
6003 /* Switch back to previous section. */
6004 if (TARGET_CPU_ZARCH)
6005 {
6006 insn = emit_insn_after (gen_pool_section_end (), insn);
6007 INSN_ADDRESSES_NEW (insn, -1);
6008 }
6009
b2ccb744
UW
6010 insn = emit_barrier_after (insn);
6011 INSN_ADDRESSES_NEW (insn, -1);
6012
aee4e0db
UW
6013 /* Remove placeholder insn. */
6014 remove_insn (pool->pool_insn);
9bb86f41
UW
6015}
6016
b2ccb744
UW
6017/* Free all memory used by POOL. */
6018
6019static void
9c808aad 6020s390_free_pool (struct constant_pool *pool)
b2ccb744 6021{
9bb86f41 6022 struct constant *c, *next;
b2ccb744
UW
6023 int i;
6024
6025 for (i = 0; i < NR_C_MODES; i++)
9bb86f41
UW
6026 for (c = pool->constants[i]; c; c = next)
6027 {
6028 next = c->next;
6029 free (c);
6030 }
6031
6032 for (c = pool->execute; c; c = next)
b2ccb744 6033 {
9bb86f41
UW
6034 next = c->next;
6035 free (c);
b2ccb744
UW
6036 }
6037
7b210806 6038 BITMAP_FREE (pool->insns);
b2ccb744 6039 free (pool);
c7453384 6040}
b2ccb744 6041
b2ccb744 6042
5af2f3d3
UW
6043/* Collect main literal pool. Return NULL on overflow. */
6044
6045static struct constant_pool *
6046s390_mainpool_start (void)
6047{
6048 struct constant_pool *pool;
6049 rtx insn;
6050
6051 pool = s390_alloc_pool ();
6052
6053 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6054 {
6055 if (GET_CODE (insn) == INSN
585539a1
UW
6056 && GET_CODE (PATTERN (insn)) == SET
6057 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
6058 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
5af2f3d3 6059 {
8d933e31 6060 gcc_assert (!pool->pool_insn);
5af2f3d3
UW
6061 pool->pool_insn = insn;
6062 }
6063
d24959df 6064 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
9bb86f41
UW
6065 {
6066 s390_add_execute (pool, insn);
6067 }
6068 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5af2f3d3
UW
6069 {
6070 rtx pool_ref = NULL_RTX;
6071 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6072 if (pool_ref)
6073 {
6074 rtx constant = get_pool_constant (pool_ref);
6075 enum machine_mode mode = get_pool_mode (pool_ref);
6076 s390_add_constant (pool, constant, mode);
6077 }
6078 }
03870a04
AK
6079
6080 /* If hot/cold partitioning is enabled, we have to make sure that
6081 the literal pool is emitted in the same section where the
6082 initialization of the literal pool base pointer takes place.
6083 emit_pool_after is only used in the non-overflow case on non-Z
6084 CPUs, where we can emit the literal pool at the end of the
6085 function body within the text section. */
6086 if (NOTE_P (insn)
b49326f1
AK
6087 && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS
6088 && !pool->emit_pool_after)
6089 pool->emit_pool_after = PREV_INSN (insn);
5af2f3d3
UW
6090 }
6091
8d933e31 6092 gcc_assert (pool->pool_insn || pool->size == 0);
5af2f3d3
UW
6093
6094 if (pool->size >= 4096)
6095 {
d76e8439
UW
6096 /* The pool no longer fits the 4096-byte reach of a 12-bit displacement,
6097 so we're going to chunkify it; remove the main pool placeholder insn. */
6098 remove_insn (pool->pool_insn);
6099
5af2f3d3
UW
6100 s390_free_pool (pool);
6101 pool = NULL;
6102 }
6103
03870a04
AK
6104 /* If the function ends with the section where the literal pool
6105 should be emitted, set the marker to its end. */
b49326f1 6106 if (pool && !pool->emit_pool_after)
03870a04
AK
6107 pool->emit_pool_after = get_last_insn ();
6108
5af2f3d3
UW
6109 return pool;
6110}
6111
6112/* POOL holds the main literal pool as collected by s390_mainpool_start.
6113 Modify the current function to output the pool constants as well as
585539a1 6114 the pool register setup instruction. */
5af2f3d3
UW
6115
6116static void
585539a1 6117s390_mainpool_finish (struct constant_pool *pool)
5af2f3d3 6118{
91086990 6119 rtx base_reg = cfun->machine->base_reg;
5af2f3d3
UW
6120 rtx insn;
6121
6122 /* If the pool is empty, we're done. */
6123 if (pool->size == 0)
6124 {
91086990
UW
6125 /* We don't actually need a base register after all. */
6126 cfun->machine->base_reg = NULL_RTX;
6127
6128 if (pool->pool_insn)
6129 remove_insn (pool->pool_insn);
5af2f3d3
UW
6130 s390_free_pool (pool);
6131 return;
6132 }
6133
6134 /* We need correct insn addresses. */
6135 shorten_branches (get_insns ());
6136
9e8327e3 6137 /* On zSeries, we use a LARL to load the pool register. The pool is
5af2f3d3 6138 located in the .rodata section, so we emit it after the function. */
9e8327e3 6139 if (TARGET_CPU_ZARCH)
5af2f3d3
UW
6140 {
6141 insn = gen_main_base_64 (base_reg, pool->label);
6142 insn = emit_insn_after (insn, pool->pool_insn);
6143 INSN_ADDRESSES_NEW (insn, -1);
6144 remove_insn (pool->pool_insn);
38899e29
EC
6145
6146 insn = get_last_insn ();
5af2f3d3
UW
6147 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
6148 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
6149
6150 s390_dump_pool (pool, 0);
6151 }
6152
9e8327e3 6153 /* On S/390, if the total size of the function's code plus literal pool
5af2f3d3
UW
6154 does not exceed 4096 bytes, we use BASR to set up a function base
6155 pointer, and emit the literal pool at the end of the function. */
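/* (4096 bytes is the reach of the 12-bit displacement field in
   base+displacement addressing, so anything larger could not be addressed
   off a single base register.) */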
03870a04 6156 else if (INSN_ADDRESSES (INSN_UID (pool->emit_pool_after))
5af2f3d3
UW
6157 + pool->size + 8 /* alignment slop */ < 4096)
6158 {
6159 insn = gen_main_base_31_small (base_reg, pool->label);
6160 insn = emit_insn_after (insn, pool->pool_insn);
6161 INSN_ADDRESSES_NEW (insn, -1);
6162 remove_insn (pool->pool_insn);
6163
6164 insn = emit_label_after (pool->label, insn);
6165 INSN_ADDRESSES_NEW (insn, -1);
6166
03870a04
AK
6167 /* emit_pool_after will be set by s390_mainpool_start to the
6168 last insn of the section where the literal pool should be
6169 emitted. */
6170 insn = pool->emit_pool_after;
6171
5af2f3d3
UW
6172 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
6173 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
6174
6175 s390_dump_pool (pool, 1);
6176 }
6177
6178 /* Otherwise, we emit an inline literal pool and use BASR to branch
6179 over it, setting up the pool register at the same time. */
6180 else
6181 {
6182 rtx pool_end = gen_label_rtx ();
6183
6184 insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
6185 insn = emit_insn_after (insn, pool->pool_insn);
6186 INSN_ADDRESSES_NEW (insn, -1);
6187 remove_insn (pool->pool_insn);
6188
6189 insn = emit_label_after (pool->label, insn);
6190 INSN_ADDRESSES_NEW (insn, -1);
6191
6192 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
6193 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
6194
6195 insn = emit_label_after (pool_end, pool->pool_insn);
6196 INSN_ADDRESSES_NEW (insn, -1);
6197
6198 s390_dump_pool (pool, 1);
6199 }
6200
6201
6202 /* Replace all literal pool references. */
6203
6204 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6205 {
6206 if (INSN_P (insn))
585539a1 6207 replace_ltrel_base (&PATTERN (insn));
5af2f3d3
UW
6208
6209 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6210 {
6211 rtx addr, pool_ref = NULL_RTX;
6212 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6213 if (pool_ref)
6214 {
9bb86f41
UW
6215 if (s390_execute_label (insn))
6216 addr = s390_find_execute (pool, insn);
6217 else
6218 addr = s390_find_constant (pool, get_pool_constant (pool_ref),
6219 get_pool_mode (pool_ref));
6220
5af2f3d3
UW
6221 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
6222 INSN_CODE (insn) = -1;
6223 }
6224 }
6225 }
6226
6227
6228 /* Free the pool. */
6229 s390_free_pool (pool);
6230}
6231
6232/* POOL holds the main literal pool as collected by s390_mainpool_start.
6233 We have decided we cannot use this pool, so revert all changes
6234 to the current function that were done by s390_mainpool_start. */
6235static void
6236s390_mainpool_cancel (struct constant_pool *pool)
6237{
6238 /* We didn't actually change the instruction stream, so simply
6239 free the pool memory. */
6240 s390_free_pool (pool);
6241}
6242
6243
585539a1 6244/* Chunkify the literal pool. */
9db1d521 6245
b2ccb744
UW
6246#define S390_POOL_CHUNK_MIN 0xc00
6247#define S390_POOL_CHUNK_MAX 0xe00
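/* The chunk bounds sit well below the 4096-byte displacement limit,
   presumably to leave headroom for alignment padding and for the base
   register reload insns that get inserted afterwards. */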
6248
c7453384 6249static struct constant_pool *
585539a1 6250s390_chunkify_start (void)
9db1d521 6251{
b2ccb744
UW
6252 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
6253 int extra_size = 0;
6254 bitmap far_labels;
fd7643fb 6255 rtx pending_ltrel = NULL_RTX;
13e58269 6256 rtx insn;
9db1d521 6257
9c808aad 6258 rtx (*gen_reload_base) (rtx, rtx) =
9e8327e3 6259 TARGET_CPU_ZARCH? gen_reload_base_64 : gen_reload_base_31;
aee4e0db
UW
6260
6261
c3cc6b78
UW
6262 /* We need correct insn addresses. */
6263
6264 shorten_branches (get_insns ());
6265
fd7643fb 6266 /* Scan all insns and move literals to pool chunks. */
13e58269 6267
13e58269 6268 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9db1d521 6269 {
03870a04
AK
6270 bool section_switch_p = false;
6271
fd7643fb
UW
6272 /* Check for pending LTREL_BASE. */
6273 if (INSN_P (insn))
6274 {
6275 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
6276 if (ltrel_base)
6277 {
8d933e31
AS
6278 gcc_assert (ltrel_base == pending_ltrel);
6279 pending_ltrel = NULL_RTX;
fd7643fb
UW
6280 }
6281 }
6282
d24959df 6283 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
9bb86f41
UW
6284 {
6285 if (!curr_pool)
6286 curr_pool = s390_start_pool (&pool_list, insn);
6287
6288 s390_add_execute (curr_pool, insn);
6289 s390_add_pool_insn (curr_pool, insn);
6290 }
6291 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
b2ccb744 6292 {
aee4e0db 6293 rtx pool_ref = NULL_RTX;
b2ccb744
UW
6294 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6295 if (pool_ref)
6296 {
fd7643fb
UW
6297 rtx constant = get_pool_constant (pool_ref);
6298 enum machine_mode mode = get_pool_mode (pool_ref);
6299
b2ccb744
UW
6300 if (!curr_pool)
6301 curr_pool = s390_start_pool (&pool_list, insn);
6302
fd7643fb 6303 s390_add_constant (curr_pool, constant, mode);
aee4e0db 6304 s390_add_pool_insn (curr_pool, insn);
aee4e0db 6305
fd7643fb
UW
6306 /* Don't split the pool chunk between an LTREL_OFFSET load
6307 and the corresponding LTREL_BASE. */
6308 if (GET_CODE (constant) == CONST
6309 && GET_CODE (XEXP (constant, 0)) == UNSPEC
6310 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
6311 {
8d933e31 6312 gcc_assert (!pending_ltrel);
fd7643fb
UW
6313 pending_ltrel = pool_ref;
6314 }
b2ccb744
UW
6315 }
6316 }
6317
aee4e0db 6318 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
fd7643fb
UW
6319 {
6320 if (curr_pool)
6321 s390_add_pool_insn (curr_pool, insn);
6322 /* An LTREL_BASE must follow within the same basic block. */
8d933e31 6323 gcc_assert (!pending_ltrel);
fd7643fb 6324 }
aee4e0db 6325
03870a04
AK
6326 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
6327 section_switch_p = true;
6328
c7453384 6329 if (!curr_pool
b2ccb744
UW
6330 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
6331 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
9db1d521 6332 continue;
13e58269 6333
9e8327e3 6334 if (TARGET_CPU_ZARCH)
9db1d521 6335 {
b2ccb744
UW
6336 if (curr_pool->size < S390_POOL_CHUNK_MAX)
6337 continue;
13e58269 6338
aee4e0db 6339 s390_end_pool (curr_pool, NULL_RTX);
b2ccb744
UW
6340 curr_pool = NULL;
6341 }
6342 else
9db1d521 6343 {
b2ccb744 6344 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
9c808aad 6345 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
b2ccb744
UW
6346 + extra_size;
6347
6348 /* We will later have to insert base register reload insns.
6349 Those will have an effect on code size, which we need to
6350 consider here. This calculation makes rather pessimistic
6351 worst-case assumptions. */
aee4e0db 6352 if (GET_CODE (insn) == CODE_LABEL)
b2ccb744 6353 extra_size += 6;
b2ccb744
UW
6354
6355 if (chunk_size < S390_POOL_CHUNK_MIN
03870a04
AK
6356 && curr_pool->size < S390_POOL_CHUNK_MIN
6357 && !section_switch_p)
b2ccb744
UW
6358 continue;
6359
6360 /* Pool chunks can only be inserted after BARRIERs ... */
6361 if (GET_CODE (insn) == BARRIER)
6362 {
6363 s390_end_pool (curr_pool, insn);
6364 curr_pool = NULL;
6365 extra_size = 0;
6366 }
6367
6368 /* ... so if we don't find one in time, create one. */
03870a04
AK
6369 else if (chunk_size > S390_POOL_CHUNK_MAX
6370 || curr_pool->size > S390_POOL_CHUNK_MAX
6371 || section_switch_p)
b2ccb744 6372 {
b2ccb744
UW
6373 rtx label, jump, barrier;
6374
03870a04
AK
6375 if (!section_switch_p)
6376 {
6377 /* We can insert the barrier only after a 'real' insn. */
6378 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
6379 continue;
6380 if (get_attr_length (insn) == 0)
6381 continue;
6382 /* Don't separate LTREL_BASE from the corresponding
fd7643fb 6383 LTREL_OFFSET load. */
03870a04
AK
6384 if (pending_ltrel)
6385 continue;
6386 }
6387 else
6388 {
6389 gcc_assert (!pending_ltrel);
6390
6391 /* The old pool has to end before the section switch
6392 note in order to make it part of the current
6393 section. */
6394 insn = PREV_INSN (insn);
6395 }
aee4e0db 6396
9c808aad 6397 label = gen_label_rtx ();
b2ccb744
UW
6398 jump = emit_jump_insn_after (gen_jump (label), insn);
6399 barrier = emit_barrier_after (jump);
6400 insn = emit_label_after (label, barrier);
6401 JUMP_LABEL (jump) = label;
6402 LABEL_NUSES (label) = 1;
6403
aee4e0db
UW
6404 INSN_ADDRESSES_NEW (jump, -1);
6405 INSN_ADDRESSES_NEW (barrier, -1);
b2ccb744
UW
6406 INSN_ADDRESSES_NEW (insn, -1);
6407
6408 s390_end_pool (curr_pool, barrier);
6409 curr_pool = NULL;
6410 extra_size = 0;
6411 }
13e58269 6412 }
9db1d521 6413 }
ce50cae8 6414
aee4e0db
UW
6415 if (curr_pool)
6416 s390_end_pool (curr_pool, NULL_RTX);
8d933e31 6417 gcc_assert (!pending_ltrel);
b2ccb744 6418
c7453384 6419 /* Find all labels that are branched into
13e58269 6420 from an insn belonging to a different chunk. */
ce50cae8 6421
7b210806 6422 far_labels = BITMAP_ALLOC (NULL);
6bc627b3 6423
13e58269 6424 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9db1d521 6425 {
b2ccb744
UW
6426 /* Labels marked with LABEL_PRESERVE_P can be the target
6427 of non-local jumps, so we have to mark them.
6428 The same holds for named labels.
6429
6430 Don't do that, however, if it is the label before
6431 a jump table. */
6432
c7453384 6433 if (GET_CODE (insn) == CODE_LABEL
b2ccb744
UW
6434 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
6435 {
6436 rtx vec_insn = next_real_insn (insn);
c7453384 6437 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
b2ccb744
UW
6438 PATTERN (vec_insn) : NULL_RTX;
6439 if (!vec_pat
6440 || !(GET_CODE (vec_pat) == ADDR_VEC
6441 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
6442 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
6443 }
6444
6445 /* If we have a direct jump (conditional or unconditional)
6446 or a casesi jump, check all potential targets. */
c7453384 6447 else if (GET_CODE (insn) == JUMP_INSN)
13e58269
UW
6448 {
6449 rtx pat = PATTERN (insn);
0a3bdf9d
UW
6450 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
6451 pat = XVECEXP (pat, 0, 0);
6452
c7453384 6453 if (GET_CODE (pat) == SET)
13e58269 6454 {
aee4e0db 6455 rtx label = JUMP_LABEL (insn);
13e58269
UW
6456 if (label)
6457 {
c7453384 6458 if (s390_find_pool (pool_list, label)
b2ccb744
UW
6459 != s390_find_pool (pool_list, insn))
6460 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
13e58269 6461 }
c7453384 6462 }
b2ccb744
UW
6463 else if (GET_CODE (pat) == PARALLEL
6464 && XVECLEN (pat, 0) == 2
6465 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
6466 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
6467 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
6468 {
6469 /* Find the jump table used by this casesi jump. */
6470 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
6471 rtx vec_insn = next_real_insn (vec_label);
c7453384 6472 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
b2ccb744
UW
6473 PATTERN (vec_insn) : NULL_RTX;
6474 if (vec_pat
6475 && (GET_CODE (vec_pat) == ADDR_VEC
6476 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
6477 {
6478 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
13e58269 6479
b2ccb744
UW
6480 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
6481 {
6482 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
13e58269 6483
c7453384 6484 if (s390_find_pool (pool_list, label)
b2ccb744
UW
6485 != s390_find_pool (pool_list, insn))
6486 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
6487 }
6488 }
6489 }
13e58269 6490 }
9db1d521 6491 }
ce50cae8 6492
b2ccb744
UW
6493 /* Insert base register reload insns before every pool. */
6494
6495 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
aee4e0db 6496 {
585539a1
UW
6497 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
6498 curr_pool->label);
aee4e0db
UW
6499 rtx insn = curr_pool->first_insn;
6500 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
6501 }
b2ccb744
UW
6502
6503 /* Insert base register reload insns at every far label. */
13e58269 6504
13e58269 6505 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
c7453384 6506 if (GET_CODE (insn) == CODE_LABEL
b2ccb744
UW
6507 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
6508 {
6509 struct constant_pool *pool = s390_find_pool (pool_list, insn);
6510 if (pool)
6511 {
585539a1
UW
6512 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
6513 pool->label);
aee4e0db 6514 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
b2ccb744
UW
6515 }
6516 }
6517
aee4e0db 6518
7b210806 6519 BITMAP_FREE (far_labels);
13e58269 6520
13e58269
UW
6521
6522 /* Recompute insn addresses. */
6523
6524 init_insn_lengths ();
6525 shorten_branches (get_insns ());
9db1d521 6526
aee4e0db
UW
6527 return pool_list;
6528}
9db1d521 6529
aee4e0db 6530/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
c7453384 6531 After we have decided to use this list, finish implementing
585539a1 6532 all changes to the current function as required. */
c7453384 6533
aee4e0db 6534static void
585539a1 6535s390_chunkify_finish (struct constant_pool *pool_list)
aee4e0db 6536{
aee4e0db
UW
6537 struct constant_pool *curr_pool = NULL;
6538 rtx insn;
c7453384
EC
6539
6540
aee4e0db
UW
6541 /* Replace all literal pool references. */
6542
c7453384 6543 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
aee4e0db 6544 {
fd7643fb 6545 if (INSN_P (insn))
585539a1 6546 replace_ltrel_base (&PATTERN (insn));
fd7643fb 6547
aee4e0db
UW
6548 curr_pool = s390_find_pool (pool_list, insn);
6549 if (!curr_pool)
6550 continue;
6551
6552 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6553 {
6554 rtx addr, pool_ref = NULL_RTX;
6555 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6556 if (pool_ref)
6557 {
9bb86f41
UW
6558 if (s390_execute_label (insn))
6559 addr = s390_find_execute (curr_pool, insn);
6560 else
6561 addr = s390_find_constant (curr_pool,
6562 get_pool_constant (pool_ref),
6563 get_pool_mode (pool_ref));
6564
aee4e0db
UW
6565 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
6566 INSN_CODE (insn) = -1;
6567 }
aee4e0db
UW
6568 }
6569 }
6570
6571 /* Dump out all literal pools. */
c7453384 6572
aee4e0db 6573 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5af2f3d3 6574 s390_dump_pool (curr_pool, 0);
c7453384 6575
aee4e0db
UW
6576 /* Free pool list. */
6577
6578 while (pool_list)
6579 {
6580 struct constant_pool *next = pool_list->next;
6581 s390_free_pool (pool_list);
6582 pool_list = next;
6583 }
6584}
6585
6586/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
6587 We have decided we cannot use this list, so revert all changes
6588 to the current function that were done by s390_chunkify_start. */
c7453384 6589
aee4e0db 6590static void
9c808aad 6591s390_chunkify_cancel (struct constant_pool *pool_list)
aee4e0db
UW
6592{
6593 struct constant_pool *curr_pool = NULL;
6594 rtx insn;
6595
6596 /* Remove all pool placeholder insns. */
6597
6598 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6599 {
6600 /* Did we insert an extra barrier? Remove it. */
6601 rtx barrier = PREV_INSN (curr_pool->pool_insn);
6602 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
6603 rtx label = NEXT_INSN (curr_pool->pool_insn);
6604
6605 if (jump && GET_CODE (jump) == JUMP_INSN
6606 && barrier && GET_CODE (barrier) == BARRIER
6607 && label && GET_CODE (label) == CODE_LABEL
6608 && GET_CODE (PATTERN (jump)) == SET
6609 && SET_DEST (PATTERN (jump)) == pc_rtx
6610 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
6611 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
6612 {
6613 remove_insn (jump);
6614 remove_insn (barrier);
6615 remove_insn (label);
b2ccb744 6616 }
9db1d521 6617
aee4e0db
UW
6618 remove_insn (curr_pool->pool_insn);
6619 }
6620
fd7643fb 6621 /* Remove all base register reload insns. */
aee4e0db
UW
6622
6623 for (insn = get_insns (); insn; )
6624 {
6625 rtx next_insn = NEXT_INSN (insn);
6626
6627 if (GET_CODE (insn) == INSN
6628 && GET_CODE (PATTERN (insn)) == SET
6629 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
fd7643fb 6630 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
aee4e0db 6631 remove_insn (insn);
9db1d521 6632
aee4e0db
UW
6633 insn = next_insn;
6634 }
6635
6636 /* Free pool list. */
9db1d521 6637
b2ccb744 6638 while (pool_list)
9db1d521 6639 {
b2ccb744
UW
6640 struct constant_pool *next = pool_list->next;
6641 s390_free_pool (pool_list);
6642 pool_list = next;
9db1d521 6643 }
9db1d521
HP
6644}
6645
b2ccb744 6646
faeb9bb6 6647/* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
416cf582
UW
6648
6649void
faeb9bb6 6650s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
416cf582
UW
6651{
6652 REAL_VALUE_TYPE r;
6653
6654 switch (GET_MODE_CLASS (mode))
6655 {
6656 case MODE_FLOAT:
4dc19cc0 6657 case MODE_DECIMAL_FLOAT:
8d933e31 6658 gcc_assert (GET_CODE (exp) == CONST_DOUBLE);
416cf582
UW
6659
6660 REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
6661 assemble_real (r, mode, align);
6662 break;
6663
6664 case MODE_INT:
faeb9bb6 6665 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
416cf582
UW
6666 break;
6667
6668 default:
8d933e31 6669 gcc_unreachable ();
416cf582
UW
6670 }
6671}
6672
6673
ab96de7e
AS
6674/* Return an RTL expression representing the value of the return address
6675 for the frame COUNT steps up from the current frame. FRAME is the
6676 frame pointer of that frame. */
b2ccb744 6677
ab96de7e
AS
6678rtx
6679s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
b2ccb744 6680{
ab96de7e
AS
6681 int offset;
6682 rtx addr;
aee4e0db 6683
ab96de7e 6684 /* Without backchain, we fail for all but the current frame. */
c3cc6b78 6685
ab96de7e
AS
6686 if (!TARGET_BACKCHAIN && count > 0)
6687 return NULL_RTX;
c3cc6b78 6688
ab96de7e
AS
6689 /* For the current frame, we need to make sure the initial
6690 value of RETURN_REGNUM is actually saved. */
c3cc6b78 6691
ab96de7e 6692 if (count == 0)
c3cc6b78 6693 {
7bcebb25
AK
6694 /* On non-z architectures branch splitting could overwrite r14. */
6695 if (TARGET_CPU_ZARCH)
6696 return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
6697 else
6698 {
6699 cfun_frame_layout.save_return_addr_p = true;
6700 return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
6701 }
ab96de7e 6702 }
c3cc6b78 6703
ab96de7e
AS
6704 if (TARGET_PACKED_STACK)
6705 offset = -2 * UNITS_PER_WORD;
6706 else
6707 offset = RETURN_REGNUM * UNITS_PER_WORD;
c3cc6b78 6708
ab96de7e
AS
6709 addr = plus_constant (frame, offset);
6710 addr = memory_address (Pmode, addr);
6711 return gen_rtx_MEM (Pmode, addr);
6712}
c3cc6b78 6713
ab96de7e
AS
6714/* Return an RTL expression representing the back chain stored in
6715 the current stack frame. */
545d16ff 6716
ab96de7e
AS
6717rtx
6718s390_back_chain_rtx (void)
6719{
6720 rtx chain;
545d16ff 6721
ab96de7e 6722 gcc_assert (TARGET_BACKCHAIN);
545d16ff 6723
ab96de7e
AS
6724 if (TARGET_PACKED_STACK)
6725 chain = plus_constant (stack_pointer_rtx,
6726 STACK_POINTER_OFFSET - UNITS_PER_WORD);
6727 else
6728 chain = stack_pointer_rtx;
545d16ff 6729
ab96de7e
AS
6730 chain = gen_rtx_MEM (Pmode, chain);
6731 return chain;
6732}
c3cc6b78 6733
ab96de7e
AS
6734/* Find the first call-clobbered register unused in a function.
6735 This could be used as a base register in a leaf function
6736 or for holding the return address before the epilogue. */
c3cc6b78 6737
ab96de7e
AS
6738static int
6739find_unused_clobbered_reg (void)
6740{
6741 int i;
6742 for (i = 0; i < 6; i++)
6fb5fa3c 6743 if (!df_regs_ever_live_p (i))
ab96de7e
AS
6744 return i;
6745 return 0;
6746}
c3cc6b78 6747
7bcebb25
AK
6748
6749/* Helper function for s390_regs_ever_clobbered. Sets the fields in DATA for all
6750 clobbered hard regs in SETREG. */
6751
6752static void
7bc980e1 6753s390_reg_clobbered_rtx (rtx setreg, const_rtx set_insn ATTRIBUTE_UNUSED, void *data)
7bcebb25
AK
6754{
6755 int *regs_ever_clobbered = (int *)data;
6756 unsigned int i, regno;
6757 enum machine_mode mode = GET_MODE (setreg);
6758
6759 if (GET_CODE (setreg) == SUBREG)
6760 {
6761 rtx inner = SUBREG_REG (setreg);
6762 if (!GENERAL_REG_P (inner))
6763 return;
6764 regno = subreg_regno (setreg);
6765 }
6766 else if (GENERAL_REG_P (setreg))
6767 regno = REGNO (setreg);
6768 else
6769 return;
6770
6771 for (i = regno;
6772 i < regno + HARD_REGNO_NREGS (regno, mode);
6773 i++)
6774 regs_ever_clobbered[i] = 1;
6775}
6776
6777/* Walks through all basic blocks of the current function looking
6778 for clobbered hard regs using s390_reg_clobbered_rtx. The fields
6779 of the passed integer array REGS_EVER_CLOBBERED are set to one for
6780 each of those regs. */
6781
6782static void
6783s390_regs_ever_clobbered (int *regs_ever_clobbered)
6784{
6785 basic_block cur_bb;
6786 rtx cur_insn;
6787 unsigned int i;
6788
6789 memset (regs_ever_clobbered, 0, 16 * sizeof (int));
6790
6791 /* For non-leaf functions we have to consider all call clobbered regs to be
6792 clobbered. */
6793 if (!current_function_is_leaf)
6794 {
6795 for (i = 0; i < 16; i++)
6796 regs_ever_clobbered[i] = call_really_used_regs[i];
6797 }
6798
6799 /* Make the "magic" eh_return registers live if necessary. For regs_ever_live
6800 this work is done by liveness analysis (mark_regs_live_at_end).
6801 Special care is needed for functions containing landing pads. Landing pads
6802 may use the eh registers, but the code which sets these registers is not
6803 contained in that function. Hence s390_regs_ever_clobbered is not able to
6804 deal with this automatically. */
e3b5732b 6805 if (crtl->calls_eh_return || cfun->machine->has_landing_pad_p)
7bcebb25 6806 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++)
e3b5732b 6807 if (crtl->calls_eh_return
297a777d 6808 || (cfun->machine->has_landing_pad_p
6fb5fa3c 6809 && df_regs_ever_live_p (EH_RETURN_DATA_REGNO (i))))
297a777d 6810 regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1;
7bcebb25
AK
6811
6812 /* For nonlocal gotos all call-saved registers have to be saved.
6813 This flag is also set for the unwinding code in libgcc.
6814 See expand_builtin_unwind_init. For regs_ever_live this is done by
6815 reload. */
e3b5732b 6816 if (cfun->has_nonlocal_label)
7bcebb25
AK
6817 for (i = 0; i < 16; i++)
6818 if (!call_really_used_regs[i])
6819 regs_ever_clobbered[i] = 1;
6820
6821 FOR_EACH_BB (cur_bb)
6822 {
6823 FOR_BB_INSNS (cur_bb, cur_insn)
6824 {
6825 if (INSN_P (cur_insn))
6826 note_stores (PATTERN (cur_insn),
6827 s390_reg_clobbered_rtx,
6828 regs_ever_clobbered);
6829 }
6830 }
6831}
6832
ab96de7e
AS
6833/* Determine the frame area which actually has to be accessed
6834 in the function epilogue. The values are stored at the
6835 given pointers AREA_BOTTOM (address of the lowest used stack
6836 address) and AREA_TOP (address of the first item which does
6837 not belong to the stack frame). */
545d16ff 6838
ab96de7e
AS
6839static void
6840s390_frame_area (int *area_bottom, int *area_top)
6841{
6842 int b, t;
6843 int i;
545d16ff 6844
ab96de7e
AS
6845 b = INT_MAX;
6846 t = INT_MIN;
adf39f8f
AK
6847
6848 if (cfun_frame_layout.first_restore_gpr != -1)
6849 {
6850 b = (cfun_frame_layout.gprs_offset
6851 + cfun_frame_layout.first_restore_gpr * UNITS_PER_WORD);
6852 t = b + (cfun_frame_layout.last_restore_gpr
6853 - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_WORD;
6854 }
6855
6856 if (TARGET_64BIT && cfun_save_high_fprs_p)
6857 {
6858 b = MIN (b, cfun_frame_layout.f8_offset);
6859 t = MAX (t, (cfun_frame_layout.f8_offset
6860 + cfun_frame_layout.high_fprs * 8));
6861 }
6862
6863 if (!TARGET_64BIT)
6864 for (i = 2; i < 4; i++)
6865 if (cfun_fpr_bit_p (i))
6866 {
6867 b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
6868 t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
6869 }
6870
6871 *area_bottom = b;
6872 *area_top = t;
6873}
6874
91086990 6875/* Fill cfun->machine with info about register usage of current function.
7bcebb25 6876 Return in CLOBBERED_REGS which GPRs are currently considered set. */
4023fb28
UW
6877
6878static void
7bcebb25 6879s390_register_info (int clobbered_regs[])
4023fb28
UW
6880{
6881 int i, j;
4023fb28 6882
adf39f8f
AK
6883 /* fprs 8-15 are call-saved for the 64-bit ABI. */
6884 cfun_frame_layout.fpr_bitmap = 0;
6885 cfun_frame_layout.high_fprs = 0;
c3cc6b78 6886 if (TARGET_64BIT)
c7453384 6887 for (i = 24; i < 32; i++)
6fb5fa3c 6888 if (df_regs_ever_live_p (i) && !global_regs[i])
c3cc6b78 6889 {
adf39f8f
AK
6890 cfun_set_fpr_bit (i - 16);
6891 cfun_frame_layout.high_fprs++;
c3cc6b78 6892 }
4023fb28 6893
b767fc11
UW
6894 /* Find first and last gpr to be saved. We trust regs_ever_live
6895 data, except that we don't save and restore global registers.
545d16ff 6896
b767fc11
UW
6897 Also, all registers with special meaning to the compiler need
6898 extra handling. */
545d16ff 6899
7bcebb25
AK
6900 s390_regs_ever_clobbered (clobbered_regs);
6901
b767fc11 6902 for (i = 0; i < 16; i++)
e2df5c1d 6903 clobbered_regs[i] = clobbered_regs[i] && !global_regs[i] && !fixed_regs[i];
7bcebb25
AK
6904
6905 if (frame_pointer_needed)
6906 clobbered_regs[HARD_FRAME_POINTER_REGNUM] = 1;
c3cc6b78 6907
b767fc11 6908 if (flag_pic)
7bcebb25 6909 clobbered_regs[PIC_OFFSET_TABLE_REGNUM]
6fb5fa3c 6910 |= df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM);
91086990 6911
7bcebb25 6912 clobbered_regs[BASE_REGNUM]
e2df5c1d
UW
6913 |= (cfun->machine->base_reg
6914 && REGNO (cfun->machine->base_reg) == BASE_REGNUM);
91086990 6915
7bcebb25 6916 clobbered_regs[RETURN_REGNUM]
e2df5c1d 6917 |= (!current_function_is_leaf
dc4477f5 6918 || TARGET_TPF_PROFILING
e2df5c1d
UW
6919 || cfun->machine->split_branches_pending_p
6920 || cfun_frame_layout.save_return_addr_p
e3b5732b
JH
6921 || crtl->calls_eh_return
6922 || cfun->stdarg);
91086990 6923
7bcebb25 6924 clobbered_regs[STACK_POINTER_REGNUM]
e2df5c1d
UW
6925 |= (!current_function_is_leaf
6926 || TARGET_TPF_PROFILING
6927 || cfun_save_high_fprs_p
6928 || get_frame_size () > 0
e3b5732b
JH
6929 || cfun->calls_alloca
6930 || cfun->stdarg);
7bcebb25 6931
b767fc11 6932 for (i = 6; i < 16; i++)
6fb5fa3c 6933 if (df_regs_ever_live_p (i) || clobbered_regs[i])
b767fc11 6934 break;
4023fb28 6935 for (j = 15; j > i; j--)
6fb5fa3c 6936 if (df_regs_ever_live_p (j) || clobbered_regs[j])
b767fc11 6937 break;
c3cc6b78 6938
b767fc11
UW
6939 if (i == 16)
6940 {
6941 /* Nothing to save/restore. */
fb3712f6
AK
6942 cfun_frame_layout.first_save_gpr_slot = -1;
6943 cfun_frame_layout.last_save_gpr_slot = -1;
adf39f8f
AK
6944 cfun_frame_layout.first_save_gpr = -1;
6945 cfun_frame_layout.first_restore_gpr = -1;
6946 cfun_frame_layout.last_save_gpr = -1;
6947 cfun_frame_layout.last_restore_gpr = -1;
b767fc11
UW
6948 }
6949 else
6950 {
fb3712f6
AK
6951 /* Save slots for gprs from i to j. */
6952 cfun_frame_layout.first_save_gpr_slot = i;
6953 cfun_frame_layout.last_save_gpr_slot = j;
6954
6955 for (i = cfun_frame_layout.first_save_gpr_slot;
6956 i < cfun_frame_layout.last_save_gpr_slot + 1;
6957 i++)
6958 if (clobbered_regs[i])
6959 break;
6960
6961 for (j = cfun_frame_layout.last_save_gpr_slot; j > i; j--)
6962 if (clobbered_regs[j])
6963 break;
6964
6965 if (i == cfun_frame_layout.last_save_gpr_slot + 1)
6966 {
6967 /* Nothing to save/restore. */
6968 cfun_frame_layout.first_save_gpr = -1;
6969 cfun_frame_layout.first_restore_gpr = -1;
6970 cfun_frame_layout.last_save_gpr = -1;
6971 cfun_frame_layout.last_restore_gpr = -1;
6972 }
6973 else
6974 {
6975 /* Save / Restore from gpr i to j. */
6976 cfun_frame_layout.first_save_gpr = i;
6977 cfun_frame_layout.first_restore_gpr = i;
6978 cfun_frame_layout.last_save_gpr = j;
6979 cfun_frame_layout.last_restore_gpr = j;
6980 }
b767fc11 6981 }
c3cc6b78 6982
e3b5732b 6983 if (cfun->stdarg)
b767fc11 6984 {
adf39f8f 6985 /* Varargs functions need to save gprs 2 to 6. */
29a79fcf 6986 if (cfun->va_list_gpr_size
38173d38 6987 && crtl->args.info.gprs < GP_ARG_NUM_REG)
29a79fcf 6988 {
38173d38 6989 int min_gpr = crtl->args.info.gprs;
29a79fcf
UW
6990 int max_gpr = min_gpr + cfun->va_list_gpr_size;
6991 if (max_gpr > GP_ARG_NUM_REG)
6992 max_gpr = GP_ARG_NUM_REG;
6993
6994 if (cfun_frame_layout.first_save_gpr == -1
6995 || cfun_frame_layout.first_save_gpr > 2 + min_gpr)
fb3712f6
AK
6996 {
6997 cfun_frame_layout.first_save_gpr = 2 + min_gpr;
6998 cfun_frame_layout.first_save_gpr_slot = 2 + min_gpr;
6999 }
29a79fcf
UW
7000
7001 if (cfun_frame_layout.last_save_gpr == -1
7002 || cfun_frame_layout.last_save_gpr < 2 + max_gpr - 1)
fb3712f6
AK
7003 {
7004 cfun_frame_layout.last_save_gpr = 2 + max_gpr - 1;
7005 cfun_frame_layout.last_save_gpr_slot = 2 + max_gpr - 1;
7006 }
29a79fcf 7007 }
b767fc11 7008
adf39f8f 7009 /* Mark f0, f2 for 31 bit and f0-f4 for 64 bit to be saved. */
29a79fcf 7010 if (TARGET_HARD_FLOAT && cfun->va_list_fpr_size
38173d38 7011 && crtl->args.info.fprs < FP_ARG_NUM_REG)
29a79fcf 7012 {
38173d38 7013 int min_fpr = crtl->args.info.fprs;
29a79fcf
UW
7014 int max_fpr = min_fpr + cfun->va_list_fpr_size;
7015 if (max_fpr > FP_ARG_NUM_REG)
7016 max_fpr = FP_ARG_NUM_REG;
7017
7018 /* ??? This is currently required to ensure proper location
7019 of the fpr save slots within the va_list save area. */
7020 if (TARGET_PACKED_STACK)
7021 min_fpr = 0;
7022
7023 for (i = min_fpr; i < max_fpr; i++)
7024 cfun_set_fpr_bit (i);
7025 }
adf39f8f
AK
7026 }
7027
7028 if (!TARGET_64BIT)
7029 for (i = 2; i < 4; i++)
6fb5fa3c 7030 if (df_regs_ever_live_p (i + 16) && !global_regs[i + 16])
adf39f8f
AK
7031 cfun_set_fpr_bit (i);
7032}
7033
91086990 7034/* Fill cfun->machine with info about frame of current function. */
adf39f8f
AK
7035
7036static void
91086990 7037s390_frame_info (void)
adf39f8f
AK
7038{
7039 int i;
7040
7041 cfun_frame_layout.frame_size = get_frame_size ();
adf39f8f 7042 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
c85ce869 7043 fatal_error ("total size of local variables exceeds architecture limit");
adf39f8f 7044
b3d31392 7045 if (!TARGET_PACKED_STACK)
adf39f8f
AK
7046 {
7047 cfun_frame_layout.backchain_offset = 0;
7048 cfun_frame_layout.f0_offset = 16 * UNITS_PER_WORD;
7049 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
7050 cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
fb3712f6 7051 cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr_slot
adf39f8f
AK
7052 * UNITS_PER_WORD);
7053 }
b3d31392 7054 else if (TARGET_BACKCHAIN) /* kernel stack layout */
adf39f8f
AK
7055 {
7056 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
7057 - UNITS_PER_WORD);
7058 cfun_frame_layout.gprs_offset
7059 = (cfun_frame_layout.backchain_offset
fb3712f6 7060 - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr_slot + 1)
adf39f8f
AK
7061 * UNITS_PER_WORD);
7062
7063 if (TARGET_64BIT)
7064 {
7065 cfun_frame_layout.f4_offset
7066 = (cfun_frame_layout.gprs_offset
7067 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
7068
7069 cfun_frame_layout.f0_offset
7070 = (cfun_frame_layout.f4_offset
7071 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
7072 }
7073 else
7074 {
ea506297
AK
7075 /* On 31 bit we have to care about the alignment of the
7076 floating point regs to provide the fastest access. */
adf39f8f 7077 cfun_frame_layout.f0_offset
ea506297
AK
7078 = ((cfun_frame_layout.gprs_offset
7079 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
adf39f8f
AK
7080 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
7081
7082 cfun_frame_layout.f4_offset
7083 = (cfun_frame_layout.f0_offset
7084 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
7085 }
7086 }
7087 else /* no backchain */
7088 {
7089 cfun_frame_layout.f4_offset
7090 = (STACK_POINTER_OFFSET
7091 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
7092
7093 cfun_frame_layout.f0_offset
7094 = (cfun_frame_layout.f4_offset
7095 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
7096
7097 cfun_frame_layout.gprs_offset
7098 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
7099 }
7100
7101 if (current_function_is_leaf
7102 && !TARGET_TPF_PROFILING
7103 && cfun_frame_layout.frame_size == 0
7104 && !cfun_save_high_fprs_p
e3b5732b
JH
7105 && !cfun->calls_alloca
7106 && !cfun->stdarg)
adf39f8f
AK
7107 return;
7108
b3d31392 7109 if (!TARGET_PACKED_STACK)
63296cb1 7110 cfun_frame_layout.frame_size += (STACK_POINTER_OFFSET
38173d38 7111 + crtl->outgoing_args_size
adf39f8f
AK
7112 + cfun_frame_layout.high_fprs * 8);
7113 else
7114 {
66480e91
AK
7115 if (TARGET_BACKCHAIN)
7116 cfun_frame_layout.frame_size += UNITS_PER_WORD;
ea506297
AK
7117
7118 /* No alignment trouble here because f8-f15 are only saved under
7119 64 bit. */
adf39f8f
AK
7120 cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
7121 cfun_frame_layout.f4_offset),
7122 cfun_frame_layout.gprs_offset)
7123 - cfun_frame_layout.high_fprs * 8);
7124
7125 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;
7126
7127 for (i = 0; i < 8; i++)
7128 if (cfun_fpr_bit_p (i))
7129 cfun_frame_layout.frame_size += 8;
7130
7131 cfun_frame_layout.frame_size += cfun_gprs_save_area_size;
ea506297
AK
7132
7133 /* If an odd number of gprs has to be saved under 31 bit, we have to adjust
7134 the frame size to sustain 8 byte alignment of stack frames (see the sketch after this function). */
adf39f8f
AK
7135 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
7136 STACK_BOUNDARY / BITS_PER_UNIT - 1)
7137 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));
7138
38173d38 7139 cfun_frame_layout.frame_size += crtl->outgoing_args_size;
b767fc11 7140 }
4023fb28
UW
7141}
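/* Editor's illustration (not part of the backend): the frame size
   rounding above is the usual align-up idiom.  Assuming an 8 byte
   STACK_BOUNDARY / BITS_PER_UNIT, the computation reduces to

     frame_size = (frame_size + 8 - 1) & ~(8 - 1);

   so e.g. a raw size of 60 bytes is rounded up to 64, while a size
   that is already a multiple of 8 is left unchanged.  */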
7142
91086990
UW
7143/* Generate frame layout. Fills in register and frame data for the current
7144 function in cfun->machine. This routine can be called multiple times;
7145 it will re-do the complete frame layout every time. */
4023fb28 7146
91086990
UW
7147static void
7148s390_init_frame_layout (void)
9db1d521 7149{
91086990
UW
7150 HOST_WIDE_INT frame_size;
7151 int base_used;
7bcebb25 7152 int clobbered_regs[16];
b767fc11 7153
91086990
UW
7154 /* On S/390 machines, we may need to perform branch splitting, which
7155 will require both base and return address register. We have no
7156 choice but to assume we're going to need them until right at the
7157 end of the machine dependent reorg phase. */
7158 if (!TARGET_CPU_ZARCH)
7159 cfun->machine->split_branches_pending_p = true;
7160
7161 do
7162 {
7163 frame_size = cfun_frame_layout.frame_size;
7164
7165 /* Try to predict whether we'll need the base register. */
7166 base_used = cfun->machine->split_branches_pending_p
e3b5732b 7167 || crtl->uses_const_pool
20f04e65
AK
7168 || (!DISP_IN_RANGE (frame_size)
7169 && !CONST_OK_FOR_K (frame_size));
91086990
UW
7170
7171 /* Decide which register to use as literal pool base. In small
7172 leaf functions, try to use an unused call-clobbered register
7173 as base register to avoid save/restore overhead. */
7174 if (!base_used)
7175 cfun->machine->base_reg = NULL_RTX;
6fb5fa3c 7176 else if (current_function_is_leaf && !df_regs_ever_live_p (5))
91086990
UW
7177 cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
7178 else
7179 cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);
adf39f8f 7180
7bcebb25 7181 s390_register_info (clobbered_regs);
91086990
UW
7182 s390_frame_info ();
7183 }
7184 while (frame_size != cfun_frame_layout.frame_size);
9db1d521
HP
7185}
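/* Editor's note (illustrative): the do/while above iterates to a fixed
   point because the decisions are mutually dependent: whether a literal
   pool base register is needed depends on the frame size (the
   DISP_IN_RANGE / CONST_OK_FOR_K test above), and reserving a base
   register in turn changes the register save needs and thus the frame
   size.  In sketch form (hypothetical helper names):

     do {
       old_size = frame_size;
       pick_base_register ();      // depends on old_size
       recompute_frame_layout ();  // may change frame_size
     } while (frame_size != old_size);
  */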
7186
91086990
UW
7187/* Update frame layout. Recompute actual register save data based on
7188 current info and update regs_ever_live for the special registers.
7189 May be called multiple times, but may never cause *more* registers
7190 to be saved than s390_init_frame_layout allocated room for. */
7191
7192static void
7193s390_update_frame_layout (void)
7194{
7bcebb25 7195 int clobbered_regs[16];
91086990 7196
7bcebb25 7197 s390_register_info (clobbered_regs);
91086990 7198
6fb5fa3c
DB
7199 df_set_regs_ever_live (BASE_REGNUM,
7200 clobbered_regs[BASE_REGNUM] ? true : false);
7201 df_set_regs_ever_live (RETURN_REGNUM,
7202 clobbered_regs[RETURN_REGNUM] ? true : false);
7203 df_set_regs_ever_live (STACK_POINTER_REGNUM,
7204 clobbered_regs[STACK_POINTER_REGNUM] ? true : false);
91086990
UW
7205
7206 if (cfun->machine->base_reg)
6fb5fa3c 7207 df_set_regs_ever_live (REGNO (cfun->machine->base_reg), true);
91086990
UW
7208}
7209
74aa8b4b
AK
7210/* Return true if it is legal to put a value with MODE into REGNO. */
7211
7212bool
7213s390_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
7214{
7215 switch (REGNO_REG_CLASS (regno))
7216 {
7217 case FP_REGS:
7218 if (REGNO_PAIR_OK (regno, mode))
7219 {
7220 if (mode == SImode || mode == DImode)
7221 return true;
7222
7223 if (FLOAT_MODE_P (mode) && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
7224 return true;
7225 }
7226 break;
7227 case ADDR_REGS:
7228 if (FRAME_REGNO_P (regno) && mode == Pmode)
7229 return true;
7230
7231 /* fallthrough */
7232 case GENERAL_REGS:
7233 if (REGNO_PAIR_OK (regno, mode))
7234 {
7235 if (TARGET_64BIT
4dc19cc0 7236 || (mode != TFmode && mode != TCmode && mode != TDmode))
74aa8b4b
AK
7237 return true;
7238 }
7239 break;
7240 case CC_REGS:
7241 if (GET_MODE_CLASS (mode) == MODE_CC)
7242 return true;
7243 break;
7244 case ACCESS_REGS:
7245 if (REGNO_PAIR_OK (regno, mode))
7246 {
7247 if (mode == SImode || mode == Pmode)
7248 return true;
7249 }
7250 break;
7251 default:
7252 return false;
7253 }
7254
7255 return false;
7256}
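/* Editor's examples (illustrative of the checks above): a TFmode value
   is accepted in an FP register pair (FLOAT_MODE_P and REGNO_PAIR_OK),
   but on 31 bit it is rejected for general registers, while DImode is
   fine both in an FP register and in a 31 bit even/odd GPR pair.  */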
7257
7633f08e
UW
7258/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
7259
7260bool
7261s390_hard_regno_rename_ok (unsigned int old_reg, unsigned int new_reg)
7262{
7263 /* Once we've decided upon a register to use as base register, it must
7264 no longer be used for any other purpose. */
7265 if (cfun->machine->base_reg)
7266 if (REGNO (cfun->machine->base_reg) == old_reg
7267 || REGNO (cfun->machine->base_reg) == new_reg)
7268 return false;
7269
7270 return true;
7271}
7272
74aa8b4b
AK
7273/* Maximum number of registers to represent a value of mode MODE
7274 in a register of class CLASS. */
7275
7276 int
7277s390_class_max_nregs (enum reg_class class, enum machine_mode mode)
7278{
7279 switch (class)
7280 {
7281 case FP_REGS:
7282 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7283 return 2 * ((GET_MODE_SIZE (mode) / 2 + 8 - 1) / 8);
7284 else
7285 return (GET_MODE_SIZE (mode) + 8 - 1) / 8;
7286 case ACCESS_REGS:
7287 return (GET_MODE_SIZE (mode) + 4 - 1) / 4;
7288 default:
7289 break;
7290 }
7291 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
7292}
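/* Editor's examples (illustrative): the divisions above are ceiling
   divisions.  A 16 byte TFmode value therefore occupies
   (16 + 8 - 1) / 8 == 2 FP_REGS, and on 31 bit (UNITS_PER_WORD == 4)
   an 8 byte DImode value occupies (8 + 4 - 1) / 4 == 2 general
   registers.  */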
7293
91086990
UW
7294/* Return true if register FROM can be eliminated via register TO. */
7295
7296bool
7297s390_can_eliminate (int from, int to)
7298{
7633f08e
UW
7299 /* On zSeries machines, we have not marked the base register as fixed.
7300 Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM.
7301 If a function requires the base register, we say here that this
7302 elimination cannot be performed. This will cause reload to free
7303 up the base register (as if it were fixed). On the other hand,
7304 if the current function does *not* require the base register, we
7305 say here the elimination succeeds, which in turn allows reload
7306 to allocate the base register for any other purpose. */
7307 if (from == BASE_REGNUM && to == BASE_REGNUM)
7308 {
7309 if (TARGET_CPU_ZARCH)
7310 {
7311 s390_init_frame_layout ();
7312 return cfun->machine->base_reg == NULL_RTX;
7313 }
7314
7315 return false;
7316 }
7317
7318 /* Everything else must point into the stack frame. */
91086990
UW
7319 gcc_assert (to == STACK_POINTER_REGNUM
7320 || to == HARD_FRAME_POINTER_REGNUM);
7321
7322 gcc_assert (from == FRAME_POINTER_REGNUM
7323 || from == ARG_POINTER_REGNUM
7324 || from == RETURN_ADDRESS_POINTER_REGNUM);
7325
7326 /* Make sure we actually saved the return address. */
7327 if (from == RETURN_ADDRESS_POINTER_REGNUM)
e3b5732b
JH
7328 if (!crtl->calls_eh_return
7329 && !cfun->stdarg
91086990
UW
7330 && !cfun_frame_layout.save_return_addr_p)
7331 return false;
7332
7333 return true;
7334}
7335
7336/* Return offset between register FROM and TO initially after prolog. */
a38e09bc
AK
7337
7338HOST_WIDE_INT
91086990 7339s390_initial_elimination_offset (int from, int to)
a38e09bc 7340{
91086990
UW
7341 HOST_WIDE_INT offset;
7342 int index;
a38e09bc 7343
91086990
UW
7344 /* ??? Why are we called for non-eliminable pairs? */
7345 if (!s390_can_eliminate (from, to))
7346 return 0;
7347
7348 switch (from)
7349 {
7350 case FRAME_POINTER_REGNUM:
63296cb1
AK
7351 offset = (get_frame_size ()
7352 + STACK_POINTER_OFFSET
38173d38 7353 + crtl->outgoing_args_size);
91086990 7354 break;
adf39f8f 7355
91086990
UW
7356 case ARG_POINTER_REGNUM:
7357 s390_init_frame_layout ();
7358 offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
7359 break;
7360
7361 case RETURN_ADDRESS_POINTER_REGNUM:
7362 s390_init_frame_layout ();
fb3712f6 7363 index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr_slot;
91086990
UW
7364 gcc_assert (index >= 0);
7365 offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
7366 offset += index * UNITS_PER_WORD;
7367 break;
7368
7633f08e
UW
7369 case BASE_REGNUM:
7370 offset = 0;
7371 break;
7372
91086990
UW
7373 default:
7374 gcc_unreachable ();
7375 }
7376
7377 return offset;
a38e09bc
AK
7378}
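/* Editor's example (illustrative): in the ARG_POINTER case above, a
   64 bit function with a 96 byte frame yields an offset of
   96 + 160 == 256, STACK_POINTER_OFFSET being 160 on 64 bit (96 on
   31 bit).  */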
7379
4023fb28 7380/* Emit insn to save fpr REGNUM at offset OFFSET relative
c7453384 7381 to register BASE. Return generated insn. */
994fe660 7382
9db1d521 7383static rtx
9c808aad 7384save_fpr (rtx base, int offset, int regnum)
9db1d521 7385{
4023fb28
UW
7386 rtx addr;
7387 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
dcc9eb26
AK
7388
7389 if (regnum >= 16 && regnum <= (16 + FP_ARG_NUM_REG))
7390 set_mem_alias_set (addr, get_varargs_alias_set ());
7391 else
7392 set_mem_alias_set (addr, get_frame_alias_set ());
9db1d521 7393
4023fb28
UW
7394 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
7395}
9db1d521 7396
4023fb28 7397/* Emit insn to restore fpr REGNUM from offset OFFSET relative
c7453384 7398 to register BASE. Return generated insn. */
9db1d521 7399
4023fb28 7400static rtx
9c808aad 7401restore_fpr (rtx base, int offset, int regnum)
4023fb28
UW
7402{
7403 rtx addr;
7404 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
dcc9eb26 7405 set_mem_alias_set (addr, get_frame_alias_set ());
9db1d521 7406
4023fb28 7407 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
9db1d521
HP
7408}
7409
c3cc6b78 7410/* Generate insn to save registers FIRST to LAST into
c7453384 7411 the register save area located at offset OFFSET
c3cc6b78 7412 relative to register BASE. */
9db1d521 7413
c3cc6b78 7414static rtx
9c808aad 7415save_gprs (rtx base, int offset, int first, int last)
9db1d521 7416{
c3cc6b78
UW
7417 rtx addr, insn, note;
7418 int i;
7419
adf39f8f 7420 addr = plus_constant (base, offset);
c3cc6b78 7421 addr = gen_rtx_MEM (Pmode, addr);
dcc9eb26
AK
7422
7423 set_mem_alias_set (addr, get_frame_alias_set ());
c3cc6b78
UW
7424
7425 /* Special-case single register. */
7426 if (first == last)
7427 {
7428 if (TARGET_64BIT)
7429 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
7430 else
7431 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
7432
7433 RTX_FRAME_RELATED_P (insn) = 1;
7434 return insn;
7435 }
7436
7437
7438 insn = gen_store_multiple (addr,
7439 gen_rtx_REG (Pmode, first),
7440 GEN_INT (last - first + 1));
7441
e3b5732b 7442 if (first <= 6 && cfun->stdarg)
dcc9eb26
AK
7443 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7444 {
7445 rtx mem = XEXP (XVECEXP (PATTERN (insn), 0, i), 0);
7446
7447 if (first + i <= 6)
7448 set_mem_alias_set (mem, get_varargs_alias_set ());
7449 }
c3cc6b78
UW
7450
7451 /* We need to set the FRAME_RELATED flag on all SETs
7452 inside the store-multiple pattern.
7453
7454 However, we must not emit DWARF records for registers 2..5
c7453384 7455 if they are stored for use by variable arguments ...
c3cc6b78 7456
a4d05547 7457 ??? Unfortunately, it is not enough to simply not set the
c3cc6b78
UW
7458 FRAME_RELATED flags for those SETs, because the first SET
7459 of the PARALLEL is always treated as if it had the flag
7460 set, even if it does not. Therefore we emit a new pattern
7461 without those registers as REG_FRAME_RELATED_EXPR note. */
7462
7463 if (first >= 6)
7464 {
7465 rtx pat = PATTERN (insn);
7466
7467 for (i = 0; i < XVECLEN (pat, 0); i++)
7468 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
7469 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
7470
7471 RTX_FRAME_RELATED_P (insn) = 1;
7472 }
7473 else if (last >= 6)
7474 {
adf39f8f 7475 addr = plus_constant (base, offset + (6 - first) * UNITS_PER_WORD);
c7453384 7476 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
c3cc6b78
UW
7477 gen_rtx_REG (Pmode, 6),
7478 GEN_INT (last - 6 + 1));
7479 note = PATTERN (note);
7480
7481 REG_NOTES (insn) =
c7453384 7482 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
c3cc6b78
UW
7483 note, REG_NOTES (insn));
7484
7485 for (i = 0; i < XVECLEN (note, 0); i++)
7486 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
7487 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
7488
7489 RTX_FRAME_RELATED_P (insn) = 1;
7490 }
7491
7492 return insn;
4023fb28 7493}
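/* Editor's note (illustrative): for a stdarg function saving r2..r15,
   the store-multiple pattern emitted above covers all fourteen
   registers, but the attached REG_FRAME_RELATED_EXPR note describes
   only the stores of r6..r15, so no DWARF records are created for the
   argument registers r2..r5.  */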
9db1d521 7494
c3cc6b78 7495/* Generate insn to restore registers FIRST to LAST from
c7453384 7496 the register save area located at offset OFFSET
c3cc6b78 7497 relative to register BASE. */
9db1d521 7498
c3cc6b78 7499static rtx
9c808aad 7500restore_gprs (rtx base, int offset, int first, int last)
4023fb28 7501{
c3cc6b78
UW
7502 rtx addr, insn;
7503
adf39f8f 7504 addr = plus_constant (base, offset);
c3cc6b78 7505 addr = gen_rtx_MEM (Pmode, addr);
dcc9eb26 7506 set_mem_alias_set (addr, get_frame_alias_set ());
c3cc6b78
UW
7507
7508 /* Special-case single register. */
7509 if (first == last)
7510 {
7511 if (TARGET_64BIT)
7512 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
7513 else
7514 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
7515
7516 return insn;
7517 }
7518
7519 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
7520 addr,
7521 GEN_INT (last - first + 1));
7522 return insn;
4023fb28 7523}
9db1d521 7524
585539a1 7525/* Return insn sequence to load the GOT register. */
fd7643fb
UW
7526
7527static GTY(()) rtx got_symbol;
585539a1
UW
7528rtx
7529s390_load_got (void)
fd7643fb 7530{
585539a1
UW
7531 rtx insns;
7532
fd7643fb
UW
7533 if (!got_symbol)
7534 {
7535 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
7536 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
7537 }
7538
585539a1
UW
7539 start_sequence ();
7540
9e8327e3 7541 if (TARGET_CPU_ZARCH)
fd7643fb 7542 {
585539a1 7543 emit_move_insn (pic_offset_table_rtx, got_symbol);
fd7643fb
UW
7544 }
7545 else
7546 {
585539a1 7547 rtx offset;
fd7643fb 7548
c7453384 7549 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
fd7643fb
UW
7550 UNSPEC_LTREL_OFFSET);
7551 offset = gen_rtx_CONST (Pmode, offset);
7552 offset = force_const_mem (Pmode, offset);
7553
585539a1 7554 emit_move_insn (pic_offset_table_rtx, offset);
fd7643fb 7555
c7453384 7556 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
fd7643fb
UW
7557 UNSPEC_LTREL_BASE);
7558 offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
7559
585539a1 7560 emit_move_insn (pic_offset_table_rtx, offset);
fd7643fb 7561 }
585539a1
UW
7562
7563 insns = get_insns ();
7564 end_sequence ();
7565 return insns;
fd7643fb
UW
7566}
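/* Editor's usage sketch (this mirrors the prologue code further down):

     rtx insns = s390_load_got ();
     for (insn = insns; insn; insn = NEXT_INSN (insn))
       annotate_constant_pool_refs (&PATTERN (insn));
     emit_insn (insns);
  */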
7567
12959abe
AK
7568/* This ties together stack memory (MEM with an alias set of frame_alias_set)
7569 and the change to the stack pointer. */
7570
7571static void
7572s390_emit_stack_tie (void)
7573{
7574 rtx mem = gen_frame_mem (BLKmode,
7575 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
7576
7577 emit_insn (gen_stack_tie (mem));
7578}
7579
4023fb28 7580/* Expand the prologue into a bunch of separate insns. */
9db1d521 7581
4023fb28 7582void
9c808aad 7583s390_emit_prologue (void)
4023fb28 7584{
4023fb28
UW
7585 rtx insn, addr;
7586 rtx temp_reg;
2c153108 7587 int i;
adf39f8f
AK
7588 int offset;
7589 int next_fpr = 0;
9db1d521 7590
91086990 7591 /* Complete frame layout. */
b767fc11 7592
91086990 7593 s390_update_frame_layout ();
4023fb28 7594
585539a1
UW
7595 /* Annotate all constant pool references to let the scheduler know
7596 they implicitly use the base register. */
7597
7598 push_topmost_sequence ();
7599
7600 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7601 if (INSN_P (insn))
6fb5fa3c
DB
7602 {
7603 annotate_constant_pool_refs (&PATTERN (insn));
7604 df_insn_rescan (insn);
7605 }
585539a1
UW
7606
7607 pop_topmost_sequence ();
7608
c7453384
EC
7609 /* Choose best register to use for temp use within prologue.
7610 See below for why TPF must use the register 1. */
7611
7bcebb25
AK
7612 if (!has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
7613 && !current_function_is_leaf
7614 && !TARGET_TPF_PROFILING)
4023fb28 7615 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
9db1d521 7616 else
4023fb28 7617 temp_reg = gen_rtx_REG (Pmode, 1);
9db1d521 7618
4023fb28 7619 /* Save call saved gprs. */
adf39f8f 7620 if (cfun_frame_layout.first_save_gpr != -1)
2790879f
AK
7621 {
7622 insn = save_gprs (stack_pointer_rtx,
fb3712f6
AK
7623 cfun_frame_layout.gprs_offset +
7624 UNITS_PER_WORD * (cfun_frame_layout.first_save_gpr
7625 - cfun_frame_layout.first_save_gpr_slot),
2790879f
AK
7626 cfun_frame_layout.first_save_gpr,
7627 cfun_frame_layout.last_save_gpr);
7628 emit_insn (insn);
7629 }
4023fb28 7630
5af2f3d3 7631 /* Dummy insn to mark literal pool slot. */
c7453384 7632
91086990
UW
7633 if (cfun->machine->base_reg)
7634 emit_insn (gen_main_pool (cfun->machine->base_reg));
c7453384 7635
adf39f8f 7636 offset = cfun_frame_layout.f0_offset;
4023fb28 7637
adf39f8f
AK
7638 /* Save f0 and f2. */
7639 for (i = 0; i < 2; i++)
7640 {
7641 if (cfun_fpr_bit_p (i))
7642 {
7643 save_fpr (stack_pointer_rtx, offset, i + 16);
7644 offset += 8;
7645 }
b3d31392 7646 else if (!TARGET_PACKED_STACK)
adf39f8f
AK
7647 offset += 8;
7648 }
9db1d521 7649
adf39f8f
AK
7650 /* Save f4 and f6. */
7651 offset = cfun_frame_layout.f4_offset;
7652 for (i = 2; i < 4; i++)
7653 {
7654 if (cfun_fpr_bit_p (i))
4023fb28 7655 {
adf39f8f
AK
7656 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7657 offset += 8;
7658
7659 /* If f4 and f6 are call clobbered they are saved due to stdargs and
7660 therefore are not frame related. */
7661 if (!call_really_used_regs[i + 16])
7662 RTX_FRAME_RELATED_P (insn) = 1;
4023fb28 7663 }
b3d31392 7664 else if (!TARGET_PACKED_STACK)
adf39f8f
AK
7665 offset += 8;
7666 }
7667
b3d31392 7668 if (TARGET_PACKED_STACK
adf39f8f
AK
7669 && cfun_save_high_fprs_p
7670 && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
7671 {
7672 offset = (cfun_frame_layout.f8_offset
7673 + (cfun_frame_layout.high_fprs - 1) * 8);
7674
7675 for (i = 15; i > 7 && offset >= 0; i--)
7676 if (cfun_fpr_bit_p (i))
7677 {
7678 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7679
7680 RTX_FRAME_RELATED_P (insn) = 1;
7681 offset -= 8;
7682 }
7683 if (offset >= cfun_frame_layout.f8_offset)
7684 next_fpr = i + 16;
7685 }
7686
b3d31392 7687 if (!TARGET_PACKED_STACK)
adf39f8f 7688 next_fpr = cfun_save_high_fprs_p ? 31 : 0;
9db1d521 7689
4023fb28 7690 /* Decrement stack pointer. */
9db1d521 7691
adf39f8f 7692 if (cfun_frame_layout.frame_size > 0)
4023fb28 7693 {
adf39f8f 7694 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
9db1d521 7695
d75f90f1
AK
7696 if (s390_stack_size)
7697 {
690e7b63 7698 HOST_WIDE_INT stack_guard;
d75f90f1 7699
690e7b63
AK
7700 if (s390_stack_guard)
7701 stack_guard = s390_stack_guard;
d75f90f1 7702 else
690e7b63
AK
7703 {
7704 /* If no value for the stack guard is provided, the smallest power of 2
7705 that is at least as large as the current frame size is chosen. */
7706 stack_guard = 1;
7707 while (stack_guard < cfun_frame_layout.frame_size)
7708 stack_guard <<= 1;
7709 }
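	      /* Editor's example (illustrative): a frame size of 0x9000
		 yields stack_guard == 0x10000 here, the smallest power
		 of 2 not below the frame size.  */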
d75f90f1 7710
690e7b63
AK
7711 if (cfun_frame_layout.frame_size >= s390_stack_size)
7712 {
7713 warning (0, "frame size of function %qs is "
7714 HOST_WIDE_INT_PRINT_DEC
7715 " bytes exceeding user provided stack limit of "
7716 HOST_WIDE_INT_PRINT_DEC " bytes. "
7717 "An unconditional trap is added.",
7718 current_function_name (), cfun_frame_layout.frame_size,
7719 s390_stack_size);
7720 emit_insn (gen_trap ());
7721 }
7722 else
7723 {
7724 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
7725 & ~(stack_guard - 1));
7726 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
7727 GEN_INT (stack_check_mask));
7728 if (TARGET_64BIT)
7729 gen_cmpdi (t, const0_rtx);
7730 else
7731 gen_cmpsi (t, const0_rtx);
7732
7733 emit_insn (gen_conditional_trap (gen_rtx_EQ (CCmode,
7734 gen_rtx_REG (CCmode,
7735 CC_REGNUM),
7736 const0_rtx),
7737 const0_rtx));
7738 }
d75f90f1
AK
7739 }
7740
7741 if (s390_warn_framesize > 0
7742 && cfun_frame_layout.frame_size >= s390_warn_framesize)
d4ee4d25 7743 warning (0, "frame size of %qs is " HOST_WIDE_INT_PRINT_DEC " bytes",
d75f90f1
AK
7744 current_function_name (), cfun_frame_layout.frame_size);
7745
7746 if (s390_warn_dynamicstack_p && cfun->calls_alloca)
d4ee4d25 7747 warning (0, "%qs uses dynamic stack allocation", current_function_name ());
d75f90f1 7748
4023fb28 7749 /* Save incoming stack pointer into temp reg. */
66480e91 7750 if (TARGET_BACKCHAIN || next_fpr)
adf39f8f 7751 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
c7453384 7752
fae778eb 7753 /* Subtract frame size from stack pointer. */
4023fb28 7754
d3632d41
UW
7755 if (DISP_IN_RANGE (INTVAL (frame_off)))
7756 {
c7453384 7757 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
adf39f8f 7758 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
9c808aad 7759 frame_off));
d3632d41
UW
7760 insn = emit_insn (insn);
7761 }
7762 else
7763 {
b5c67a49 7764 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
d3632d41
UW
7765 frame_off = force_const_mem (Pmode, frame_off);
7766
7767 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
585539a1 7768 annotate_constant_pool_refs (&PATTERN (insn));
d3632d41 7769 }
4023fb28 7770
4023fb28 7771 RTX_FRAME_RELATED_P (insn) = 1;
c7453384 7772 REG_NOTES (insn) =
4023fb28
UW
7773 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7774 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
adf39f8f
AK
7775 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7776 GEN_INT (-cfun_frame_layout.frame_size))),
4023fb28
UW
7777 REG_NOTES (insn));
7778
7779 /* Set backchain. */
c7453384 7780
66480e91 7781 if (TARGET_BACKCHAIN)
9db1d521 7782 {
adf39f8f
AK
7783 if (cfun_frame_layout.backchain_offset)
7784 addr = gen_rtx_MEM (Pmode,
7785 plus_constant (stack_pointer_rtx,
7786 cfun_frame_layout.backchain_offset));
7787 else
7788 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
dcc9eb26 7789 set_mem_alias_set (addr, get_frame_alias_set ());
4023fb28 7790 insn = emit_insn (gen_move_insn (addr, temp_reg));
9db1d521 7791 }
7d798969
UW
7792
7793 /* If we support asynchronous exceptions (e.g. for Java),
7794 we need to make sure the backchain pointer is set up
7795 before any possibly trapping memory access. */
7796
66480e91 7797 if (TARGET_BACKCHAIN && flag_non_call_exceptions)
7d798969
UW
7798 {
7799 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
c41c1387 7800 emit_clobber (addr);
7d798969 7801 }
4023fb28 7802 }
9db1d521 7803
4023fb28 7804 /* Save fprs 8 - 15 (64 bit ABI). */
c7453384 7805
adf39f8f 7806 if (cfun_save_high_fprs_p && next_fpr)
4023fb28 7807 {
12959abe
AK
7808 /* If the stack might be accessed through a different register
7809 we have to make sure that the stack pointer decrement is not
7810 moved below the use of the stack slots. */
7811 s390_emit_stack_tie ();
7812
adf39f8f
AK
7813 insn = emit_insn (gen_add2_insn (temp_reg,
7814 GEN_INT (cfun_frame_layout.f8_offset)));
7815
7816 offset = 0;
9db1d521 7817
adf39f8f
AK
7818 for (i = 24; i <= next_fpr; i++)
7819 if (cfun_fpr_bit_p (i - 16))
4023fb28 7820 {
c7453384 7821 rtx addr = plus_constant (stack_pointer_rtx,
adf39f8f
AK
7822 cfun_frame_layout.frame_size
7823 + cfun_frame_layout.f8_offset
7824 + offset);
7825
7826 insn = save_fpr (temp_reg, offset, i);
7827 offset += 8;
4023fb28 7828 RTX_FRAME_RELATED_P (insn) = 1;
c7453384 7829 REG_NOTES (insn) =
4023fb28 7830 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
adf39f8f
AK
7831 gen_rtx_SET (VOIDmode,
7832 gen_rtx_MEM (DFmode, addr),
7833 gen_rtx_REG (DFmode, i)),
7834 REG_NOTES (insn));
4023fb28
UW
7835 }
7836 }
c7453384 7837
4023fb28 7838 /* Set frame pointer, if needed. */
c7453384 7839
29742ba4 7840 if (frame_pointer_needed)
4023fb28
UW
7841 {
7842 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
7843 RTX_FRAME_RELATED_P (insn) = 1;
7844 }
9db1d521 7845
4023fb28 7846 /* Set up got pointer, if needed. */
c7453384 7847
6fb5fa3c 7848 if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
585539a1
UW
7849 {
7850 rtx insns = s390_load_got ();
7851
7852 for (insn = insns; insn; insn = NEXT_INSN (insn))
6fb5fa3c 7853 annotate_constant_pool_refs (&PATTERN (insn));
585539a1
UW
7854
7855 emit_insn (insns);
7856 }
c7453384 7857
3839e36a 7858 if (TARGET_TPF_PROFILING)
c7453384
EC
7859 {
7860 /* Generate a BAS instruction to serve as a function
7861 entry intercept to facilitate the use of tracing
2f7e5a0d
EC
7862 algorithms located at the branch target. */
7863 emit_insn (gen_prologue_tpf ());
c7453384
EC
7864
7865 /* Emit a blockage here so that all code
7866 lies between the profiling mechanisms. */
7867 emit_insn (gen_blockage ());
7868 }
4023fb28 7869}
9db1d521 7870
b1c9bc51 7871/* Expand the epilogue into a bunch of separate insns. */
9db1d521 7872
4023fb28 7873void
ed9676cf 7874s390_emit_epilogue (bool sibcall)
4023fb28 7875{
4023fb28 7876 rtx frame_pointer, return_reg;
5d4d885c 7877 int area_bottom, area_top, offset = 0;
adf39f8f 7878 int next_offset;
4023fb28 7879 rtvec p;
7333171f 7880 int i;
9db1d521 7881
3839e36a 7882 if (TARGET_TPF_PROFILING)
c7453384
EC
7883 {
7884
7885 /* Generate a BAS instruction to serve as a function
7886 exit intercept to facilitate the use of tracing
2f7e5a0d 7887 algorithms located at the branch target. */
c7453384 7888
c7453384
EC
7889 /* Emit a blockage here so that all code
7890 lies between the profiling mechanisms. */
7891 emit_insn (gen_blockage ());
7892
2f7e5a0d 7893 emit_insn (gen_epilogue_tpf ());
c7453384
EC
7894 }
7895
4023fb28 7896 /* Check whether to use frame or stack pointer for restore. */
9db1d521 7897
adf39f8f
AK
7898 frame_pointer = (frame_pointer_needed
7899 ? hard_frame_pointer_rtx : stack_pointer_rtx);
9db1d521 7900
adf39f8f 7901 s390_frame_area (&area_bottom, &area_top);
9db1d521 7902
c7453384 7903 /* Check whether we can access the register save area.
4023fb28 7904 If not, increment the frame pointer as required. */
9db1d521 7905
4023fb28
UW
7906 if (area_top <= area_bottom)
7907 {
7908 /* Nothing to restore. */
7909 }
adf39f8f
AK
7910 else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
7911 && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
4023fb28
UW
7912 {
7913 /* Area is in range. */
adf39f8f 7914 offset = cfun_frame_layout.frame_size;
4023fb28
UW
7915 }
7916 else
7917 {
7918 rtx insn, frame_off;
9db1d521 7919
c7453384 7920 offset = area_bottom < 0 ? -area_bottom : 0;
adf39f8f 7921 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);
9db1d521 7922
d3632d41
UW
7923 if (DISP_IN_RANGE (INTVAL (frame_off)))
7924 {
c7453384 7925 insn = gen_rtx_SET (VOIDmode, frame_pointer,
d3632d41
UW
7926 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
7927 insn = emit_insn (insn);
7928 }
7929 else
7930 {
b5c67a49 7931 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
d3632d41 7932 frame_off = force_const_mem (Pmode, frame_off);
9db1d521 7933
d3632d41 7934 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
585539a1 7935 annotate_constant_pool_refs (&PATTERN (insn));
d3632d41 7936 }
4023fb28 7937 }
9db1d521 7938
4023fb28
UW
7939 /* Restore call saved fprs. */
7940
7941 if (TARGET_64BIT)
9db1d521 7942 {
adf39f8f
AK
7943 if (cfun_save_high_fprs_p)
7944 {
7945 next_offset = cfun_frame_layout.f8_offset;
7946 for (i = 24; i < 32; i++)
7947 {
7948 if (cfun_fpr_bit_p (i - 16))
7949 {
7950 restore_fpr (frame_pointer,
7951 offset + next_offset, i);
7952 next_offset += 8;
7953 }
7954 }
7955 }
7956
9db1d521
HP
7957 }
7958 else
7959 {
adf39f8f 7960 next_offset = cfun_frame_layout.f4_offset;
7333171f 7961 for (i = 18; i < 20; i++)
adf39f8f
AK
7962 {
7963 if (cfun_fpr_bit_p (i - 16))
7964 {
7965 restore_fpr (frame_pointer,
7966 offset + next_offset, i);
7967 next_offset += 8;
7968 }
b3d31392 7969 else if (!TARGET_PACKED_STACK)
adf39f8f
AK
7970 next_offset += 8;
7971 }
7972
4023fb28 7973 }
9db1d521 7974
4023fb28
UW
7975 /* Return register. */
7976
c7453384 7977 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4023fb28
UW
7978
7979 /* Restore call saved gprs. */
7980
adf39f8f 7981 if (cfun_frame_layout.first_restore_gpr != -1)
4023fb28 7982 {
c3cc6b78 7983 rtx insn, addr;
1447dc69
HP
7984 int i;
7985
c7453384 7986 /* Check for global registers and save them
1447dc69
HP
7987 to the stack locations from which they get restored. */
7988
adf39f8f
AK
7989 for (i = cfun_frame_layout.first_restore_gpr;
7990 i <= cfun_frame_layout.last_restore_gpr;
1447dc69
HP
7991 i++)
7992 {
c7453384 7993 /* These registers are special and need to be
cf5ee720 7994 restored in any case. */
c7453384 7995 if (i == STACK_POINTER_REGNUM
cf5ee720 7996 || i == RETURN_REGNUM
490ceeb4 7997 || i == BASE_REGNUM
5d4d885c 7998 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
cf5ee720
UW
7999 continue;
8000
1447dc69
HP
8001 if (global_regs[i])
8002 {
c7453384 8003 addr = plus_constant (frame_pointer,
adf39f8f 8004 offset + cfun_frame_layout.gprs_offset
fb3712f6 8005 + (i - cfun_frame_layout.first_save_gpr_slot)
adf39f8f 8006 * UNITS_PER_WORD);
1447dc69 8007 addr = gen_rtx_MEM (Pmode, addr);
dcc9eb26 8008 set_mem_alias_set (addr, get_frame_alias_set ());
1447dc69 8009 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
c7453384 8010 }
1447dc69 8011 }
4023fb28 8012
ed9676cf 8013 if (! sibcall)
9db1d521 8014 {
ed9676cf
AK
8015 /* Fetch return address from stack before load multiple,
8016 this will do good for scheduling. */
38899e29 8017
adf39f8f
AK
8018 if (cfun_frame_layout.save_return_addr_p
8019 || (cfun_frame_layout.first_restore_gpr < BASE_REGNUM
8020 && cfun_frame_layout.last_restore_gpr > RETURN_REGNUM))
ed9676cf
AK
8021 {
8022 int return_regnum = find_unused_clobbered_reg ();
8023 if (!return_regnum)
8024 return_regnum = 4;
8025 return_reg = gen_rtx_REG (Pmode, return_regnum);
38899e29 8026
ed9676cf 8027 addr = plus_constant (frame_pointer,
adf39f8f
AK
8028 offset + cfun_frame_layout.gprs_offset
8029 + (RETURN_REGNUM
fb3712f6 8030 - cfun_frame_layout.first_save_gpr_slot)
adf39f8f 8031 * UNITS_PER_WORD);
ed9676cf 8032 addr = gen_rtx_MEM (Pmode, addr);
dcc9eb26 8033 set_mem_alias_set (addr, get_frame_alias_set ());
ed9676cf
AK
8034 emit_move_insn (return_reg, addr);
8035 }
9db1d521 8036 }
4023fb28 8037
adf39f8f
AK
8038 insn = restore_gprs (frame_pointer,
8039 offset + cfun_frame_layout.gprs_offset
8040 + (cfun_frame_layout.first_restore_gpr
fb3712f6 8041 - cfun_frame_layout.first_save_gpr_slot)
adf39f8f
AK
8042 * UNITS_PER_WORD,
8043 cfun_frame_layout.first_restore_gpr,
8044 cfun_frame_layout.last_restore_gpr);
c3cc6b78 8045 emit_insn (insn);
4023fb28 8046 }
9db1d521 8047
ed9676cf
AK
8048 if (! sibcall)
8049 {
c7453384 8050
ed9676cf 8051 /* Return to caller. */
38899e29 8052
ed9676cf 8053 p = rtvec_alloc (2);
38899e29 8054
ed9676cf
AK
8055 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
8056 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
8057 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
8058 }
9db1d521
HP
8059}
8060
9db1d521 8061
c7453384 8062/* Return the size in bytes of a function argument of
994fe660
UW
8063 type TYPE and/or mode MODE. At least one of TYPE or
8064 MODE must be specified. */
9db1d521
HP
8065
8066static int
586de218 8067s390_function_arg_size (enum machine_mode mode, const_tree type)
9db1d521
HP
8068{
8069 if (type)
8070 return int_size_in_bytes (type);
8071
d65f7478 8072 /* No type info available for some library calls ... */
9db1d521
HP
8073 if (mode != BLKmode)
8074 return GET_MODE_SIZE (mode);
8075
8076 /* If we have neither type nor mode, abort. */
8d933e31 8077 gcc_unreachable ();
9db1d521
HP
8078}
8079
82b1c974
UW
8080/* Return true if a function argument of type TYPE and mode MODE
8081 is to be passed in a floating-point register, if available. */
8082
8083static bool
9c808aad 8084s390_function_arg_float (enum machine_mode mode, tree type)
82b1c974 8085{
8c17530e
UW
8086 int size = s390_function_arg_size (mode, type);
8087 if (size > 8)
8088 return false;
8089
82b1c974
UW
8090 /* Soft-float changes the ABI: no floating-point registers are used. */
8091 if (TARGET_SOFT_FLOAT)
8092 return false;
8093
8094 /* No type info available for some library calls ... */
8095 if (!type)
4dc19cc0 8096 return mode == SFmode || mode == DFmode || mode == SDmode || mode == DDmode;
82b1c974
UW
8097
8098 /* The ABI says that record types with a single member are treated
8099 just like that member would be. */
8100 while (TREE_CODE (type) == RECORD_TYPE)
8101 {
8102 tree field, single = NULL_TREE;
8103
8104 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
8105 {
8106 if (TREE_CODE (field) != FIELD_DECL)
8107 continue;
8108
8109 if (single == NULL_TREE)
8110 single = TREE_TYPE (field);
8111 else
8112 return false;
8113 }
8114
8115 if (single == NULL_TREE)
8116 return false;
8117 else
8118 type = single;
8119 }
8120
8121 return TREE_CODE (type) == REAL_TYPE;
8122}
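/* Editor's examples (illustrative of the single-member record rule
   above): struct { double d; } is passed like a plain double, as is
   the nested struct { struct { double d; } s; }, whereas any record
   with two fields is never treated as a floating-point argument.  */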
8123
8c17530e
UW
8124/* Return true if a function argument of type TYPE and mode MODE
8125 is to be passed in an integer register, or a pair of integer
8126 registers, if available. */
8127
8128static bool
8129s390_function_arg_integer (enum machine_mode mode, tree type)
8130{
8131 int size = s390_function_arg_size (mode, type);
8132 if (size > 8)
8133 return false;
8134
8135 /* No type info available for some library calls ... */
8136 if (!type)
8137 return GET_MODE_CLASS (mode) == MODE_INT
4dc19cc0 8138 || (TARGET_SOFT_FLOAT && SCALAR_FLOAT_MODE_P (mode));
8c17530e
UW
8139
8140 /* We accept small integral (and similar) types. */
8141 if (INTEGRAL_TYPE_P (type)
38899e29 8142 || POINTER_TYPE_P (type)
8c17530e
UW
8143 || TREE_CODE (type) == OFFSET_TYPE
8144 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
8145 return true;
8146
8147 /* We also accept structs of size 1, 2, 4, 8 that are not
38899e29 8148 passed in floating-point registers. */
8c17530e
UW
8149 if (AGGREGATE_TYPE_P (type)
8150 && exact_log2 (size) >= 0
8151 && !s390_function_arg_float (mode, type))
8152 return true;
8153
8154 return false;
8155}
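/* Editor's examples (illustrative): struct { char c; } (size 1) and
   struct { int a, b; } (size 8) pass the exact_log2 test above and
   travel in general registers, while struct { char c[3]; } (size 3)
   fails it and is passed by reference instead.  */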
8156
994fe660
UW
8157/* Return 1 if a function argument of type TYPE and mode MODE
8158 is to be passed by reference. The ABI specifies that only
8159 structures of size 1, 2, 4, or 8 bytes are passed by value,
8160 all other structures (and complex numbers) are passed by
8161 reference. */
8162
8cd5a4e0
RH
8163static bool
8164s390_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
586de218 8165 enum machine_mode mode, const_tree type,
8cd5a4e0 8166 bool named ATTRIBUTE_UNUSED)
9db1d521
HP
8167{
8168 int size = s390_function_arg_size (mode, type);
8c17530e
UW
8169 if (size > 8)
8170 return true;
9db1d521
HP
8171
8172 if (type)
8173 {
8c17530e 8174 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
9db1d521
HP
8175 return 1;
8176
8c17530e
UW
8177 if (TREE_CODE (type) == COMPLEX_TYPE
8178 || TREE_CODE (type) == VECTOR_TYPE)
9db1d521
HP
8179 return 1;
8180 }
c7453384 8181
9db1d521 8182 return 0;
9db1d521
HP
8183}
8184
8185/* Update the data in CUM to advance over an argument of mode MODE and
8186 data type TYPE. (TYPE is null for libcalls where that information
994fe660
UW
8187 may not be available.) The boolean NAMED specifies whether the
8188 argument is a named argument (as opposed to an unnamed argument
8189 matching an ellipsis). */
9db1d521
HP
8190
8191void
9c808aad
AJ
8192s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
8193 tree type, int named ATTRIBUTE_UNUSED)
9db1d521 8194{
8cd5a4e0 8195 if (s390_function_arg_float (mode, type))
9db1d521 8196 {
82b1c974 8197 cum->fprs += 1;
9db1d521 8198 }
8c17530e 8199 else if (s390_function_arg_integer (mode, type))
9db1d521
HP
8200 {
8201 int size = s390_function_arg_size (mode, type);
8202 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
8203 }
8c17530e 8204 else
8d933e31 8205 gcc_unreachable ();
9db1d521
HP
8206}
8207
994fe660
UW
8208/* Define where to put the arguments to a function.
8209 Value is zero to push the argument on the stack,
8210 or a hard register in which to store the argument.
8211
8212 MODE is the argument's machine mode.
8213 TYPE is the data type of the argument (as a tree).
8214 This is null for libcalls where that information may
8215 not be available.
8216 CUM is a variable of type CUMULATIVE_ARGS which gives info about
8217 the preceding args and about the function being called.
8218 NAMED is nonzero if this argument is a named parameter
c7453384 8219 (otherwise it is an extra parameter matching an ellipsis).
994fe660
UW
8220
8221 On S/390, we use general purpose registers 2 through 6 to
8222 pass integer, pointer, and certain structure arguments, and
8223 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
8224 to pass floating point arguments. All remaining arguments
8225 are pushed to the stack. */
9db1d521
HP
8226
8227rtx
9c808aad
AJ
8228s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
8229 int named ATTRIBUTE_UNUSED)
9db1d521 8230{
82b1c974 8231 if (s390_function_arg_float (mode, type))
9db1d521 8232 {
29a79fcf 8233 if (cum->fprs + 1 > FP_ARG_NUM_REG)
9db1d521
HP
8234 return 0;
8235 else
f1c25d3b 8236 return gen_rtx_REG (mode, cum->fprs + 16);
9db1d521 8237 }
8c17530e 8238 else if (s390_function_arg_integer (mode, type))
9db1d521
HP
8239 {
8240 int size = s390_function_arg_size (mode, type);
8241 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
8242
29a79fcf 8243 if (cum->gprs + n_gprs > GP_ARG_NUM_REG)
9db1d521
HP
8244 return 0;
8245 else
f1c25d3b 8246 return gen_rtx_REG (mode, cum->gprs + 2);
9db1d521 8247 }
8c17530e
UW
8248
8249 /* After the real arguments, expand_call calls us once again
8250 with a void_type_node type. Whatever we return here is
8251 passed as operand 2 to the call expanders.
8252
8253 We don't need this feature ... */
8254 else if (type == void_type_node)
8255 return const0_rtx;
8256
8d933e31 8257 gcc_unreachable ();
8c17530e
UW
8258}
8259
8260/* Return true if return values of type TYPE should be returned
8261 in a memory buffer whose address is passed by the caller as
8262 hidden first argument. */
8263
8264static bool
586de218 8265s390_return_in_memory (const_tree type, const_tree fundecl ATTRIBUTE_UNUSED)
8c17530e
UW
8266{
8267 /* We accept small integral (and similar) types. */
8268 if (INTEGRAL_TYPE_P (type)
38899e29 8269 || POINTER_TYPE_P (type)
8c17530e
UW
8270 || TREE_CODE (type) == OFFSET_TYPE
8271 || TREE_CODE (type) == REAL_TYPE)
8272 return int_size_in_bytes (type) > 8;
8273
8274 /* Aggregates and similar constructs are always returned
8275 in memory. */
8276 if (AGGREGATE_TYPE_P (type)
8277 || TREE_CODE (type) == COMPLEX_TYPE
8278 || TREE_CODE (type) == VECTOR_TYPE)
8279 return true;
8280
8281 /* ??? We get called on all sorts of random stuff from
8282 aggregate_value_p. We can't abort, but it's not clear
8283 what's safe to return. Pretend it's a struct I guess. */
8284 return true;
8285}
8286
8287/* Define where to return a (scalar) value of type TYPE.
8288 If TYPE is null, define where to return a (scalar)
8289 value of mode MODE from a libcall. */
8290
8291rtx
586de218 8292s390_function_value (const_tree type, enum machine_mode mode)
8c17530e
UW
8293{
8294 if (type)
8295 {
8df83eae 8296 int unsignedp = TYPE_UNSIGNED (type);
8c17530e
UW
8297 mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
8298 }
8299
4dc19cc0 8300 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT || SCALAR_FLOAT_MODE_P (mode));
8d933e31 8301 gcc_assert (GET_MODE_SIZE (mode) <= 8);
8c17530e 8302
4dc19cc0 8303 if (TARGET_HARD_FLOAT && SCALAR_FLOAT_MODE_P (mode))
8c17530e
UW
8304 return gen_rtx_REG (mode, 16);
8305 else
8306 return gen_rtx_REG (mode, 2);
9db1d521
HP
8307}
8308
8309
994fe660
UW
8310/* Create and return the va_list datatype.
8311
8312 On S/390, va_list is an array type equivalent to
8313
8314 typedef struct __va_list_tag
8315 {
8316 long __gpr;
8317 long __fpr;
8318 void *__overflow_arg_area;
8319 void *__reg_save_area;
994fe660
UW
8320 } va_list[1];
8321
8322 where __gpr and __fpr hold the number of general purpose
8323 or floating point arguments used up to now, respectively,
c7453384 8324 __overflow_arg_area points to the stack location of the
994fe660
UW
8325 next argument passed on the stack, and __reg_save_area
8326 always points to the start of the register area in the
8327 call frame of the current function. The function prologue
8328 saves all registers used for argument passing into this
8329 area if the function uses variable arguments. */
9db1d521 8330
c35d187f
RH
8331static tree
8332s390_build_builtin_va_list (void)
9db1d521
HP
8333{
8334 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
8335
47798692 8336 record = lang_hooks.types.make_type (RECORD_TYPE);
9db1d521
HP
8337
8338 type_decl =
8339 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
8340
c7453384 8341 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
9db1d521 8342 long_integer_type_node);
c7453384 8343 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
9db1d521
HP
8344 long_integer_type_node);
8345 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
8346 ptr_type_node);
8347 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
8348 ptr_type_node);
8349
29a79fcf
UW
8350 va_list_gpr_counter_field = f_gpr;
8351 va_list_fpr_counter_field = f_fpr;
8352
9db1d521
HP
8353 DECL_FIELD_CONTEXT (f_gpr) = record;
8354 DECL_FIELD_CONTEXT (f_fpr) = record;
8355 DECL_FIELD_CONTEXT (f_ovf) = record;
8356 DECL_FIELD_CONTEXT (f_sav) = record;
8357
8358 TREE_CHAIN (record) = type_decl;
8359 TYPE_NAME (record) = type_decl;
8360 TYPE_FIELDS (record) = f_gpr;
8361 TREE_CHAIN (f_gpr) = f_fpr;
8362 TREE_CHAIN (f_fpr) = f_ovf;
8363 TREE_CHAIN (f_ovf) = f_sav;
8364
8365 layout_type (record);
8366
8367 /* The correct type is an array type of one element. */
8368 return build_array_type (record, build_index_type (size_zero_node));
8369}
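/* Editor's example (hypothetical user-level view): for

     int f (int a, ...)

   called as f (1, 2, 3), va_start leaves __gpr == 1 (one named gpr
   argument consumed) and __fpr == 0, so the first va_arg picks up the
   second gpr argument from __reg_save_area.  */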
8370
994fe660 8371/* Implement va_start by filling the va_list structure VALIST.
6c535c69
ZW
8372 STDARG_P is always true, and ignored.
8373 NEXTARG points to the first anonymous stack argument.
994fe660 8374
f710504c 8375 The following global variables are used to initialize
994fe660
UW
8376 the va_list structure:
8377
38173d38 8378 crtl->args.info:
994fe660 8379 holds number of gprs and fprs used for named arguments.
38173d38 8380 crtl->args.arg_offset_rtx:
994fe660
UW
8381 holds the offset of the first anonymous stack argument
8382 (relative to the virtual arg pointer). */
9db1d521 8383
d7bd8aeb 8384static void
9c808aad 8385s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
9db1d521
HP
8386{
8387 HOST_WIDE_INT n_gpr, n_fpr;
8388 int off;
8389 tree f_gpr, f_fpr, f_ovf, f_sav;
8390 tree gpr, fpr, ovf, sav, t;
8391
8392 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
8393 f_fpr = TREE_CHAIN (f_gpr);
8394 f_ovf = TREE_CHAIN (f_fpr);
8395 f_sav = TREE_CHAIN (f_ovf);
8396
967af719 8397 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
8398 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
8399 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
8400 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
8401 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
9db1d521
HP
8402
8403 /* Count number of gp and fp argument registers used. */
8404
38173d38
JH
8405 n_gpr = crtl->args.info.gprs;
8406 n_fpr = crtl->args.info.fprs;
9db1d521 8407
29a79fcf
UW
8408 if (cfun->va_list_gpr_size)
8409 {
07beea0d 8410 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
47a25a46 8411 build_int_cst (NULL_TREE, n_gpr));
29a79fcf
UW
8412 TREE_SIDE_EFFECTS (t) = 1;
8413 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8414 }
9db1d521 8415
29a79fcf
UW
8416 if (cfun->va_list_fpr_size)
8417 {
07beea0d 8418 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
47a25a46 8419 build_int_cst (NULL_TREE, n_fpr));
29a79fcf
UW
8420 TREE_SIDE_EFFECTS (t) = 1;
8421 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8422 }
9db1d521
HP
8423
8424 /* Find the overflow area. */
29a79fcf
UW
8425 if (n_gpr + cfun->va_list_gpr_size > GP_ARG_NUM_REG
8426 || n_fpr + cfun->va_list_fpr_size > FP_ARG_NUM_REG)
8427 {
8428 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
9db1d521 8429
38173d38 8430 off = INTVAL (crtl->args.arg_offset_rtx);
29a79fcf
UW
8431 off = off < 0 ? 0 : off;
8432 if (TARGET_DEBUG_ARG)
8433 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
8434 (int)n_gpr, (int)n_fpr, off);
9db1d521 8435
5be014d5 8436 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t, size_int (off));
9db1d521 8437
07beea0d 8438 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
29a79fcf
UW
8439 TREE_SIDE_EFFECTS (t) = 1;
8440 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8441 }
9db1d521
HP
8442
8443 /* Find the register save area. */
29a79fcf
UW
8444 if ((cfun->va_list_gpr_size && n_gpr < GP_ARG_NUM_REG)
8445 || (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG))
8446 {
8447 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
5be014d5
AP
8448 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
8449 size_int (-RETURN_REGNUM * UNITS_PER_WORD));
6b78f6be 8450
07beea0d 8451 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
29a79fcf
UW
8452 TREE_SIDE_EFFECTS (t) = 1;
8453 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8454 }
9db1d521
HP
8455}
8456
c7453384 8457/* Implement va_arg by updating the va_list structure
994fe660 8458 VALIST as required to retrieve an argument of type
c7453384
EC
8459 TYPE, and returning that argument.
8460
994fe660 8461 Generates code equivalent to:
c7453384 8462
9db1d521
HP
8463 if (integral value) {
8464 if (size <= 4 && args.gpr < 5 ||
c7453384 8465 size > 4 && args.gpr < 4 )
9db1d521
HP
8466 ret = args.reg_save_area[args.gpr+8]
8467 else
8468 ret = *args.overflow_arg_area++;
8469 } else if (float value) {
8470 if (args.fpr < 2)
8471 ret = args.reg_save_area[args.fpr+64]
8472 else
8473 ret = *args.overflow_arg_area++;
8474 } else if (aggregate value) {
8475 if (args.gpr < 5)
8476 ret = *args.reg_save_area[args.gpr]
8477 else
8478 ret = **args.overflow_arg_area++;
8479 } */
8480
ab96de7e 8481static tree
63694b5e
UW
8482s390_gimplify_va_arg (tree valist, tree type, tree *pre_p,
8483 tree *post_p ATTRIBUTE_UNUSED)
9db1d521
HP
8484{
8485 tree f_gpr, f_fpr, f_ovf, f_sav;
8486 tree gpr, fpr, ovf, sav, reg, t, u;
8487 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
63694b5e 8488 tree lab_false, lab_over, addr;
9db1d521
HP
8489
8490 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
8491 f_fpr = TREE_CHAIN (f_gpr);
8492 f_ovf = TREE_CHAIN (f_fpr);
8493 f_sav = TREE_CHAIN (f_ovf);
8494
967af719 8495 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
8496 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
8497 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
8498 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
8499 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
9db1d521
HP
8500
8501 size = int_size_in_bytes (type);
8502
8cd5a4e0 8503 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
9db1d521
HP
8504 {
8505 if (TARGET_DEBUG_ARG)
8506 {
8507 fprintf (stderr, "va_arg: aggregate type");
8508 debug_tree (type);
8509 }
8510
8511 /* Aggregates are passed by reference. */
8512 indirect_p = 1;
8513 reg = gpr;
8514 n_reg = 1;
ea506297 8515
b3d31392 8516 /* Kernel stack layout on 31 bit: it is assumed here that no padding
ea506297
AK
8517 will be added by s390_frame_info, because for va_args an even
8518 number of gprs always has to be saved (r15-r2 = 14 regs). */
6b78f6be 8519 sav_ofs = 2 * UNITS_PER_WORD;
9db1d521
HP
8520 sav_scale = UNITS_PER_WORD;
8521 size = UNITS_PER_WORD;
29a79fcf 8522 max_reg = GP_ARG_NUM_REG - n_reg;
9db1d521 8523 }
82b1c974 8524 else if (s390_function_arg_float (TYPE_MODE (type), type))
9db1d521
HP
8525 {
8526 if (TARGET_DEBUG_ARG)
8527 {
8528 fprintf (stderr, "va_arg: float type");
8529 debug_tree (type);
8530 }
8531
8532 /* FP args go in FP registers, if present. */
8533 indirect_p = 0;
8534 reg = fpr;
8535 n_reg = 1;
6b78f6be 8536 sav_ofs = 16 * UNITS_PER_WORD;
9db1d521 8537 sav_scale = 8;
29a79fcf 8538 max_reg = FP_ARG_NUM_REG - n_reg;
9db1d521
HP
8539 }
8540 else
8541 {
8542 if (TARGET_DEBUG_ARG)
8543 {
8544 fprintf (stderr, "va_arg: other type");
8545 debug_tree (type);
8546 }
8547
8548 /* Otherwise into GP registers. */
8549 indirect_p = 0;
8550 reg = gpr;
8551 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
ea506297 8552
b3d31392
AK
8553 /* Kernel stack layout on 31 bit: it is assumed here that no padding
8554 will be added by s390_frame_info, because for va_args an even
8555 number of gprs always has to be saved (r15-r2 = 14 regs). */
6b78f6be 8556 sav_ofs = 2 * UNITS_PER_WORD;
c7453384 8557
c873e11f
UW
8558 if (size < UNITS_PER_WORD)
8559 sav_ofs += UNITS_PER_WORD - size;
9db1d521
HP
8560
8561 sav_scale = UNITS_PER_WORD;
29a79fcf 8562 max_reg = GP_ARG_NUM_REG - n_reg;
9db1d521
HP
8563 }
8564
8565 /* Pull the value out of the saved registers ... */
8566
63694b5e
UW
8567 lab_false = create_artificial_label ();
8568 lab_over = create_artificial_label ();
8569 addr = create_tmp_var (ptr_type_node, "addr");
dcc9eb26 8570 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
9db1d521 8571
6c6dd4bd 8572 t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
63694b5e
UW
8573 t = build2 (GT_EXPR, boolean_type_node, reg, t);
8574 u = build1 (GOTO_EXPR, void_type_node, lab_false);
8575 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
8576 gimplify_and_add (t, pre_p);
9db1d521 8577
5be014d5
AP
8578 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav,
8579 size_int (sav_ofs));
6c6dd4bd
UW
8580 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
8581 fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
5be014d5 8582 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, fold_convert (sizetype, u));
9db1d521 8583
07beea0d 8584 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
63694b5e 8585 gimplify_and_add (t, pre_p);
9db1d521 8586
63694b5e
UW
8587 t = build1 (GOTO_EXPR, void_type_node, lab_over);
8588 gimplify_and_add (t, pre_p);
9db1d521 8589
63694b5e
UW
8590 t = build1 (LABEL_EXPR, void_type_node, lab_false);
8591 append_to_statement_list (t, pre_p);
9db1d521 8592
9db1d521
HP
8593
8594 /* ... Otherwise out of the overflow area. */
8595
ab96de7e
AS
8596 t = ovf;
8597 if (size < UNITS_PER_WORD)
5be014d5
AP
8598 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
8599 size_int (UNITS_PER_WORD - size));
ab96de7e
AS
8600
8601 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
8602
07beea0d 8603 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
ab96de7e
AS
8604 gimplify_and_add (u, pre_p);
8605
5be014d5
AP
8606 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
8607 size_int (size));
07beea0d 8608 t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, ovf, t);
ab96de7e
AS
8609 gimplify_and_add (t, pre_p);
8610
8611 t = build1 (LABEL_EXPR, void_type_node, lab_over);
8612 append_to_statement_list (t, pre_p);
8613
8614
8615 /* Increment register save count. */
8616
8617 u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
8618 fold_convert (TREE_TYPE (reg), size_int (n_reg)));
8619 gimplify_and_add (u, pre_p);
8620
8621 if (indirect_p)
8622 {
8623 t = build_pointer_type (build_pointer_type (type));
8624 addr = fold_convert (t, addr);
8625 addr = build_va_arg_indirect_ref (addr);
8626 }
8627 else
8628 {
8629 t = build_pointer_type (type);
8630 addr = fold_convert (t, addr);
8631 }
8632
8633 return build_va_arg_indirect_ref (addr);
8634}
8635
8636
8637/* Builtins. */
8638
8639enum s390_builtin
8640{
8641 S390_BUILTIN_THREAD_POINTER,
8642 S390_BUILTIN_SET_THREAD_POINTER,
8643
8644 S390_BUILTIN_max
8645};
8646
8647static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
8648 CODE_FOR_get_tp_64,
8649 CODE_FOR_set_tp_64
8650};
8651
8652static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
8653 CODE_FOR_get_tp_31,
8654 CODE_FOR_set_tp_31
8655};
8656
8657static void
8658s390_init_builtins (void)
8659{
8660 tree ftype;
8661
8662 ftype = build_function_type (ptr_type_node, void_list_node);
c79efc4d
RÁE
8663 add_builtin_function ("__builtin_thread_pointer", ftype,
8664 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
8665 NULL, NULL_TREE);
ab96de7e
AS
8666
8667 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
c79efc4d
RÁE
8668 add_builtin_function ("__builtin_set_thread_pointer", ftype,
8669 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
8670 NULL, NULL_TREE);
ab96de7e
AS
8671}
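/* Editor's usage example (hypothetical user code): once registered,
   the builtins are directly callable:

     void *tp = __builtin_thread_pointer ();
     __builtin_set_thread_pointer (tp);
  */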
8672
8673/* Expand an expression EXP that calls a built-in function,
8674 with result going to TARGET if that's convenient
8675 (and in mode MODE if that's convenient).
8676 SUBTARGET may be used as the target for computing one of EXP's operands.
8677 IGNORE is nonzero if the value is to be ignored. */
8678
8679static rtx
8680s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
8681 enum machine_mode mode ATTRIBUTE_UNUSED,
8682 int ignore ATTRIBUTE_UNUSED)
8683{
8684#define MAX_ARGS 2
8685
8686 unsigned int const *code_for_builtin =
8687 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
8688
5039610b 8689 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
ab96de7e 8690 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
ab96de7e
AS
8691 enum insn_code icode;
8692 rtx op[MAX_ARGS], pat;
8693 int arity;
8694 bool nonvoid;
5039610b
SL
8695 tree arg;
8696 call_expr_arg_iterator iter;
ab96de7e
AS
8697
8698 if (fcode >= S390_BUILTIN_max)
8699 internal_error ("bad builtin fcode");
8700 icode = code_for_builtin[fcode];
8701 if (icode == 0)
8702 internal_error ("bad builtin fcode");
8703
8704 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
8705
5039610b
SL
8706 arity = 0;
8707 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
ab96de7e
AS
8708 {
8709 const struct insn_operand_data *insn_op;
8710
ab96de7e
AS
8711 if (arg == error_mark_node)
8712 return NULL_RTX;
8713 if (arity >= MAX_ARGS)
8714 return NULL_RTX;
8715
8716 insn_op = &insn_data[icode].operand[arity + nonvoid];
8717
8718 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
8719
8720 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
8721 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
5039610b 8722 arity++;
ab96de7e
AS
8723 }
8724
8725 if (nonvoid)
8726 {
8727 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8728 if (!target
8729 || GET_MODE (target) != tmode
8730 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
8731 target = gen_reg_rtx (tmode);
8732 }
8733
8734 switch (arity)
8735 {
8736 case 0:
8737 pat = GEN_FCN (icode) (target);
8738 break;
8739 case 1:
8740 if (nonvoid)
8741 pat = GEN_FCN (icode) (target, op[0]);
8742 else
8743 pat = GEN_FCN (icode) (op[0]);
8744 break;
8745 case 2:
8746 pat = GEN_FCN (icode) (target, op[0], op[1]);
8747 break;
8748 default:
8d933e31 8749 gcc_unreachable ();
ab96de7e
AS
8750 }
8751 if (!pat)
8752 return NULL_RTX;
8753 emit_insn (pat);
8754
8755 if (nonvoid)
8756 return target;
8757 else
8758 return const0_rtx;
8759}
8760
8761
8762/* Output assembly code for the trampoline template to
8763 stdio stream FILE.
8764
8765 On S/390, we use gpr 1 internally in the trampoline code;
8766 gpr 0 is used to hold the static chain. */
8767
8768void
8769s390_trampoline_template (FILE *file)
8770{
8771 rtx op[2];
8772 op[0] = gen_rtx_REG (Pmode, 0);
8773 op[1] = gen_rtx_REG (Pmode, 1);
8774
8775 if (TARGET_64BIT)
8776 {
8777 output_asm_insn ("basr\t%1,0", op);
8778 output_asm_insn ("lmg\t%0,%1,14(%1)", op);
8779 output_asm_insn ("br\t%1", op);
8780 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
8781 }
8782 else
8783 {
8784 output_asm_insn ("basr\t%1,0", op);
8785 output_asm_insn ("lm\t%0,%1,6(%1)", op);
8786 output_asm_insn ("br\t%1", op);
8787 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
8788 }
8789}
8790
8791/* Emit RTL insns to initialize the variable parts of a trampoline.
8792 FNADDR is an RTX for the address of the function's pure code.
8793 CXT is an RTX for the static chain value for the function. */
8794
8795void
8796s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
8797{
8798 emit_move_insn (gen_rtx_MEM (Pmode,
8799 memory_address (Pmode,
8800 plus_constant (addr, (TARGET_64BIT ? 16 : 8)))), cxt);
8801 emit_move_insn (gen_rtx_MEM (Pmode,
8802 memory_address (Pmode,
8803 plus_constant (addr, (TARGET_64BIT ? 24 : 12)))), fnaddr);
8804}
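/* Editor's illustration (derived from the two functions above): the
   64 bit trampoline occupies TRAMPOLINE_SIZE bytes laid out as

      0: basr %r1,0            # %r1 = address of next insn (offset 2)
      2: lmg  %r0,%r1,14(%r1)  # load 16 bytes from offset 2+14 == 16
      8: br   %r1
     16: static chain value    # stored by s390_initialize_trampoline
     24: function address

   so at run time %r0 receives the static chain and %r1 the branch
   target.  The 31 bit variant uses lm and stores at offsets 8/12.  */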

/* Output assembler code to FILE to increment profiler label # LABELNO
   for profiling a function entry.  */

void
s390_function_profiler (FILE *file, int labelno)
{
  rtx op[7];

  char label[128];
  ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);

  fprintf (file, "# function profiler \n");

  op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
  op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));

  op[2] = gen_rtx_REG (Pmode, 1);
  op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
  SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;

  op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
  if (flag_pic)
    {
      op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
      op[4] = gen_rtx_CONST (Pmode, op[4]);
    }

  if (TARGET_64BIT)
    {
      output_asm_insn ("stg\t%0,%1", op);
      output_asm_insn ("larl\t%2,%3", op);
      output_asm_insn ("brasl\t%0,%4", op);
      output_asm_insn ("lg\t%0,%1", op);
    }
  else if (!flag_pic)
    {
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      output_asm_insn (".long\t%4", op);
      output_asm_insn (".long\t%3", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("l\t%0,0(%2)", op);
      output_asm_insn ("l\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
  else
    {
      op[5] = gen_label_rtx ();
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
      output_asm_insn (".long\t%4-%l5", op);
      output_asm_insn (".long\t%3-%l5", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("lr\t%0,%2", op);
      output_asm_insn ("a\t%0,0(%2)", op);
      output_asm_insn ("a\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
}
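
/* For TARGET_64BIT the sequence emitted above is, schematically,

     stg   %r14,8(%r15)   # save the return address
     larl  %r1,.LP<n>     # address of the profile label
     brasl %r14,_mcount   # call _mcount (via PLT when PIC)
     lg    %r14,8(%r15)   # restore the return address

   The 31-bit variants achieve the same effect without larl/brasl by
   placing the _mcount and label addresses in literal words that the
   bras instruction branches over.  */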

/* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
   into its SYMBOL_REF_FLAGS.  */

static void
s390_encode_section_info (tree decl, rtx rtl, int first)
{
  default_encode_section_info (decl, rtl, first);

  if (TREE_CODE (decl) == VAR_DECL)
    {
      /* If a variable has a forced alignment to < 2 bytes, mark it
         with SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL
         operand.  */
      if (DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
        SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
      if (!DECL_SIZE (decl)
          || !DECL_ALIGN (decl)
          || !host_integerp (DECL_SIZE (decl), 0)
          || (DECL_ALIGN (decl) <= 64
              && DECL_ALIGN (decl) != tree_low_cst (DECL_SIZE (decl), 0)))
        SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_NOT_NATURALLY_ALIGNED;
    }

  /* Literal pool references don't have a decl so they are handled
     differently here.  We rely on the information in the MEM_ALIGN
     entry to decide upon natural alignment.  */
  if (MEM_P (rtl)
      && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
      && TREE_CONSTANT_POOL_ADDRESS_P (XEXP (rtl, 0))
      && (MEM_ALIGN (rtl) == 0
          || MEM_ALIGN (rtl) < GET_MODE_BITSIZE (GET_MODE (rtl))))
    SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_NOT_NATURALLY_ALIGNED;
}
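
/* Illustration: for a declaration like

     static char buf[3] __attribute__ ((aligned (1)));

   DECL_USER_ALIGN is set and DECL_ALIGN is 8 bits < 16, so the symbol
   receives SYMBOL_FLAG_ALIGN1 (and, since 8 != 24, also
   SYMBOL_FLAG_NOT_NATURALLY_ALIGNED); larl cannot be used for it
   because larl can only produce even addresses.  */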

/* Output thunk to FILE that implements a C++ virtual function call (with
   multiple inheritance) to FUNCTION.  The thunk adjusts the this pointer
   by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
   stored at VCALL_OFFSET in the vtable whose address is located at offset 0
   relative to the resulting this pointer.  */

static void
s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
                      HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
                      tree function)
{
  rtx op[10];
  int nonlocal = 0;

  /* Operand 0 is the target function.  */
  op[0] = XEXP (DECL_RTL (function), 0);
  if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
    {
      nonlocal = 1;
      op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
                              TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
      op[0] = gen_rtx_CONST (Pmode, op[0]);
    }

  /* Operand 1 is the 'this' pointer.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    op[1] = gen_rtx_REG (Pmode, 3);
  else
    op[1] = gen_rtx_REG (Pmode, 2);

  /* Operand 2 is the delta.  */
  op[2] = GEN_INT (delta);

  /* Operand 3 is the vcall_offset.  */
  op[3] = GEN_INT (vcall_offset);

  /* Operand 4 is the temporary register.  */
  op[4] = gen_rtx_REG (Pmode, 1);

  /* Operands 5 to 8 can be used as labels.  */
  op[5] = NULL_RTX;
  op[6] = NULL_RTX;
  op[7] = NULL_RTX;
  op[8] = NULL_RTX;

  /* Operand 9 can be used for temporary register.  */
  op[9] = NULL_RTX;

  /* Generate code.  */
  if (TARGET_64BIT)
    {
      /* Setup literal pool pointer if required.  */
      if ((!DISP_IN_RANGE (delta)
           && !CONST_OK_FOR_K (delta)
           && !CONST_OK_FOR_Os (delta))
          || (!DISP_IN_RANGE (vcall_offset)
              && !CONST_OK_FOR_K (vcall_offset)
              && !CONST_OK_FOR_Os (vcall_offset)))
        {
          op[5] = gen_label_rtx ();
          output_asm_insn ("larl\t%4,%5", op);
        }

      /* Add DELTA to this pointer.  */
      if (delta)
        {
          if (CONST_OK_FOR_J (delta))
            output_asm_insn ("la\t%1,%2(%1)", op);
          else if (DISP_IN_RANGE (delta))
            output_asm_insn ("lay\t%1,%2(%1)", op);
          else if (CONST_OK_FOR_K (delta))
            output_asm_insn ("aghi\t%1,%2", op);
          else if (CONST_OK_FOR_Os (delta))
            output_asm_insn ("agfi\t%1,%2", op);
          else
            {
              op[6] = gen_label_rtx ();
              output_asm_insn ("agf\t%1,%6-%5(%4)", op);
            }
        }

      /* Perform vcall adjustment.  */
      if (vcall_offset)
        {
          if (DISP_IN_RANGE (vcall_offset))
            {
              output_asm_insn ("lg\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,%3(%4)", op);
            }
          else if (CONST_OK_FOR_K (vcall_offset))
            {
              output_asm_insn ("lghi\t%4,%3", op);
              output_asm_insn ("ag\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,0(%4)", op);
            }
          else if (CONST_OK_FOR_Os (vcall_offset))
            {
              output_asm_insn ("lgfi\t%4,%3", op);
              output_asm_insn ("ag\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,0(%4)", op);
            }
          else
            {
              op[7] = gen_label_rtx ();
              output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
              output_asm_insn ("ag\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,0(%4)", op);
            }
        }

      /* Jump to target.  */
      output_asm_insn ("jg\t%0", op);

      /* Output literal pool if required.  */
      if (op[5])
        {
          output_asm_insn (".align\t4", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }
      if (op[6])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[6]));
          output_asm_insn (".long\t%2", op);
        }
      if (op[7])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[7]));
          output_asm_insn (".long\t%3", op);
        }
    }
  else
    {
      /* Setup base pointer if required.  */
      if (!vcall_offset
          || (!DISP_IN_RANGE (delta)
              && !CONST_OK_FOR_K (delta)
              && !CONST_OK_FOR_Os (delta))
          || (!DISP_IN_RANGE (vcall_offset)
              && !CONST_OK_FOR_K (vcall_offset)
              && !CONST_OK_FOR_Os (vcall_offset)))
        {
          op[5] = gen_label_rtx ();
          output_asm_insn ("basr\t%4,0", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }

      /* Add DELTA to this pointer.  */
      if (delta)
        {
          if (CONST_OK_FOR_J (delta))
            output_asm_insn ("la\t%1,%2(%1)", op);
          else if (DISP_IN_RANGE (delta))
            output_asm_insn ("lay\t%1,%2(%1)", op);
          else if (CONST_OK_FOR_K (delta))
            output_asm_insn ("ahi\t%1,%2", op);
          else if (CONST_OK_FOR_Os (delta))
            output_asm_insn ("afi\t%1,%2", op);
          else
            {
              op[6] = gen_label_rtx ();
              output_asm_insn ("a\t%1,%6-%5(%4)", op);
            }
        }

      /* Perform vcall adjustment.  */
      if (vcall_offset)
        {
          if (CONST_OK_FOR_J (vcall_offset))
            {
              output_asm_insn ("l\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,%3(%4)", op);
            }
          else if (DISP_IN_RANGE (vcall_offset))
            {
              output_asm_insn ("l\t%4,0(%1)", op);
              output_asm_insn ("ay\t%1,%3(%4)", op);
            }
          else if (CONST_OK_FOR_K (vcall_offset))
            {
              output_asm_insn ("lhi\t%4,%3", op);
              output_asm_insn ("a\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,0(%4)", op);
            }
          else if (CONST_OK_FOR_Os (vcall_offset))
            {
              output_asm_insn ("iilf\t%4,%3", op);
              output_asm_insn ("a\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,0(%4)", op);
            }
          else
            {
              op[7] = gen_label_rtx ();
              output_asm_insn ("l\t%4,%7-%5(%4)", op);
              output_asm_insn ("a\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,0(%4)", op);
            }

          /* We had to clobber the base pointer register.
             Re-setup the base pointer (with a different base).  */
          op[5] = gen_label_rtx ();
          output_asm_insn ("basr\t%4,0", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }

      /* Jump to target.  */
      op[8] = gen_label_rtx ();

      if (!flag_pic)
        output_asm_insn ("l\t%4,%8-%5(%4)", op);
      else if (!nonlocal)
        output_asm_insn ("a\t%4,%8-%5(%4)", op);
      /* We cannot call through .plt, since .plt requires %r12 loaded.  */
      else if (flag_pic == 1)
        {
          output_asm_insn ("a\t%4,%8-%5(%4)", op);
          output_asm_insn ("l\t%4,%0(%4)", op);
        }
      else if (flag_pic == 2)
        {
          op[9] = gen_rtx_REG (Pmode, 0);
          output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
          output_asm_insn ("a\t%4,%8-%5(%4)", op);
          output_asm_insn ("ar\t%4,%9", op);
          output_asm_insn ("l\t%4,0(%4)", op);
        }

      output_asm_insn ("br\t%4", op);

      /* Output literal pool.  */
      output_asm_insn (".align\t4", op);

      if (nonlocal && flag_pic == 2)
        output_asm_insn (".long\t%0", op);
      if (nonlocal)
        {
          op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
          SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
        }

      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
      if (!flag_pic)
        output_asm_insn (".long\t%0", op);
      else
        output_asm_insn (".long\t%0-%5", op);

      if (op[6])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[6]));
          output_asm_insn (".long\t%2", op);
        }
      if (op[7])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[7]));
          output_asm_insn (".long\t%3", op);
        }
    }
}
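
/* Illustration: a 64-bit thunk with DELTA == 16, VCALL_OFFSET == 0 and
   a locally bound FUNCTION whose 'this' lives in %r2 comes out as just

     la  %r2,16(%r2)
     jg  function

   while a nonzero VCALL_OFFSET additionally loads the vtable pointer
   from 0(%r2) and adds the adjustment found at VCALL_OFFSET in the
   vtable to the this pointer before jumping.  */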

static bool
s390_valid_pointer_mode (enum machine_mode mode)
{
  return (mode == SImode || (TARGET_64BIT && mode == DImode));
}

/* Checks whether the given CALL_EXPR would use a caller
   saved register.  This is used to decide whether sibling call
   optimization could be performed on the respective function
   call.  */

static bool
s390_call_saved_register_used (tree call_expr)
{
  CUMULATIVE_ARGS cum;
  tree parameter;
  enum machine_mode mode;
  tree type;
  rtx parm_rtx;
  int reg, i;

  INIT_CUMULATIVE_ARGS (cum, NULL, NULL, 0, 0);

  for (i = 0; i < call_expr_nargs (call_expr); i++)
    {
      parameter = CALL_EXPR_ARG (call_expr, i);
      gcc_assert (parameter);

      /* For an undeclared variable passed as parameter we will get
         an ERROR_MARK node here.  */
      if (TREE_CODE (parameter) == ERROR_MARK)
        return true;

      type = TREE_TYPE (parameter);
      gcc_assert (type);

      mode = TYPE_MODE (type);
      gcc_assert (mode);

      if (pass_by_reference (&cum, mode, type, true))
        {
          mode = Pmode;
          type = build_pointer_type (type);
        }

      parm_rtx = s390_function_arg (&cum, mode, type, 0);

      s390_function_arg_advance (&cum, mode, type, 0);

      if (parm_rtx && REG_P (parm_rtx))
        {
          for (reg = 0;
               reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
               reg++)
            if (! call_used_regs[reg + REGNO (parm_rtx)])
              return true;
        }
    }
  return false;
}

/* Return true if the given call expression can be
   turned into a sibling call.
   DECL holds the declaration of the function to be called whereas
   EXP is the call expression itself.  */

static bool
s390_function_ok_for_sibcall (tree decl, tree exp)
{
  /* The TPF epilogue uses register 1.  */
  if (TARGET_TPF_PROFILING)
    return false;

  /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
     which would have to be restored before the sibcall.  */
  if (!TARGET_64BIT && flag_pic && decl && !targetm.binds_local_p (decl))
    return false;

  /* Register 6 on s390 is available as an argument register but unfortunately
     "caller saved".  This makes functions needing this register for arguments
     not suitable for sibcalls.  */
  return !s390_call_saved_register_used (exp);
}

/* Return the fixed registers used for condition codes.  */

static bool
s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
{
  *p1 = CC_REGNUM;
  *p2 = INVALID_REGNUM;

  return true;
}

/* This function is used by the call expanders of the machine description.
   It emits the call insn itself together with the necessary operations
   to adjust the target address and returns the emitted insn.
   ADDR_LOCATION is the target address rtx
   TLS_CALL the location of the thread-local symbol
   RESULT_REG the register where the result of the call should be stored
   RETADDR_REG the register where the return address should be stored
               If this parameter is NULL_RTX the call is considered
               to be a sibling call.  */

rtx
s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
                rtx retaddr_reg)
{
  bool plt_call = false;
  rtx insn;
  rtx call;
  rtx clobber;
  rtvec vec;

  /* Direct function calls need special treatment.  */
  if (GET_CODE (addr_location) == SYMBOL_REF)
    {
      /* When calling a global routine in PIC mode, we must
         replace the symbol itself with the PLT stub.  */
      if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
        {
          addr_location = gen_rtx_UNSPEC (Pmode,
                                          gen_rtvec (1, addr_location),
                                          UNSPEC_PLT);
          addr_location = gen_rtx_CONST (Pmode, addr_location);
          plt_call = true;
        }

      /* Unless we can use the bras(l) insn, force the
         routine address into a register.  */
      if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
        {
          if (flag_pic)
            addr_location = legitimize_pic_address (addr_location, 0);
          else
            addr_location = force_reg (Pmode, addr_location);
        }
    }

  /* If it is already an indirect call or the code above moved the
     SYMBOL_REF to somewhere else make sure the address can be found in
     register 1.  */
  if (retaddr_reg == NULL_RTX
      && GET_CODE (addr_location) != SYMBOL_REF
      && !plt_call)
    {
      emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
      addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
    }

  addr_location = gen_rtx_MEM (QImode, addr_location);
  call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);

  if (result_reg != NULL_RTX)
    call = gen_rtx_SET (VOIDmode, result_reg, call);

  if (retaddr_reg != NULL_RTX)
    {
      clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);

      if (tls_call != NULL_RTX)
        vec = gen_rtvec (3, call, clobber,
                         gen_rtx_USE (VOIDmode, tls_call));
      else
        vec = gen_rtvec (2, call, clobber);

      call = gen_rtx_PARALLEL (VOIDmode, vec);
    }

  insn = emit_call_insn (call);

  /* 31-bit PLT stubs and tls calls use the GOT register implicitly.  */
  if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
    {
      /* s390_function_ok_for_sibcall should
         have denied sibcalls in this case.  */
      gcc_assert (retaddr_reg != NULL_RTX);

      use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
    }
  return insn;
}
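
/* The insn built above for a normal (non-sibling) call returning a
   value has, schematically, the shape

     (parallel [(set (reg <result>) (call (mem:QI <addr>) (const_int 0)))
                (clobber (reg <retaddr>))])

   plus a (use <tls_call>) element for TLS calls; sibling calls omit
   the clobber of the return-address register.  */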

/* Implement CONDITIONAL_REGISTER_USAGE.  */

void
s390_conditional_register_usage (void)
{
  int i;

  if (flag_pic)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
  if (TARGET_CPU_ZARCH)
    {
      fixed_regs[BASE_REGNUM] = 0;
      call_used_regs[BASE_REGNUM] = 0;
      fixed_regs[RETURN_REGNUM] = 0;
      call_used_regs[RETURN_REGNUM] = 0;
    }
  if (TARGET_64BIT)
    {
      for (i = 24; i < 32; i++)
        call_used_regs[i] = call_really_used_regs[i] = 0;
    }
  else
    {
      for (i = 18; i < 20; i++)
        call_used_regs[i] = call_really_used_regs[i] = 0;
    }

  if (TARGET_SOFT_FLOAT)
    {
      for (i = 16; i < 32; i++)
        call_used_regs[i] = fixed_regs[i] = 1;
    }
}

/* Corresponding function to eh_return expander.  */

static GTY(()) rtx s390_tpf_eh_return_symbol;
void
s390_emit_tpf_eh_return (rtx target)
{
  rtx insn, reg;

  if (!s390_tpf_eh_return_symbol)
    s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");

  reg = gen_rtx_REG (Pmode, 2);

  emit_move_insn (reg, target);
  insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
                         gen_rtx_REG (Pmode, RETURN_REGNUM));
  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);

  emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
}

/* Rework the prologue/epilogue to avoid saving/restoring
   registers unnecessarily.  */

static void
s390_optimize_prologue (void)
{
  rtx insn, new_insn, next_insn;

  /* Do a final recompute of the frame-related data.  */

  s390_update_frame_layout ();

  /* If all special registers are in fact used, there's nothing we
     can do, so no point in walking the insn list.  */

  if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
      && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
      && (TARGET_CPU_ZARCH
          || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
              && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
    return;

  /* Search for prologue/epilogue insns and replace them.  */

  for (insn = get_insns (); insn; insn = next_insn)
    {
      int first, last, off;
      rtx set, base, offset;

      next_insn = NEXT_INSN (insn);

      if (GET_CODE (insn) != INSN)
        continue;

      if (GET_CODE (PATTERN (insn)) == PARALLEL
          && store_multiple_operation (PATTERN (insn), VOIDmode))
        {
          set = XVECEXP (PATTERN (insn), 0, 0);
          first = REGNO (SET_SRC (set));
          last = first + XVECLEN (PATTERN (insn), 0) - 1;
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (cfun_frame_layout.first_save_gpr != -1
              && (cfun_frame_layout.first_save_gpr < first
                  || cfun_frame_layout.last_save_gpr > last))
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;
          if (first > BASE_REGNUM || last < BASE_REGNUM)
            continue;

          if (cfun_frame_layout.first_save_gpr != -1)
            {
              new_insn = save_gprs (base,
                                    off + (cfun_frame_layout.first_save_gpr
                                           - first) * UNITS_PER_WORD,
                                    cfun_frame_layout.first_save_gpr,
                                    cfun_frame_layout.last_save_gpr);
              new_insn = emit_insn_before (new_insn, insn);
              INSN_ADDRESSES_NEW (new_insn, -1);
            }

          remove_insn (insn);
          continue;
        }

      if (cfun_frame_layout.first_save_gpr == -1
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_SRC (PATTERN (insn))) == REG
          && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
              || (!TARGET_CPU_ZARCH
                  && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
          && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
        {
          set = PATTERN (insn);
          first = REGNO (SET_SRC (set));
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;

          remove_insn (insn);
          continue;
        }

      if (GET_CODE (PATTERN (insn)) == PARALLEL
          && load_multiple_operation (PATTERN (insn), VOIDmode))
        {
          set = XVECEXP (PATTERN (insn), 0, 0);
          first = REGNO (SET_DEST (set));
          last = first + XVECLEN (PATTERN (insn), 0) - 1;
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (cfun_frame_layout.first_restore_gpr != -1
              && (cfun_frame_layout.first_restore_gpr < first
                  || cfun_frame_layout.last_restore_gpr > last))
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;
          if (first > BASE_REGNUM || last < BASE_REGNUM)
            continue;

          if (cfun_frame_layout.first_restore_gpr != -1)
            {
              new_insn = restore_gprs (base,
                                       off + (cfun_frame_layout.first_restore_gpr
                                              - first) * UNITS_PER_WORD,
                                       cfun_frame_layout.first_restore_gpr,
                                       cfun_frame_layout.last_restore_gpr);
              new_insn = emit_insn_before (new_insn, insn);
              INSN_ADDRESSES_NEW (new_insn, -1);
            }

          remove_insn (insn);
          continue;
        }

      if (cfun_frame_layout.first_restore_gpr == -1
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_DEST (PATTERN (insn))) == REG
          && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
              || (!TARGET_CPU_ZARCH
                  && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
          && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
        {
          set = PATTERN (insn);
          first = REGNO (SET_DEST (set));
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;

          remove_insn (insn);
          continue;
        }
    }
}
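
/* Illustration: if the prologue conservatively emitted

     stmg  %r12,%r15,96(%r15)

   but the final frame layout shows that only %r14 and %r15 need to be
   saved, the loop above replaces the store multiple with

     stmg  %r14,%r15,112(%r15)

   (96 + (14 - 12) * UNITS_PER_WORD), and the corresponding lmg in the
   epilogue is narrowed the same way.  */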

/* Perform machine-dependent processing.  */

static void
s390_reorg (void)
{
  bool pool_overflow = false;

  /* Make sure all splits have been performed; splits after
     machine_dependent_reorg might confuse insn length counts.  */
  split_all_insns_noflow ();

  /* From here on decomposed literal pool addresses must be accepted.  */
  cfun->machine->decomposed_literal_pool_addresses_ok_p = true;

  /* Install the main literal pool and the associated base
     register load insns.

     In addition, there are two problematic situations we need
     to correct:

     - the literal pool might be > 4096 bytes in size, so that
       some of its elements cannot be directly accessed

     - a branch target might be > 64K away from the branch, so that
       it is not possible to use a PC-relative instruction.

     To fix those, we split the single literal pool into multiple
     pool chunks, reloading the pool base register at various
     points throughout the function to ensure it always points to
     the pool chunk the following code expects, and / or replace
     PC-relative branches by absolute branches.

     However, the two problems are interdependent: splitting the
     literal pool can move a branch further away from its target,
     causing the 64K limit to overflow, and on the other hand,
     replacing a PC-relative branch by an absolute branch means
     we need to put the branch target address into the literal
     pool, possibly causing it to overflow.

     So, we loop trying to fix up both problems until we manage
     to satisfy both conditions at the same time.  Note that the
     loop is guaranteed to terminate as every pass of the loop
     strictly decreases the total number of PC-relative branches
     in the function.  (This is not completely true as there
     might be branch-over-pool insns introduced by chunkify_start.
     Those never need to be split however.)  */

  for (;;)
    {
      struct constant_pool *pool = NULL;

      /* Collect the literal pool.  */
      if (!pool_overflow)
        {
          pool = s390_mainpool_start ();
          if (!pool)
            pool_overflow = true;
        }

      /* If literal pool overflowed, start to chunkify it.  */
      if (pool_overflow)
        pool = s390_chunkify_start ();

      /* Split out-of-range branches.  If this has created new
         literal pool entries, cancel current chunk list and
         recompute it.  zSeries machines have large branch
         instructions, so we never need to split a branch.  */
      if (!TARGET_CPU_ZARCH && s390_split_branches ())
        {
          if (pool_overflow)
            s390_chunkify_cancel (pool);
          else
            s390_mainpool_cancel (pool);

          continue;
        }

      /* If we made it up to here, both conditions are satisfied.
         Finish up literal pool related changes.  */
      if (pool_overflow)
        s390_chunkify_finish (pool);
      else
        s390_mainpool_finish (pool);

      /* We're done splitting branches.  */
      cfun->machine->split_branches_pending_p = false;
      break;
    }

  /* Generate out-of-pool execute target insns.  */
  if (TARGET_CPU_ZARCH)
    {
      rtx insn, label, target;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          label = s390_execute_label (insn);
          if (!label)
            continue;

          gcc_assert (label != const0_rtx);

          target = emit_label (XEXP (label, 0));
          INSN_ADDRESSES_NEW (target, -1);

          target = emit_insn (s390_execute_target (insn));
          INSN_ADDRESSES_NEW (target, -1);
        }
    }

  /* Try to optimize prologue and epilogue further.  */
  s390_optimize_prologue ();
}


/* Initialize GCC target structure.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER s390_assemble_integer

#undef TARGET_ASM_OPEN_PAREN
#define TARGET_ASM_OPEN_PAREN ""

#undef TARGET_ASM_CLOSE_PAREN
#define TARGET_ASM_CLOSE_PAREN ""

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_FUSED_MADD)
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION s390_handle_option

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO s390_encode_section_info

#ifdef HAVE_AS_TLS
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS true
#endif
#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY s390_return_in_memory

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS s390_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN s390_expand_builtin

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE s390_issue_rate
#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead

#undef TARGET_CANNOT_COPY_INSN_P
#define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS s390_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST s390_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG s390_reorg

#undef TARGET_VALID_POINTER_MODE
#define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START s390_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg

#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE s390_pass_by_reference

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall

#undef TARGET_FIXED_CONDITION_CODE_REGS
#define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs

#undef TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE s390_cc_modes_compatible

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP hook_constcharptr_const_rtx_null

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL s390_output_dwarf_dtprel
#endif

#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE s390_mangle_type
#endif

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD s390_secondary_reload

#undef TARGET_LIBGCC_CMP_RETURN_MODE
#define TARGET_LIBGCC_CMP_RETURN_MODE s390_libgcc_cmp_return_mode

#undef TARGET_LIBGCC_SHIFT_COUNT_MODE
#define TARGET_LIBGCC_SHIFT_COUNT_MODE s390_libgcc_shift_count_mode

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-s390.h"