4673c1a0 1/* Subroutines used for code generation on IBM S/390 and zSeries
3072d30e 2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
3 2007 Free Software Foundation, Inc.
4673c1a0 4 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
0c0a8ea5 5 Ulrich Weigand (uweigand@de.ibm.com).
4673c1a0 6
1e98c8f3 7This file is part of GCC.
4673c1a0 8
1e98c8f3 9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
11Software Foundation; either version 2, or (at your option) any later
12version.
4673c1a0 13
1e98c8f3 14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
4673c1a0 18
19You should have received a copy of the GNU General Public License
1e98c8f3 20along with GCC; see the file COPYING. If not, write to the Free
dbddc6c4 21Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2202110-1301, USA. */
4673c1a0 23
24#include "config.h"
4673c1a0 25#include "system.h"
805e22b2 26#include "coretypes.h"
27#include "tm.h"
4673c1a0 28#include "rtl.h"
29#include "tree.h"
30#include "tm_p.h"
31#include "regs.h"
32#include "hard-reg-set.h"
33#include "real.h"
34#include "insn-config.h"
35#include "conditions.h"
36#include "output.h"
37#include "insn-attr.h"
38#include "flags.h"
39#include "except.h"
40#include "function.h"
41#include "recog.h"
42#include "expr.h"
c10847b9 43#include "reload.h"
4673c1a0 44#include "toplev.h"
45#include "basic-block.h"
8b4a4127 46#include "integrate.h"
4673c1a0 47#include "ggc.h"
48#include "target.h"
49#include "target-def.h"
7baa5366 50#include "debug.h"
a1f71e15 51#include "langhooks.h"
4fbc4db5 52#include "optabs.h"
d93e0d9f 53#include "tree-gimple.h"
3072d30e 54#include "df.h"
4673c1a0 55
a3e33162 56
18925d38 57/* Define the specific costs for a given cpu. */
58
59struct processor_costs
60{
260075cc 61 /* multiplication */
18925d38 62 const int m; /* cost of an M instruction. */
63 const int mghi; /* cost of an MGHI instruction. */
64 const int mh; /* cost of an MH instruction. */
65 const int mhi; /* cost of an MHI instruction. */
9cd3f3e6 66 const int ml; /* cost of an ML instruction. */
18925d38 67 const int mr; /* cost of an MR instruction. */
68 const int ms; /* cost of an MS instruction. */
69 const int msg; /* cost of an MSG instruction. */
70 const int msgf; /* cost of an MSGF instruction. */
71 const int msgfr; /* cost of an MSGFR instruction. */
72 const int msgr; /* cost of an MSGR instruction. */
73 const int msr; /* cost of an MSR instruction. */
74 const int mult_df; /* cost of multiplication in DFmode. */
429f9fdb 75 const int mxbr; /* cost of an MXBR instruction. */
260075cc 76 /* square root */
429f9fdb 77 const int sqxbr; /* cost of square root in TFmode. */
9cd3f3e6 78 const int sqdbr; /* cost of square root in DFmode. */
79 const int sqebr; /* cost of square root in SFmode. */
260075cc 80 /* multiply and add */
d95e38cf 81 const int madbr; /* cost of multiply and add in DFmode. */
82 const int maebr; /* cost of multiply and add in SFmode. */
260075cc 83 /* division */
429f9fdb 84 const int dxbr; /* cost of a DXBR instruction. */
260075cc 85 const int ddbr; /* cost of a DDBR instruction. */
260075cc 86 const int debr; /* cost of a DEBR instruction. */
3f074425 87 const int dlgr; /* cost of a DLGR instruction. */
 88 const int dlr; /* cost of a DLR instruction. */
 89 const int dr; /* cost of a DR instruction. */
 90 const int dsgfr; /* cost of a DSGFR instruction. */
 91 const int dsgr; /* cost of a DSGR instruction. */
18925d38 92};
93
94const struct processor_costs *s390_cost;
95
96static const
97struct processor_costs z900_cost =
98{
99 COSTS_N_INSNS (5), /* M */
100 COSTS_N_INSNS (10), /* MGHI */
101 COSTS_N_INSNS (5), /* MH */
102 COSTS_N_INSNS (4), /* MHI */
9cd3f3e6 103 COSTS_N_INSNS (5), /* ML */
18925d38 104 COSTS_N_INSNS (5), /* MR */
105 COSTS_N_INSNS (4), /* MS */
106 COSTS_N_INSNS (15), /* MSG */
107 COSTS_N_INSNS (7), /* MSGF */
108 COSTS_N_INSNS (7), /* MSGFR */
109 COSTS_N_INSNS (10), /* MSGR */
110 COSTS_N_INSNS (4), /* MSR */
111 COSTS_N_INSNS (7), /* multiplication in DFmode */
429f9fdb 112 COSTS_N_INSNS (13), /* MXBR */
113 COSTS_N_INSNS (136), /* SQXBR */
9cd3f3e6 114 COSTS_N_INSNS (44), /* SQDBR */
115 COSTS_N_INSNS (35), /* SQEBR */
d95e38cf 116 COSTS_N_INSNS (18), /* MADBR */
117 COSTS_N_INSNS (13), /* MAEBR */
429f9fdb 118 COSTS_N_INSNS (134), /* DXBR */
260075cc 119 COSTS_N_INSNS (30), /* DDBR */
260075cc 120 COSTS_N_INSNS (27), /* DEBR */
3f074425 121 COSTS_N_INSNS (220), /* DLGR */
122 COSTS_N_INSNS (34), /* DLR */
123 COSTS_N_INSNS (34), /* DR */
124 COSTS_N_INSNS (32), /* DSGFR */
125 COSTS_N_INSNS (32), /* DSGR */
18925d38 126};
127
128static const
129struct processor_costs z990_cost =
130{
131 COSTS_N_INSNS (4), /* M */
132 COSTS_N_INSNS (2), /* MGHI */
133 COSTS_N_INSNS (2), /* MH */
134 COSTS_N_INSNS (2), /* MHI */
9cd3f3e6 135 COSTS_N_INSNS (4), /* ML */
18925d38 136 COSTS_N_INSNS (4), /* MR */
137 COSTS_N_INSNS (5), /* MS */
138 COSTS_N_INSNS (6), /* MSG */
139 COSTS_N_INSNS (4), /* MSGF */
140 COSTS_N_INSNS (4), /* MSGFR */
141 COSTS_N_INSNS (4), /* MSGR */
142 COSTS_N_INSNS (4), /* MSR */
143 COSTS_N_INSNS (1), /* multiplication in DFmode */
429f9fdb 144 COSTS_N_INSNS (28), /* MXBR */
145 COSTS_N_INSNS (130), /* SQXBR */
9cd3f3e6 146 COSTS_N_INSNS (66), /* SQDBR */
147 COSTS_N_INSNS (38), /* SQEBR */
d95e38cf 148 COSTS_N_INSNS (1), /* MADBR */
149 COSTS_N_INSNS (1), /* MAEBR */
429f9fdb 150 COSTS_N_INSNS (60), /* DXBR */
260075cc 151 COSTS_N_INSNS (40), /* DDBR */
095798e3 152 COSTS_N_INSNS (26), /* DEBR */
3f074425 153 COSTS_N_INSNS (176), /* DLGR */
154 COSTS_N_INSNS (31), /* DLR */
155 COSTS_N_INSNS (31), /* DR */
156 COSTS_N_INSNS (31), /* DSGFR */
157 COSTS_N_INSNS (31), /* DSGR */
18925d38 158};
159
163277cf 160static const
161struct processor_costs z9_109_cost =
162{
163 COSTS_N_INSNS (4), /* M */
164 COSTS_N_INSNS (2), /* MGHI */
165 COSTS_N_INSNS (2), /* MH */
166 COSTS_N_INSNS (2), /* MHI */
167 COSTS_N_INSNS (4), /* ML */
168 COSTS_N_INSNS (4), /* MR */
169 COSTS_N_INSNS (5), /* MS */
170 COSTS_N_INSNS (6), /* MSG */
171 COSTS_N_INSNS (4), /* MSGF */
172 COSTS_N_INSNS (4), /* MSGFR */
173 COSTS_N_INSNS (4), /* MSGR */
174 COSTS_N_INSNS (4), /* MSR */
175 COSTS_N_INSNS (1), /* multiplication in DFmode */
429f9fdb 176 COSTS_N_INSNS (28), /* MXBR */
177 COSTS_N_INSNS (130), /* SQXBR */
163277cf 178 COSTS_N_INSNS (66), /* SQDBR */
179 COSTS_N_INSNS (38), /* SQEBR */
180 COSTS_N_INSNS (1), /* MADBR */
181 COSTS_N_INSNS (1), /* MAEBR */
429f9fdb 182 COSTS_N_INSNS (60), /* DXBR */
163277cf 183 COSTS_N_INSNS (40), /* DDBR */
095798e3 184 COSTS_N_INSNS (26), /* DEBR */
163277cf 185 COSTS_N_INSNS (30), /* DLGR */
186 COSTS_N_INSNS (23), /* DLR */
187 COSTS_N_INSNS (23), /* DR */
188 COSTS_N_INSNS (24), /* DSGFR */
189 COSTS_N_INSNS (24), /* DSGR */
190};
18925d38 191
4673c1a0 192extern int reload_completed;
193
4673c1a0 194/* Save information from a "cmpxx" operation until the branch or scc is
195 emitted. */
196rtx s390_compare_op0, s390_compare_op1;
197
891e3096 198/* Save the result of a compare_and_swap until the branch or scc is
199 emitted. */
200rtx s390_compare_emitted = NULL_RTX;
201
56769981 202/* Structure used to hold the components of an S/390 memory
203 address. A legitimate address on S/390 is of the general
204 form
205 base + index + displacement
206 where any of the components is optional.
207
208 base and index are registers of the class ADDR_REGS,
209 displacement is an unsigned 12-bit immediate constant. */
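/* For illustration: in S/390 assembly an RX-format operand is written
   D2(X2,B2), so e.g. "L %r1,200(%r2,%r3)" addresses base r3 + index r2
   + displacement 200; in terms of this structure that is
   base = (reg 3), indx = (reg 2), disp = (const_int 200).  */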
4673c1a0 210
211struct s390_address
212{
213 rtx base;
214 rtx indx;
215 rtx disp;
e5537457 216 bool pointer;
05b58257 217 bool literal_pool;
4673c1a0 218};
219
33096195 220/* Which cpu we are tuning for. */
28ee8079 221enum processor_type s390_tune = PROCESSOR_max;
b8c0043c 222enum processor_flags s390_tune_flags;
95ae2fd6 223/* Which instruction set architecture to use. */
224enum processor_type s390_arch;
b8c0043c 225enum processor_flags s390_arch_flags;
cbb300e8 226
227HOST_WIDE_INT s390_warn_framesize = 0;
cbb300e8 228HOST_WIDE_INT s390_stack_size = 0;
229HOST_WIDE_INT s390_stack_guard = 0;
230
67928721 231/* The following structure is embedded in the machine
232 specific part of struct function. */
233
234struct s390_frame_layout GTY (())
235{
236 /* Offset within stack frame. */
237 HOST_WIDE_INT gprs_offset;
238 HOST_WIDE_INT f0_offset;
239 HOST_WIDE_INT f4_offset;
240 HOST_WIDE_INT f8_offset;
241 HOST_WIDE_INT backchain_offset;
5214e6ae 242
 243 /* Numbers of the first and last gpr for which slots in the
 244 register save area are reserved. */
245 int first_save_gpr_slot;
246 int last_save_gpr_slot;
247
5a5e802f 248 /* Numbers of the first and last gpr to be saved and restored. */
8b4a4127 249 int first_save_gpr;
250 int first_restore_gpr;
251 int last_save_gpr;
beee1f75 252 int last_restore_gpr;
8b4a4127 253
67928721 254 /* Bits standing for floating point registers. Set, if the
255 respective register has to be saved. Starting with reg 16 (f0)
256 at the rightmost bit.
257 Bit 15 - 8 7 6 5 4 3 2 1 0
258 fpr 15 - 8 7 5 3 1 6 4 2 0
259 reg 31 - 24 23 22 21 20 19 18 17 16 */
260 unsigned int fpr_bitmap;
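  /* For illustration, following the mapping above: if f4 and f8 have to
     be saved, fpr_bitmap == (1 << 2) | (1 << 8), since bit 2 stands for
     f4 and bits 8 to 15 map linearly to f8 to f15.  */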
261
262 /* Number of floating point registers f8-f15 which must be saved. */
263 int high_fprs;
264
9bee2845 265 /* Set if return address needs to be saved.
266 This flag is set by s390_return_addr_rtx if it could not use
267 the initial value of r14 and therefore depends on r14 saved
268 to the stack. */
67928721 269 bool save_return_addr_p;
270
5a5e802f 271 /* Size of stack frame. */
8b4a4127 272 HOST_WIDE_INT frame_size;
67928721 273};
274
275/* Define the structure for the machine field in struct function. */
276
277struct machine_function GTY(())
278{
279 struct s390_frame_layout frame_layout;
be00aaa8 280
20074f87 281 /* Literal pool base register. */
282 rtx base_reg;
283
4fed3f99 284 /* True if we may need to perform branch splitting. */
285 bool split_branches_pending_p;
286
86b779d2 287 /* True during final stage of literal pool processing. */
288 bool decomposed_literal_pool_addresses_ok_p;
289
be00aaa8 290 /* Some local-dynamic TLS symbol name. */
291 const char *some_ld_name;
1e639cb0 292
293 bool has_landing_pad_p;
8b4a4127 294};
295
67928721 296/* Few accessor macros for struct cfun->machine->s390_frame_layout. */
297
298#define cfun_frame_layout (cfun->machine->frame_layout)
299#define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
5214e6ae 300#define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr_slot - \
301 cfun_frame_layout.first_save_gpr_slot + 1) * UNITS_PER_WORD)
67928721 302#define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |= \
303 (1 << (BITNUM)))
304#define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap & \
305 (1 << (BITNUM))))
306
6902d973 307/* Number of GPRs and FPRs used for argument passing. */
308#define GP_ARG_NUM_REG 5
309#define FP_ARG_NUM_REG (TARGET_64BIT? 4 : 2)
310
cb888f33 311/* A couple of shortcuts. */
312#define CONST_OK_FOR_J(x) \
313 CONST_OK_FOR_CONSTRAINT_P((x), 'J', "J")
314#define CONST_OK_FOR_K(x) \
315 CONST_OK_FOR_CONSTRAINT_P((x), 'K', "K")
163277cf 316#define CONST_OK_FOR_Os(x) \
317 CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Os")
318#define CONST_OK_FOR_Op(x) \
319 CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Op")
320#define CONST_OK_FOR_On(x) \
321 CONST_OK_FOR_CONSTRAINT_P((x), 'O', "On")
cb888f33 322
8f1128bb 323#define REGNO_PAIR_OK(REGNO, MODE) \
324 (HARD_REGNO_NREGS ((REGNO), (MODE)) == 1 || !((REGNO) & 1))
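/* For illustration: REGNO_PAIR_OK requires a value that needs two hard
   registers, e.g. DImode in a general register pair on 31-bit targets,
   to start in an even-numbered register so that it occupies a valid
   even/odd pair; single-register values are accepted at any REGNO.  */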
325
0ef89dfd 326static enum machine_mode
327s390_libgcc_cmp_return_mode (void)
328{
329 return TARGET_64BIT ? DImode : SImode;
330}
331
332static enum machine_mode
333s390_libgcc_shift_count_mode (void)
334{
335 return TARGET_64BIT ? DImode : SImode;
336}
337
36868490 338/* Return true if the back end supports mode MODE. */
339static bool
340s390_scalar_mode_supported_p (enum machine_mode mode)
341{
342 if (DECIMAL_FLOAT_MODE_P (mode))
343 return true;
344 else
345 return default_scalar_mode_supported_p (mode);
346}
347
1e639cb0 348/* Set the has_landing_pad_p flag in struct machine_function to VALUE. */
349
350void
351s390_set_has_landing_pad_p (bool value)
352{
353 cfun->machine->has_landing_pad_p = value;
354}
6902d973 355
9c93d843 356/* If two condition code modes are compatible, return a condition code
357 mode which is compatible with both. Otherwise, return
358 VOIDmode. */
359
360static enum machine_mode
361s390_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
362{
363 if (m1 == m2)
364 return m1;
365
366 switch (m1)
367 {
368 case CCZmode:
369 if (m2 == CCUmode || m2 == CCTmode || m2 == CCZ1mode
370 || m2 == CCSmode || m2 == CCSRmode || m2 == CCURmode)
371 return m2;
372 return VOIDmode;
373
374 case CCSmode:
375 case CCUmode:
376 case CCTmode:
377 case CCSRmode:
378 case CCURmode:
379 case CCZ1mode:
380 if (m2 == CCZmode)
381 return m1;
382
383 return VOIDmode;
384
385 default:
386 return VOIDmode;
387 }
388 return VOIDmode;
389}
390
56769981 391/* Return true if SET either doesn't set the CC register, or else
f81e845f 392 the source and destination have matching CC modes and that
56769981 393 CC mode is at least as constrained as REQ_MODE. */
f81e845f 394
e5537457 395static bool
b40da9a7 396s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
4673c1a0 397{
56769981 398 enum machine_mode set_mode;
4673c1a0 399
32eda510 400 gcc_assert (GET_CODE (set) == SET);
4673c1a0 401
402 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
403 return 1;
404
405 set_mode = GET_MODE (SET_DEST (set));
406 switch (set_mode)
407 {
4673c1a0 408 case CCSmode:
c6821d1c 409 case CCSRmode:
4673c1a0 410 case CCUmode:
c6821d1c 411 case CCURmode:
2eb8fe23 412 case CCLmode:
c6821d1c 413 case CCL1mode:
414 case CCL2mode:
3b699fc7 415 case CCL3mode:
c6821d1c 416 case CCT1mode:
417 case CCT2mode:
418 case CCT3mode:
419 if (req_mode != set_mode)
2eb8fe23 420 return 0;
421 break;
c6821d1c 422
4673c1a0 423 case CCZmode:
c6821d1c 424 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
425 && req_mode != CCSRmode && req_mode != CCURmode)
4673c1a0 426 return 0;
427 break;
3c482144 428
429 case CCAPmode:
430 case CCANmode:
431 if (req_mode != CCAmode)
432 return 0;
433 break;
f81e845f 434
4673c1a0 435 default:
32eda510 436 gcc_unreachable ();
4673c1a0 437 }
f81e845f 438
4673c1a0 439 return (GET_MODE (SET_SRC (set)) == set_mode);
440}
441
f81e845f 442/* Return true if every SET in INSN that sets the CC register
443 has source and destination with matching CC modes and that
444 CC mode is at least as constrained as REQ_MODE.
c6821d1c 445 If REQ_MODE is VOIDmode, always return false. */
f81e845f 446
e5537457 447bool
b40da9a7 448s390_match_ccmode (rtx insn, enum machine_mode req_mode)
4673c1a0 449{
450 int i;
451
c6821d1c 452 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
453 if (req_mode == VOIDmode)
e5537457 454 return false;
c6821d1c 455
4673c1a0 456 if (GET_CODE (PATTERN (insn)) == SET)
457 return s390_match_ccmode_set (PATTERN (insn), req_mode);
458
459 if (GET_CODE (PATTERN (insn)) == PARALLEL)
460 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
461 {
462 rtx set = XVECEXP (PATTERN (insn), 0, i);
463 if (GET_CODE (set) == SET)
464 if (!s390_match_ccmode_set (set, req_mode))
e5537457 465 return false;
4673c1a0 466 }
467
e5537457 468 return true;
4673c1a0 469}
470
f81e845f 471/* If a test-under-mask instruction can be used to implement
c6821d1c 472 (compare (and ... OP1) OP2), return the CC mode required
f81e845f 473 to do that. Otherwise, return VOIDmode.
c6821d1c 474 MIXED is true if the instruction can distinguish between
 475 CC1 and CC2 for mixed selected bits (TMxx); it is false
476 if the instruction cannot (TM). */
477
478enum machine_mode
e5537457 479s390_tm_ccmode (rtx op1, rtx op2, bool mixed)
c6821d1c 480{
481 int bit0, bit1;
482
483 /* ??? Fixme: should work on CONST_DOUBLE as well. */
484 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
485 return VOIDmode;
486
eeba5f25 487 /* Selected bits all zero: CC0.
488 e.g.: int a; if ((a & (16 + 128)) == 0) */
c6821d1c 489 if (INTVAL (op2) == 0)
490 return CCTmode;
491
eeba5f25 492 /* Selected bits all one: CC3.
493 e.g.: int a; if ((a & (16 + 128)) == 16 + 128) */
c6821d1c 494 if (INTVAL (op2) == INTVAL (op1))
495 return CCT3mode;
496
eeba5f25 497 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. e.g.:
498 int a;
499 if ((a & (16 + 128)) == 16) -> CCT1
500 if ((a & (16 + 128)) == 128) -> CCT2 */
c6821d1c 501 if (mixed)
502 {
503 bit1 = exact_log2 (INTVAL (op2));
504 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
505 if (bit0 != -1 && bit1 != -1)
506 return bit0 > bit1 ? CCT1mode : CCT2mode;
507 }
508
509 return VOIDmode;
510}
511
f81e845f 512/* Given a comparison code OP (EQ, NE, etc.) and the operands
513 OP0 and OP1 of a COMPARE, return the mode to be used for the
2eb8fe23 514 comparison. */
515
516enum machine_mode
b40da9a7 517s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
2eb8fe23 518{
519 switch (code)
520 {
521 case EQ:
522 case NE:
9be33ca2 523 if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
524 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
525 return CCAPmode;
3c482144 526 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
cb888f33 527 && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
3c482144 528 return CCAPmode;
e9fd5349 529 if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
530 || GET_CODE (op1) == NEG)
531 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
2eb8fe23 532 return CCLmode;
533
c6821d1c 534 if (GET_CODE (op0) == AND)
535 {
536 /* Check whether we can potentially do it via TM. */
537 enum machine_mode ccmode;
538 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
539 if (ccmode != VOIDmode)
540 {
541 /* Relax CCTmode to CCZmode to allow fall-back to AND
542 if that turns out to be beneficial. */
543 return ccmode == CCTmode ? CCZmode : ccmode;
544 }
545 }
546
f81e845f 547 if (register_operand (op0, HImode)
c6821d1c 548 && GET_CODE (op1) == CONST_INT
549 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
550 return CCT3mode;
f81e845f 551 if (register_operand (op0, QImode)
c6821d1c 552 && GET_CODE (op1) == CONST_INT
553 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
554 return CCT3mode;
555
2eb8fe23 556 return CCZmode;
557
558 case LE:
559 case LT:
560 case GE:
561 case GT:
eeba5f25 562 /* The only overflow condition of NEG and ABS happens when
 563 INT_MIN is used as operand: negating it wraps back to INT_MIN,
 564 so we have an overflow from a positive value to a negative.
 565 Using CCAP mode the resulting cc can be used for comparisons. */
9be33ca2 566 if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
567 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
568 return CCAPmode;
eeba5f25 569
570 /* If constants are involved in an add instruction it is possible to use
 571 the resulting cc for comparisons with zero. If the sign of the
0975351b 572 constant is known, the overflow behavior becomes predictable. e.g.:
eeba5f25 573 int a, b; if ((b = a + c) > 0)
574 with c as a constant value: c < 0 -> CCAN and c >= 0 -> CCAP */
9be33ca2 575 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
cb888f33 576 && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
9be33ca2 577 {
578 if (INTVAL (XEXP((op0), 1)) < 0)
579 return CCANmode;
580 else
581 return CCAPmode;
582 }
583 /* Fall through. */
2eb8fe23 584 case UNORDERED:
585 case ORDERED:
586 case UNEQ:
587 case UNLE:
588 case UNLT:
589 case UNGE:
590 case UNGT:
591 case LTGT:
c6821d1c 592 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
593 && GET_CODE (op1) != CONST_INT)
594 return CCSRmode;
2eb8fe23 595 return CCSmode;
596
2eb8fe23 597 case LTU:
598 case GEU:
e9fd5349 599 if (GET_CODE (op0) == PLUS
600 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
c6821d1c 601 return CCL1mode;
602
603 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
604 && GET_CODE (op1) != CONST_INT)
605 return CCURmode;
606 return CCUmode;
607
608 case LEU:
2eb8fe23 609 case GTU:
e9fd5349 610 if (GET_CODE (op0) == MINUS
611 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
c6821d1c 612 return CCL2mode;
613
614 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
615 && GET_CODE (op1) != CONST_INT)
616 return CCURmode;
2eb8fe23 617 return CCUmode;
618
619 default:
32eda510 620 gcc_unreachable ();
2eb8fe23 621 }
622}
623
ebe32bb0 624/* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
625 that we can implement more efficiently. */
626
627void
628s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
629{
630 /* Convert ZERO_EXTRACT back to AND to enable TM patterns. */
631 if ((*code == EQ || *code == NE)
632 && *op1 == const0_rtx
633 && GET_CODE (*op0) == ZERO_EXTRACT
634 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
635 && GET_CODE (XEXP (*op0, 2)) == CONST_INT
636 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
637 {
638 rtx inner = XEXP (*op0, 0);
639 HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
640 HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
641 HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));
642
643 if (len > 0 && len < modesize
644 && pos >= 0 && pos + len <= modesize
645 && modesize <= HOST_BITS_PER_WIDE_INT)
646 {
647 unsigned HOST_WIDE_INT block;
648 block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
649 block <<= modesize - pos - len;
650
651 *op0 = gen_rtx_AND (GET_MODE (inner), inner,
652 gen_int_mode (block, GET_MODE (inner)));
653 }
654 }
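  /* Worked example for the conversion above: with an SImode INNER,
     LEN 8 and POS 8 give block = 0xff << (32 - 8 - 8) = 0x00ff0000;
     the ZERO_EXTRACT bit position thus counts from the most
     significant bit.  */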
655
656 /* Narrow AND of memory against immediate to enable TM. */
657 if ((*code == EQ || *code == NE)
658 && *op1 == const0_rtx
659 && GET_CODE (*op0) == AND
660 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
661 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
662 {
663 rtx inner = XEXP (*op0, 0);
664 rtx mask = XEXP (*op0, 1);
665
666 /* Ignore paradoxical SUBREGs if all extra bits are masked out. */
667 if (GET_CODE (inner) == SUBREG
668 && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
669 && (GET_MODE_SIZE (GET_MODE (inner))
670 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
671 && ((INTVAL (mask)
672 & GET_MODE_MASK (GET_MODE (inner))
673 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
674 == 0))
675 inner = SUBREG_REG (inner);
676
677 /* Do not change volatile MEMs. */
678 if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
679 {
680 int part = s390_single_part (XEXP (*op0, 1),
681 GET_MODE (inner), QImode, 0);
682 if (part >= 0)
683 {
684 mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
685 inner = adjust_address_nv (inner, QImode, part);
686 *op0 = gen_rtx_AND (QImode, inner, mask);
687 }
688 }
689 }
690
691 /* Narrow comparisons against 0xffff to HImode if possible. */
ebe32bb0 692 if ((*code == EQ || *code == NE)
693 && GET_CODE (*op1) == CONST_INT
694 && INTVAL (*op1) == 0xffff
695 && SCALAR_INT_MODE_P (GET_MODE (*op0))
696 && (nonzero_bits (*op0, GET_MODE (*op0))
697 & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
698 {
699 *op0 = gen_lowpart (HImode, *op0);
700 *op1 = constm1_rtx;
701 }
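  /* For illustration: (eq:SI x (const_int 0xffff)) with the upper bits
     of X known to be zero is rewritten here into an HImode comparison
     of the low word against constant -1.  */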
80b53886 702
27784c70 703 /* Remove redundant UNSPEC_CCU_TO_INT conversions if possible. */
80b53886 704 if (GET_CODE (*op0) == UNSPEC
27784c70 705 && XINT (*op0, 1) == UNSPEC_CCU_TO_INT
80b53886 706 && XVECLEN (*op0, 0) == 1
707 && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
708 && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
709 && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
710 && *op1 == const0_rtx)
711 {
712 enum rtx_code new_code = UNKNOWN;
713 switch (*code)
714 {
715 case EQ: new_code = EQ; break;
716 case NE: new_code = NE; break;
dd16a4bd 717 case LT: new_code = GTU; break;
718 case GT: new_code = LTU; break;
719 case LE: new_code = GEU; break;
720 case GE: new_code = LEU; break;
80b53886 721 default: break;
722 }
723
724 if (new_code != UNKNOWN)
725 {
726 *op0 = XVECEXP (*op0, 0, 0);
727 *code = new_code;
728 }
729 }
9c93d843 730
27784c70 731 /* Remove redundant UNSPEC_CCZ_TO_INT conversions if possible. */
732 if (GET_CODE (*op0) == UNSPEC
733 && XINT (*op0, 1) == UNSPEC_CCZ_TO_INT
734 && XVECLEN (*op0, 0) == 1
735 && GET_MODE (XVECEXP (*op0, 0, 0)) == CCZmode
736 && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
737 && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
738 && *op1 == const0_rtx)
739 {
740 enum rtx_code new_code = UNKNOWN;
741 switch (*code)
742 {
743 case EQ: new_code = EQ; break;
744 case NE: new_code = NE; break;
745 default: break;
746 }
747
748 if (new_code != UNKNOWN)
749 {
750 *op0 = XVECEXP (*op0, 0, 0);
751 *code = new_code;
752 }
753 }
754
9c93d843 755 /* Simplify cascaded EQ, NE with const0_rtx. */
756 if ((*code == NE || *code == EQ)
757 && (GET_CODE (*op0) == EQ || GET_CODE (*op0) == NE)
758 && GET_MODE (*op0) == SImode
759 && GET_MODE (XEXP (*op0, 0)) == CCZ1mode
760 && REG_P (XEXP (*op0, 0))
761 && XEXP (*op0, 1) == const0_rtx
762 && *op1 == const0_rtx)
763 {
764 if ((*code == EQ && GET_CODE (*op0) == NE)
765 || (*code == NE && GET_CODE (*op0) == EQ))
766 *code = EQ;
767 else
768 *code = NE;
769 *op0 = XEXP (*op0, 0);
770 }
a0631a8a 771
772 /* Prefer register over memory as first operand. */
773 if (MEM_P (*op0) && REG_P (*op1))
774 {
775 rtx tem = *op0; *op0 = *op1; *op1 = tem;
776 *code = swap_condition (*code);
777 }
ebe32bb0 778}
779
0d656e8b 780/* Emit a compare instruction suitable to implement the comparison
781 OP0 CODE OP1. Return the correct condition RTL to be placed in
782 the IF_THEN_ELSE of the conditional branch testing the result. */
783
784rtx
785s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
786{
787 enum machine_mode mode = s390_select_ccmode (code, op0, op1);
891e3096 788 rtx ret = NULL_RTX;
0d656e8b 789
891e3096 790 /* Do not output a redundant compare instruction if a compare_and_swap
9c93d843 791 pattern already computed the result and the machine modes are compatible. */
792 if (s390_compare_emitted
793 && (s390_cc_modes_compatible (GET_MODE (s390_compare_emitted), mode)
794 == GET_MODE (s390_compare_emitted)))
891e3096 795 ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx);
796 else
797 {
798 rtx cc = gen_rtx_REG (mode, CC_REGNUM);
799
800 emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
801 ret = gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
802 }
803 s390_compare_emitted = NULL_RTX;
804 return ret;
0d656e8b 805}
806
db1f11e3 807/* Emit a SImode compare and swap instruction that sets MEM to NEW if
 808 the previous contents of MEM (returned in OLD) match CMP.
809 Return the correct condition RTL to be placed in the IF_THEN_ELSE of the
810 conditional branch testing the result. */
811
812static rtx
813s390_emit_compare_and_swap (enum rtx_code code, rtx old, rtx mem, rtx cmp, rtx new)
814{
815 rtx ret;
816
817 emit_insn (gen_sync_compare_and_swap_ccsi (old, mem, cmp, new));
818 ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx);
819
820 s390_compare_emitted = NULL_RTX;
821
822 return ret;
823}
824
0d656e8b 825/* Emit a jump instruction to TARGET. If COND is NULL_RTX, emit an
826 unconditional jump, else a conditional jump under condition COND. */
827
828void
829s390_emit_jump (rtx target, rtx cond)
830{
831 rtx insn;
832
833 target = gen_rtx_LABEL_REF (VOIDmode, target);
834 if (cond)
835 target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);
836
837 insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
838 emit_jump_insn (insn);
839}
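/* For illustration, a typical use combines the two routines above:
   emitting a branch to LABEL taken when OP0 equals OP1 can be written

     s390_emit_jump (label, s390_emit_compare (EQ, op0, op1));

   where label, op0 and op1 stand for whatever rtxes the caller
   has at hand.  */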
840
f81e845f 841/* Return branch condition mask to implement a branch
80b53886 842 specified by CODE. Return -1 for invalid comparisons. */
2eb8fe23 843
8cc5de33 844int
b40da9a7 845s390_branch_condition_mask (rtx code)
f81e845f 846{
2eb8fe23 847 const int CC0 = 1 << 3;
848 const int CC1 = 1 << 2;
849 const int CC2 = 1 << 1;
850 const int CC3 = 1 << 0;
851
32eda510 852 gcc_assert (GET_CODE (XEXP (code, 0)) == REG);
853 gcc_assert (REGNO (XEXP (code, 0)) == CC_REGNUM);
854 gcc_assert (XEXP (code, 1) == const0_rtx);
2eb8fe23 855
856 switch (GET_MODE (XEXP (code, 0)))
857 {
858 case CCZmode:
9c93d843 859 case CCZ1mode:
2eb8fe23 860 switch (GET_CODE (code))
861 {
862 case EQ: return CC0;
863 case NE: return CC1 | CC2 | CC3;
80b53886 864 default: return -1;
2eb8fe23 865 }
866 break;
867
c6821d1c 868 case CCT1mode:
869 switch (GET_CODE (code))
870 {
871 case EQ: return CC1;
872 case NE: return CC0 | CC2 | CC3;
80b53886 873 default: return -1;
c6821d1c 874 }
875 break;
876
877 case CCT2mode:
878 switch (GET_CODE (code))
879 {
880 case EQ: return CC2;
881 case NE: return CC0 | CC1 | CC3;
80b53886 882 default: return -1;
c6821d1c 883 }
884 break;
885
886 case CCT3mode:
887 switch (GET_CODE (code))
888 {
889 case EQ: return CC3;
890 case NE: return CC0 | CC1 | CC2;
80b53886 891 default: return -1;
c6821d1c 892 }
893 break;
894
2eb8fe23 895 case CCLmode:
896 switch (GET_CODE (code))
897 {
898 case EQ: return CC0 | CC2;
899 case NE: return CC1 | CC3;
80b53886 900 default: return -1;
c6821d1c 901 }
902 break;
903
904 case CCL1mode:
905 switch (GET_CODE (code))
906 {
907 case LTU: return CC2 | CC3; /* carry */
908 case GEU: return CC0 | CC1; /* no carry */
80b53886 909 default: return -1;
c6821d1c 910 }
911 break;
912
913 case CCL2mode:
914 switch (GET_CODE (code))
915 {
916 case GTU: return CC0 | CC1; /* borrow */
917 case LEU: return CC2 | CC3; /* no borrow */
80b53886 918 default: return -1;
2eb8fe23 919 }
920 break;
921
3b699fc7 922 case CCL3mode:
923 switch (GET_CODE (code))
924 {
925 case EQ: return CC0 | CC2;
926 case NE: return CC1 | CC3;
927 case LTU: return CC1;
928 case GTU: return CC3;
929 case LEU: return CC1 | CC2;
930 case GEU: return CC2 | CC3;
80b53886 931 default: return -1;
3b699fc7 932 }
933
2eb8fe23 934 case CCUmode:
935 switch (GET_CODE (code))
936 {
937 case EQ: return CC0;
938 case NE: return CC1 | CC2 | CC3;
939 case LTU: return CC1;
940 case GTU: return CC2;
941 case LEU: return CC0 | CC1;
942 case GEU: return CC0 | CC2;
80b53886 943 default: return -1;
2eb8fe23 944 }
945 break;
946
c6821d1c 947 case CCURmode:
948 switch (GET_CODE (code))
949 {
950 case EQ: return CC0;
951 case NE: return CC2 | CC1 | CC3;
952 case LTU: return CC2;
953 case GTU: return CC1;
954 case LEU: return CC0 | CC2;
955 case GEU: return CC0 | CC1;
80b53886 956 default: return -1;
c6821d1c 957 }
958 break;
959
3c482144 960 case CCAPmode:
961 switch (GET_CODE (code))
962 {
963 case EQ: return CC0;
964 case NE: return CC1 | CC2 | CC3;
965 case LT: return CC1 | CC3;
966 case GT: return CC2;
967 case LE: return CC0 | CC1 | CC3;
968 case GE: return CC0 | CC2;
80b53886 969 default: return -1;
3c482144 970 }
971 break;
972
973 case CCANmode:
974 switch (GET_CODE (code))
975 {
976 case EQ: return CC0;
977 case NE: return CC1 | CC2 | CC3;
978 case LT: return CC1;
979 case GT: return CC2 | CC3;
980 case LE: return CC0 | CC1;
981 case GE: return CC0 | CC2 | CC3;
80b53886 982 default: return -1;
3c482144 983 }
984 break;
985
2eb8fe23 986 case CCSmode:
987 switch (GET_CODE (code))
988 {
989 case EQ: return CC0;
990 case NE: return CC1 | CC2 | CC3;
991 case LT: return CC1;
992 case GT: return CC2;
993 case LE: return CC0 | CC1;
994 case GE: return CC0 | CC2;
995 case UNORDERED: return CC3;
996 case ORDERED: return CC0 | CC1 | CC2;
997 case UNEQ: return CC0 | CC3;
998 case UNLT: return CC1 | CC3;
999 case UNGT: return CC2 | CC3;
1000 case UNLE: return CC0 | CC1 | CC3;
1001 case UNGE: return CC0 | CC2 | CC3;
1002 case LTGT: return CC1 | CC2;
80b53886 1003 default: return -1;
2eb8fe23 1004 }
c6821d1c 1005 break;
1006
1007 case CCSRmode:
1008 switch (GET_CODE (code))
1009 {
1010 case EQ: return CC0;
1011 case NE: return CC2 | CC1 | CC3;
1012 case LT: return CC2;
1013 case GT: return CC1;
1014 case LE: return CC0 | CC2;
1015 case GE: return CC0 | CC1;
1016 case UNORDERED: return CC3;
1017 case ORDERED: return CC0 | CC2 | CC1;
1018 case UNEQ: return CC0 | CC3;
1019 case UNLT: return CC2 | CC3;
1020 case UNGT: return CC1 | CC3;
1021 case UNLE: return CC0 | CC2 | CC3;
1022 case UNGE: return CC0 | CC1 | CC3;
1023 case LTGT: return CC2 | CC1;
80b53886 1024 default: return -1;
c6821d1c 1025 }
1026 break;
2eb8fe23 1027
1028 default:
80b53886 1029 return -1;
2eb8fe23 1030 }
1031}
1032
f81e845f 1033/* If INV is false, return assembler mnemonic string to implement
1034 a branch specified by CODE. If INV is true, return mnemonic
2eb8fe23 1035 for the corresponding inverted branch. */
1036
1037static const char *
b40da9a7 1038s390_branch_condition_mnemonic (rtx code, int inv)
2eb8fe23 1039{
c8834c5f 1040 static const char *const mnemonic[16] =
2eb8fe23 1041 {
1042 NULL, "o", "h", "nle",
1043 "l", "nhe", "lh", "ne",
1044 "e", "nlh", "he", "nl",
1045 "le", "nh", "no", NULL
1046 };
1047
1048 int mask = s390_branch_condition_mask (code);
80b53886 1049 gcc_assert (mask >= 0);
2eb8fe23 1050
1051 if (inv)
1052 mask ^= 15;
1053
32eda510 1054 gcc_assert (mask >= 1 && mask <= 14);
2eb8fe23 1055
1056 return mnemonic[mask];
1057}
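/* For illustration: an EQ test in CCSmode yields the mask CC0 == 8,
   so s390_branch_condition_mnemonic returns "e" (branch on equal);
   with INV set the mask becomes 8 ^ 15 == 7, giving "ne".  */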
1058
64a1078f 1059/* Return the part of OP which has a value different from DEF.
 1060 The size of the part is determined by MODE.
f588eb9f 1061 Use this function only if you already know that OP really
64a1078f 1062 contains such a part. */
8b4a4127 1063
64a1078f 1064unsigned HOST_WIDE_INT
1065s390_extract_part (rtx op, enum machine_mode mode, int def)
8b4a4127 1066{
64a1078f 1067 unsigned HOST_WIDE_INT value = 0;
1068 int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
1069 int part_bits = GET_MODE_BITSIZE (mode);
0451e449 1070 unsigned HOST_WIDE_INT part_mask
1071 = ((unsigned HOST_WIDE_INT)1 << part_bits) - 1;
64a1078f 1072 int i;
f588eb9f 1073
64a1078f 1074 for (i = 0; i < max_parts; i++)
8b4a4127 1075 {
64a1078f 1076 if (i == 0)
1077 value = (unsigned HOST_WIDE_INT) INTVAL (op);
8b4a4127 1078 else
64a1078f 1079 value >>= part_bits;
f588eb9f 1080
64a1078f 1081 if ((value & part_mask) != (def & part_mask))
1082 return value & part_mask;
8b4a4127 1083 }
f588eb9f 1084
32eda510 1085 gcc_unreachable ();
8b4a4127 1086}
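/* For illustration: applied to the constant 0x12340000 with
   MODE == HImode and DEF == 0, the loop above skips the all-zero low
   halfword and returns 0x1234 in the second iteration.  */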
1087
1088/* If OP is an integer constant of mode MODE with exactly one
64a1078f 1089 part of mode PART_MODE unequal to DEF, return the number of that
1090 part. Otherwise, return -1. */
8b4a4127 1091
1092int
f588eb9f 1093s390_single_part (rtx op,
1094 enum machine_mode mode,
64a1078f 1095 enum machine_mode part_mode,
1096 int def)
1097{
1098 unsigned HOST_WIDE_INT value = 0;
1099 int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
0451e449 1100 unsigned HOST_WIDE_INT part_mask
1101 = ((unsigned HOST_WIDE_INT)1 << GET_MODE_BITSIZE (part_mode)) - 1;
64a1078f 1102 int i, part = -1;
1103
1104 if (GET_CODE (op) != CONST_INT)
1105 return -1;
f588eb9f 1106
64a1078f 1107 for (i = 0; i < n_parts; i++)
1108 {
1109 if (i == 0)
1110 value = (unsigned HOST_WIDE_INT) INTVAL (op);
8b4a4127 1111 else
64a1078f 1112 value >>= GET_MODE_BITSIZE (part_mode);
f588eb9f 1113
64a1078f 1114 if ((value & part_mask) != (def & part_mask))
1115 {
1116 if (part != -1)
1117 return -1;
1118 else
1119 part = i;
1120 }
8b4a4127 1121 }
64a1078f 1122 return part == -1 ? -1 : n_parts - 1 - part;
8b4a4127 1123}
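/* For illustration: for OP == 0x12340000, MODE == SImode,
   PART_MODE == HImode and DEF == 0, only the high halfword differs
   from DEF, so the function returns 0, the number of that part
   counted from the most significant end.  */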
1124
f81e845f 1125/* Check whether we can (and want to) split a double-word
1126 move in mode MODE from SRC to DST into two single-word
66795431 1127 moves, moving the subword FIRST_SUBWORD first. */
1128
1129bool
b40da9a7 1130s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
66795431 1131{
1132 /* Floating point registers cannot be split. */
1133 if (FP_REG_P (src) || FP_REG_P (dst))
1134 return false;
1135
1fc184ee 1136 /* We don't need to split if operands are directly accessible. */
66795431 1137 if (s_operand (src, mode) || s_operand (dst, mode))
1138 return false;
1139
1140 /* Non-offsettable memory references cannot be split. */
1141 if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
1142 || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
1143 return false;
1144
1145 /* Moving the first subword must not clobber a register
1146 needed to move the second subword. */
1147 if (register_operand (dst, mode))
1148 {
1149 rtx subreg = operand_subword (dst, first_subword, 0, mode);
1150 if (reg_overlap_mentioned_p (subreg, src))
1151 return false;
1152 }
1153
1154 return true;
1155}
1156
74bdf297 1157/* Return true if it can be proven that [MEM1, MEM1 + SIZE]
 1158 and [MEM2, MEM2 + SIZE] overlap, and false
1159 otherwise. */
1160
1161bool
1162s390_overlap_p (rtx mem1, rtx mem2, HOST_WIDE_INT size)
1163{
1164 rtx addr1, addr2, addr_delta;
1165 HOST_WIDE_INT delta;
1166
1167 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
1168 return true;
1169
1170 if (size == 0)
1171 return false;
1172
1173 addr1 = XEXP (mem1, 0);
1174 addr2 = XEXP (mem2, 0);
1175
1176 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
1177
1178 /* This overlapping check is used by peepholes merging memory block operations.
1179 Overlapping operations would otherwise be recognized by the S/390 hardware
1180 and would fall back to a slower implementation. Allowing overlapping
1181 operations would lead to slow code but not to wrong code. Therefore we are
d8eac3bc 1182 somewhat optimistic if we cannot prove that the memory blocks are
74bdf297 1183 overlapping.
1184 That's why we return false here although this may accept operations on
1185 overlapping memory areas. */
1186 if (!addr_delta || GET_CODE (addr_delta) != CONST_INT)
1187 return false;
1188
1189 delta = INTVAL (addr_delta);
1190
1191 if (delta == 0
1192 || (delta > 0 && delta < size)
1193 || (delta < 0 && -delta < size))
1194 return true;
1195
1196 return false;
1197}
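/* For illustration: if ADDR2 simplifies to ADDR1 + 8 and SIZE is 16,
   the delta of 8 is smaller than the block size and the function
   returns true; if the delta cannot be computed at compile time it
   returns false, as explained in the comment above.  */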
1198
9dffd3ff 1199/* Check whether the address of memory reference MEM2 equals exactly
1200 the address of memory reference MEM1 plus DELTA. Return true if
1201 we can prove this to be the case, false otherwise. */
1202
1203bool
1204s390_offset_p (rtx mem1, rtx mem2, rtx delta)
1205{
1206 rtx addr1, addr2, addr_delta;
1207
1208 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
1209 return false;
1210
1211 addr1 = XEXP (mem1, 0);
1212 addr2 = XEXP (mem2, 0);
1213
1214 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
1215 if (!addr_delta || !rtx_equal_p (addr_delta, delta))
1216 return false;
1217
1218 return true;
1219}
1220
3e247a31 1221/* Expand logical operator CODE in mode MODE with operands OPERANDS. */
1222
1223void
1224s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
1225 rtx *operands)
1226{
1227 enum machine_mode wmode = mode;
1228 rtx dst = operands[0];
1229 rtx src1 = operands[1];
1230 rtx src2 = operands[2];
1231 rtx op, clob, tem;
1232
1233 /* If we cannot handle the operation directly, use a temp register. */
1234 if (!s390_logical_operator_ok_p (operands))
1235 dst = gen_reg_rtx (mode);
1236
1237 /* QImode and HImode patterns make sense only if we have a destination
1238 in memory. Otherwise perform the operation in SImode. */
1239 if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
1240 wmode = SImode;
1241
1242 /* Widen operands if required. */
1243 if (mode != wmode)
1244 {
1245 if (GET_CODE (dst) == SUBREG
1246 && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
1247 dst = tem;
1248 else if (REG_P (dst))
1249 dst = gen_rtx_SUBREG (wmode, dst, 0);
1250 else
1251 dst = gen_reg_rtx (wmode);
1252
1253 if (GET_CODE (src1) == SUBREG
1254 && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
1255 src1 = tem;
1256 else if (GET_MODE (src1) != VOIDmode)
1257 src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);
1258
1259 if (GET_CODE (src2) == SUBREG
1260 && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
1261 src2 = tem;
1262 else if (GET_MODE (src2) != VOIDmode)
1263 src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
1264 }
1265
1266 /* Emit the instruction. */
1267 op = gen_rtx_SET (VOIDmode, dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
1268 clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
1269 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));
1270
1271 /* Fix up the destination if needed. */
1272 if (dst != operands[0])
1273 emit_move_insn (operands[0], gen_lowpart (mode, dst));
1274}
1275
1276/* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR). */
1277
1278bool
1279s390_logical_operator_ok_p (rtx *operands)
1280{
1281 /* If the destination operand is in memory, it needs to coincide
1282 with one of the source operands. After reload, it has to be
1283 the first source operand. */
1284 if (GET_CODE (operands[0]) == MEM)
1285 return rtx_equal_p (operands[0], operands[1])
1286 || (!reload_completed && rtx_equal_p (operands[0], operands[2]));
1287
1288 return true;
1289}
1290
3f56e755 1291/* Narrow logical operation CODE of memory operand MEMOP with immediate
1292 operand IMMOP to switch from SS to SI type instructions. */
1293
1294void
1295s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
1296{
1297 int def = code == AND ? -1 : 0;
1298 HOST_WIDE_INT mask;
1299 int part;
1300
1301 gcc_assert (GET_CODE (*memop) == MEM);
1302 gcc_assert (!MEM_VOLATILE_P (*memop));
1303
1304 mask = s390_extract_part (*immop, QImode, def);
1305 part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
1306 gcc_assert (part >= 0);
1307
1308 *memop = adjust_address (*memop, QImode, part);
1309 *immop = gen_int_mode (mask, QImode);
1310}
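/* For illustration: an AND of an SImode memory operand with the
   immediate 0xffffff0f has exactly one byte differing from the AND
   default of all ones, so the operation is narrowed to a QImode AND
   of the low-order byte (big-endian offset 3) with mask 0x0f, which
   can be carried out by an SI-type instruction such as NI.  */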
1311
2eb8fe23 1312
875862bf 1313/* How to allocate a 'struct machine_function'. */
1314
1315static struct machine_function *
1316s390_init_machine_status (void)
1317{
1318 return ggc_alloc_cleared (sizeof (struct machine_function));
1319}
1320
f81e845f 1321/* Change optimizations to be performed, depending on the
56769981 1322 optimization level.
1323
1324 LEVEL is the optimization level specified; 2 if `-O2' is
1325 specified, 1 if `-O' is specified, and 0 if neither is specified.
1326
808a491c 1327 SIZE is nonzero if `-Os' is specified and zero otherwise. */
4673c1a0 1328
1329void
b40da9a7 1330optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
4673c1a0 1331{
25da9e9f 1332 /* ??? There are apparently still problems with -fcaller-saves. */
1333 flag_caller_saves = 0;
cf351342 1334
1335 /* By default, always emit DWARF-2 unwind info. This allows debugging
1336 without maintaining a stack frame back-chain. */
1337 flag_asynchronous_unwind_tables = 1;
4fa65e2f 1338
1339 /* Use MVCLE instructions to decrease code size if requested. */
1340 if (size != 0)
1341 target_flags |= MASK_MVCLE;
4673c1a0 1342}
1343
28ee8079 1344/* Return true if ARG is the name of a processor. Set *TYPE and *FLAGS
1345 to the associated processor_type and processor_flags if so. */
1346
1347static bool
1348s390_handle_arch_option (const char *arg,
1349 enum processor_type *type,
1350 enum processor_flags *flags)
8b4a4127 1351{
95ae2fd6 1352 static struct pta
1353 {
1354 const char *const name; /* processor name or nickname. */
1355 const enum processor_type processor;
b8c0043c 1356 const enum processor_flags flags;
95ae2fd6 1357 }
1358 const processor_alias_table[] =
1359 {
b8c0043c 1360 {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
1361 {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
1362 {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
f81e845f 1363 {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
b8c0043c 1364 | PF_LONG_DISPLACEMENT},
163277cf 1365 {"z9-109", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
1366 | PF_LONG_DISPLACEMENT | PF_EXTIMM},
7818a08e 1367 {"z9-ec", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
1368 | PF_LONG_DISPLACEMENT | PF_EXTIMM | PF_DFP },
95ae2fd6 1369 };
28ee8079 1370 size_t i;
1371
1372 for (i = 0; i < ARRAY_SIZE (processor_alias_table); i++)
1373 if (strcmp (arg, processor_alias_table[i].name) == 0)
1374 {
1375 *type = processor_alias_table[i].processor;
1376 *flags = processor_alias_table[i].flags;
1377 return true;
1378 }
1379 return false;
1380}
1381
1382/* Implement TARGET_HANDLE_OPTION. */
95ae2fd6 1383
28ee8079 1384static bool
1385s390_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
1386{
1387 switch (code)
1388 {
1389 case OPT_march_:
28ee8079 1390 return s390_handle_arch_option (arg, &s390_arch, &s390_arch_flags);
1391
1392 case OPT_mstack_guard_:
1393 if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_guard) != 1)
1394 return false;
1395 if (exact_log2 (s390_stack_guard) == -1)
1396 error ("stack guard value must be an exact power of 2");
1397 return true;
1398
1399 case OPT_mstack_size_:
1400 if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_size) != 1)
1401 return false;
1402 if (exact_log2 (s390_stack_size) == -1)
1403 error ("stack size must be an exact power of 2");
1404 return true;
1405
1406 case OPT_mtune_:
1407 return s390_handle_arch_option (arg, &s390_tune, &s390_tune_flags);
1408
1409 case OPT_mwarn_framesize_:
1410 return sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_warn_framesize) == 1;
1411
1412 default:
1413 return true;
1414 }
1415}
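/* For illustration: "-march=z9-109" arrives here as OPT_march_ with
   ARG "z9-109" and sets s390_arch and s390_arch_flags through
   s390_handle_arch_option, while "-mtune=z990" only changes the
   tuning variables s390_tune and s390_tune_flags.  */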
95ae2fd6 1416
28ee8079 1417void
1418override_options (void)
1419{
5a5e802f 1420 /* Set up function hooks. */
1421 init_machine_status = s390_init_machine_status;
b8c0043c 1422
1423 /* Architecture mode defaults according to ABI. */
1424 if (!(target_flags_explicit & MASK_ZARCH))
1425 {
1426 if (TARGET_64BIT)
1427 target_flags |= MASK_ZARCH;
1428 else
1429 target_flags &= ~MASK_ZARCH;
1430 }
1431
1432 /* Determine processor architectural level. */
95ae2fd6 1433 if (!s390_arch_string)
28ee8079 1434 {
1435 s390_arch_string = TARGET_ZARCH? "z900" : "g5";
1436 s390_handle_arch_option (s390_arch_string, &s390_arch, &s390_arch_flags);
1437 }
95ae2fd6 1438
b8c0043c 1439 /* Determine processor to tune for. */
28ee8079 1440 if (s390_tune == PROCESSOR_max)
95ae2fd6 1441 {
b8c0043c 1442 s390_tune = s390_arch;
1443 s390_tune_flags = s390_arch_flags;
95ae2fd6 1444 }
1445
b8c0043c 1446 /* Sanity checks. */
7818a08e 1447 if (TARGET_ZARCH && !TARGET_CPU_ZARCH)
3284a242 1448 error ("z/Architecture mode not supported on %s", s390_arch_string);
b8c0043c 1449 if (TARGET_64BIT && !TARGET_ZARCH)
3284a242 1450 error ("64-bit ABI not supported in ESA/390 mode");
cbb300e8 1451
7818a08e 1452 if (TARGET_HARD_DFP && (!TARGET_CPU_DFP || !TARGET_ZARCH))
1453 {
1454 if (target_flags_explicit & MASK_SOFT_DFP)
1455 {
1456 if (!TARGET_CPU_DFP)
1457 error ("Hardware decimal floating point instructions"
1458 " not available on %s", s390_arch_string);
1459 if (!TARGET_ZARCH)
1460 error ("Hardware decimal floating point instructions"
1461 " not available in ESA/390 mode");
1462 }
1463 else
1464 target_flags |= MASK_SOFT_DFP;
1465 }
1466
1467 if ((target_flags_explicit & MASK_SOFT_FLOAT) && TARGET_SOFT_FLOAT)
1468 {
1469 if ((target_flags_explicit & MASK_SOFT_DFP) && TARGET_HARD_DFP)
1470 error ("-mhard-dfp can't be used in conjunction with -msoft-float");
1471
1472 target_flags |= MASK_SOFT_DFP;
1473 }
1474
18925d38 1475 /* Set processor cost function. */
163277cf 1476 if (s390_tune == PROCESSOR_2094_Z9_109)
1477 s390_cost = &z9_109_cost;
1478 else if (s390_tune == PROCESSOR_2084_Z990)
18925d38 1479 s390_cost = &z990_cost;
1480 else
1481 s390_cost = &z900_cost;
163277cf 1482
5724df29 1483 if (TARGET_BACKCHAIN && TARGET_PACKED_STACK && TARGET_HARD_FLOAT)
1484 error ("-mbackchain -mpacked-stack -mhard-float are not supported "
3284a242 1485 "in combination");
5724df29 1486
28ee8079 1487 if (s390_stack_size)
cbb300e8 1488 {
00d233e6 1489 if (s390_stack_guard >= s390_stack_size)
28ee8079 1490 error ("stack size must be greater than the stack guard value");
a4e6ef04 1491 else if (s390_stack_size > 1 << 16)
1492 error ("stack size must not be greater than 64k");
cbb300e8 1493 }
28ee8079 1494 else if (s390_stack_guard)
cbb300e8 1495 error ("-mstack-guard implies use of -mstack-size");
2dde0cc6 1496
1497#ifdef TARGET_DEFAULT_LONG_DOUBLE_128
1498 if (!(target_flags_explicit & MASK_LONG_DOUBLE_128))
1499 target_flags |= MASK_LONG_DOUBLE_128;
1500#endif
5a5e802f 1501}
4673c1a0 1502
1503/* Map for smallest class containing reg regno. */
1504
c8834c5f 1505const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
4673c1a0 1506{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1507 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1508 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1509 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1510 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1511 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1512 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1513 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
923cf36d 1514 ADDR_REGS, CC_REGS, ADDR_REGS, ADDR_REGS,
1515 ACCESS_REGS, ACCESS_REGS
4673c1a0 1516};
1517
71343e6b 1518/* Return the attribute type of INSN. */
1519
1520static enum attr_type
b40da9a7 1521s390_safe_attr_type (rtx insn)
71343e6b 1522{
1523 if (recog_memoized (insn) >= 0)
1524 return get_attr_type (insn);
1525 else
1526 return TYPE_NONE;
1527}
4673c1a0 1528
51aa1e9c 1529/* Return true if DISP is a valid short displacement. */
1530
e5537457 1531static bool
b40da9a7 1532s390_short_displacement (rtx disp)
51aa1e9c 1533{
1534 /* No displacement is OK. */
1535 if (!disp)
e5537457 1536 return true;
51aa1e9c 1537
1538 /* Integer displacement in range. */
1539 if (GET_CODE (disp) == CONST_INT)
1540 return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;
1541
1542 /* GOT offset is not OK, the GOT can be large. */
1543 if (GET_CODE (disp) == CONST
1544 && GET_CODE (XEXP (disp, 0)) == UNSPEC
a6e4e903 1545 && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
1546 || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
e5537457 1547 return false;
51aa1e9c 1548
1549 /* All other symbolic constants are literal pool references,
1550 which are OK as the literal pool must be small. */
1551 if (GET_CODE (disp) == CONST)
e5537457 1552 return true;
51aa1e9c 1553
e5537457 1554 return false;
51aa1e9c 1555}
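/* For illustration: (const_int 4095) is accepted above, while
   (const_int 4096) or any negative value is rejected, matching the
   unsigned 12-bit displacement field of the short instruction
   formats.  */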
1556
875862bf 1557/* Decompose an RTL expression ADDR for a memory address into
1558 its components, returned in OUT.
a5004c3d 1559
e5537457 1560 Returns false if ADDR is not a valid memory address, true
875862bf 1561 otherwise. If OUT is NULL, don't return the components,
1562 but check for validity only.
a5004c3d 1563
875862bf 1564 Note: Only addresses in canonical form are recognized.
1565 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1566 canonical form so that they will be recognized. */
64a1078f 1567
875862bf 1568static int
edd89d66 1569s390_decompose_address (rtx addr, struct s390_address *out)
875862bf 1570{
1571 HOST_WIDE_INT offset = 0;
1572 rtx base = NULL_RTX;
1573 rtx indx = NULL_RTX;
1574 rtx disp = NULL_RTX;
1575 rtx orig_disp;
e5537457 1576 bool pointer = false;
1577 bool base_ptr = false;
1578 bool indx_ptr = false;
05b58257 1579 bool literal_pool = false;
1580
1581 /* We may need to substitute the literal pool base register into the address
1582 below. However, at this point we do not know which register is going to
1583 be used as base, so we substitute the arg pointer register. This is going
1584 to be treated as holding a pointer below -- it shouldn't be used for any
1585 other purpose. */
1586 rtx fake_pool_base = gen_rtx_REG (Pmode, ARG_POINTER_REGNUM);
3f56e755 1587
875862bf 1588 /* Decompose address into base + index + displacement. */
3f56e755 1589
875862bf 1590 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
1591 base = addr;
3f56e755 1592
875862bf 1593 else if (GET_CODE (addr) == PLUS)
6b1c8423 1594 {
875862bf 1595 rtx op0 = XEXP (addr, 0);
1596 rtx op1 = XEXP (addr, 1);
1597 enum rtx_code code0 = GET_CODE (op0);
1598 enum rtx_code code1 = GET_CODE (op1);
6b1c8423 1599
875862bf 1600 if (code0 == REG || code0 == UNSPEC)
1601 {
1602 if (code1 == REG || code1 == UNSPEC)
1603 {
1604 indx = op0; /* index + base */
1605 base = op1;
1606 }
6b1c8423 1607
875862bf 1608 else
1609 {
1610 base = op0; /* base + displacement */
1611 disp = op1;
1612 }
1613 }
a5004c3d 1614
875862bf 1615 else if (code0 == PLUS)
51aa1e9c 1616 {
875862bf 1617 indx = XEXP (op0, 0); /* index + base + disp */
1618 base = XEXP (op0, 1);
1619 disp = op1;
51aa1e9c 1620 }
51aa1e9c 1621
875862bf 1622 else
51aa1e9c 1623 {
e5537457 1624 return false;
51aa1e9c 1625 }
875862bf 1626 }
51aa1e9c 1627
875862bf 1628 else
1629 disp = addr; /* displacement */
51aa1e9c 1630
875862bf 1631 /* Extract integer part of displacement. */
1632 orig_disp = disp;
1633 if (disp)
1634 {
1635 if (GET_CODE (disp) == CONST_INT)
51aa1e9c 1636 {
875862bf 1637 offset = INTVAL (disp);
1638 disp = NULL_RTX;
51aa1e9c 1639 }
875862bf 1640 else if (GET_CODE (disp) == CONST
1641 && GET_CODE (XEXP (disp, 0)) == PLUS
1642 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
1643 {
1644 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
1645 disp = XEXP (XEXP (disp, 0), 0);
1646 }
1647 }
51aa1e9c 1648
875862bf 1649 /* Strip off CONST here to avoid special case tests later. */
1650 if (disp && GET_CODE (disp) == CONST)
1651 disp = XEXP (disp, 0);
63ebd742 1652
875862bf 1653 /* We can convert literal pool addresses to
1654 displacements by basing them off the base register. */
1655 if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
1656 {
1657 /* Either base or index must be free to hold the base register. */
1658 if (!base)
05b58257 1659 base = fake_pool_base, literal_pool = true;
875862bf 1660 else if (!indx)
05b58257 1661 indx = fake_pool_base, literal_pool = true;
875862bf 1662 else
e5537457 1663 return false;
875862bf 1664
1665 /* Mark up the displacement. */
1666 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
1667 UNSPEC_LTREL_OFFSET);
51aa1e9c 1668 }
a5004c3d 1669
875862bf 1670 /* Validate base register. */
1671 if (base)
1672 {
1673 if (GET_CODE (base) == UNSPEC)
1674 switch (XINT (base, 1))
1675 {
1676 case UNSPEC_LTREF:
1677 if (!disp)
1678 disp = gen_rtx_UNSPEC (Pmode,
1679 gen_rtvec (1, XVECEXP (base, 0, 0)),
1680 UNSPEC_LTREL_OFFSET);
1681 else
e5537457 1682 return false;
a5004c3d 1683
05b58257 1684 base = XVECEXP (base, 0, 1);
875862bf 1685 break;
64a1078f 1686
875862bf 1687 case UNSPEC_LTREL_BASE:
05b58257 1688 if (XVECLEN (base, 0) == 1)
1689 base = fake_pool_base, literal_pool = true;
1690 else
1691 base = XVECEXP (base, 0, 1);
875862bf 1692 break;
64a1078f 1693
875862bf 1694 default:
e5537457 1695 return false;
875862bf 1696 }
64a1078f 1697
1e280623 1698 if (!REG_P (base)
1699 || (GET_MODE (base) != SImode
1700 && GET_MODE (base) != Pmode))
e5537457 1701 return false;
875862bf 1702
05b58257 1703 if (REGNO (base) == STACK_POINTER_REGNUM
875862bf 1704 || REGNO (base) == FRAME_POINTER_REGNUM
1705 || ((reload_completed || reload_in_progress)
1706 && frame_pointer_needed
1707 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
1708 || REGNO (base) == ARG_POINTER_REGNUM
1709 || (flag_pic
1710 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
e5537457 1711 pointer = base_ptr = true;
05b58257 1712
1713 if ((reload_completed || reload_in_progress)
1714 && base == cfun->machine->base_reg)
1715 pointer = base_ptr = literal_pool = true;
875862bf 1716 }
1717
1718 /* Validate index register. */
1719 if (indx)
64a1078f 1720 {
875862bf 1721 if (GET_CODE (indx) == UNSPEC)
1722 switch (XINT (indx, 1))
1723 {
1724 case UNSPEC_LTREF:
1725 if (!disp)
1726 disp = gen_rtx_UNSPEC (Pmode,
1727 gen_rtvec (1, XVECEXP (indx, 0, 0)),
1728 UNSPEC_LTREL_OFFSET);
1729 else
e5537457 1730 return false;
64a1078f 1731
05b58257 1732 indx = XVECEXP (indx, 0, 1);
875862bf 1733 break;
64a1078f 1734
875862bf 1735 case UNSPEC_LTREL_BASE:
05b58257 1736 if (XVECLEN (indx, 0) == 1)
1737 indx = fake_pool_base, literal_pool = true;
1738 else
1739 indx = XVECEXP (indx, 0, 1);
875862bf 1740 break;
64a1078f 1741
875862bf 1742 default:
e5537457 1743 return false;
875862bf 1744 }
64a1078f 1745
1e280623 1746 if (!REG_P (indx)
1747 || (GET_MODE (indx) != SImode
1748 && GET_MODE (indx) != Pmode))
e5537457 1749 return false;
64a1078f 1750
05b58257 1751 if (REGNO (indx) == STACK_POINTER_REGNUM
875862bf 1752 || REGNO (indx) == FRAME_POINTER_REGNUM
1753 || ((reload_completed || reload_in_progress)
1754 && frame_pointer_needed
1755 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
1756 || REGNO (indx) == ARG_POINTER_REGNUM
1757 || (flag_pic
1758 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
e5537457 1759 pointer = indx_ptr = true;
05b58257 1760
1761 if ((reload_completed || reload_in_progress)
1762 && indx == cfun->machine->base_reg)
1763 pointer = indx_ptr = literal_pool = true;
875862bf 1764 }
f588eb9f 1765
875862bf 1766 /* Prefer to use pointer as base, not index. */
1767 if (base && indx && !base_ptr
1768 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
1769 {
1770 rtx tmp = base;
1771 base = indx;
1772 indx = tmp;
1773 }
64a1078f 1774
875862bf 1775 /* Validate displacement. */
1776 if (!disp)
1777 {
119114cb 1778 /* If virtual registers are involved, the displacement will change later
1779 anyway as the virtual registers get eliminated. This could make a
1780 valid displacement invalid, but it is more likely to make an invalid
1781 displacement valid, because we sometimes access the register save area
1782 via negative offsets to one of those registers.
875862bf 1783 Thus we don't check the displacement for validity here. If after
1784 elimination the displacement turns out to be invalid after all,
1785 this is fixed up by reload in any case. */
1786 if (base != arg_pointer_rtx
1787 && indx != arg_pointer_rtx
1788 && base != return_address_pointer_rtx
119114cb 1789 && indx != return_address_pointer_rtx
1790 && base != frame_pointer_rtx
1791 && indx != frame_pointer_rtx
1792 && base != virtual_stack_vars_rtx
1793 && indx != virtual_stack_vars_rtx)
875862bf 1794 if (!DISP_IN_RANGE (offset))
e5537457 1795 return false;
875862bf 1796 }
1797 else
1798 {
1799 /* All the special cases are pointers. */
e5537457 1800 pointer = true;
64a1078f 1801
875862bf 1802 /* In the small-PIC case, the linker converts @GOT
1803 and @GOTNTPOFF offsets to possible displacements. */
1804 if (GET_CODE (disp) == UNSPEC
1805 && (XINT (disp, 1) == UNSPEC_GOT
1806 || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
875862bf 1807 && flag_pic == 1)
1808 {
1809 ;
1810 }
64a1078f 1811
875862bf 1812 /* Accept chunkified literal pool symbol references. */
86b779d2 1813 else if (cfun && cfun->machine
1814 && cfun->machine->decomposed_literal_pool_addresses_ok_p
1815 && GET_CODE (disp) == MINUS
875862bf 1816 && GET_CODE (XEXP (disp, 0)) == LABEL_REF
1817 && GET_CODE (XEXP (disp, 1)) == LABEL_REF)
1818 {
1819 ;
1820 }
64a1078f 1821
875862bf 1822 /* Accept literal pool references. */
1823 else if (GET_CODE (disp) == UNSPEC
1824 && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
1825 {
1826 orig_disp = gen_rtx_CONST (Pmode, disp);
1827 if (offset)
1828 {
1829 /* If we have an offset, make sure it does not
1830 exceed the size of the constant pool entry. */
1831 rtx sym = XVECEXP (disp, 0, 0);
1832 if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
e5537457 1833 return false;
64a1078f 1834
875862bf 1835 orig_disp = plus_constant (orig_disp, offset);
1836 }
1837 }
1838
1839 else
e5537457 1840 return false;
64a1078f 1841 }
1842
875862bf 1843 if (!base && !indx)
e5537457 1844 pointer = true;
875862bf 1845
1846 if (out)
1847 {
1848 out->base = base;
1849 out->indx = indx;
1850 out->disp = orig_disp;
1851 out->pointer = pointer;
05b58257 1852 out->literal_pool = literal_pool;
875862bf 1853 }
1854
e5537457 1855 return true;
64a1078f 1856}
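/* Illustrative example (not from the original sources): the address

     (plus (plus (reg %r2) (reg %r3)) (const_int 40))

   decomposes into two register components plus displacement 40; if
   exactly one of the registers is known to carry a pointer
   (REG_POINTER), the preference logic above makes that register the
   base and the other one the index.  */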
1857
6d6be381 1858/* Decompose an RTL expression OP for a shift count into its components,
1859 and return the base register in BASE and the offset in OFFSET.
1860
6d6be381 1861 Return true if OP is a valid shift count, false if not. */
1862
1863bool
417cba42 1864s390_decompose_shift_count (rtx op, rtx *base, HOST_WIDE_INT *offset)
6d6be381 1865{
1866 HOST_WIDE_INT off = 0;
1867
6d6be381 1868 /* We can have an integer constant, an address register,
1869 or a sum of the two. */
1870 if (GET_CODE (op) == CONST_INT)
1871 {
1872 off = INTVAL (op);
1873 op = NULL_RTX;
1874 }
1875 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
1876 {
1877 off = INTVAL (XEXP (op, 1));
1878 op = XEXP (op, 0);
1879 }
1880 while (op && GET_CODE (op) == SUBREG)
1881 op = SUBREG_REG (op);
1882
1883 if (op && GET_CODE (op) != REG)
1884 return false;
1885
1886 if (offset)
1887 *offset = off;
1888 if (base)
1889 *base = op;
1890
1891 return true;
1892}
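/* Illustrative usage sketch; not part of the original sources, and
   any_reg merely stands for some Pmode register rtx.  */
#if 0
{
  rtx base;
  HOST_WIDE_INT offset;
  rtx op = gen_rtx_PLUS (Pmode, any_reg, GEN_INT (7));

  if (s390_decompose_shift_count (op, &base, &offset))
    {
      /* Here base == any_reg and offset == 7.  A plain CONST_INT
         yields a NULL base; a plain REG yields an offset of 0.  */
    }
}
#endif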
1893
1894
875862bf 1895/* Return true if OP is a valid address without index. */
fab7adbf 1896
875862bf 1897bool
1898s390_legitimate_address_without_index_p (rtx op)
1899{
1900 struct s390_address addr;
1901
1902 if (!s390_decompose_address (XEXP (op, 0), &addr))
1903 return false;
1904 if (addr.indx)
1905 return false;
1906
1907 return true;
1908}
1909
59bc01b3 1910
1911/* Evaluates constraint strings described by the regular expression
 1912 ([A|B](Q|R|S|T))|U|W|Y and returns 1 if OP is a valid operand for the
 1913 constraint given in STR, and 0 otherwise. */
875862bf 1914
1915int
59bc01b3 1916s390_mem_constraint (const char *str, rtx op)
875862bf 1917{
1918 struct s390_address addr;
59bc01b3 1919 char c = str[0];
875862bf 1920
1921 /* Check for offsettable variants of memory constraints. */
1922 if (c == 'A')
1923 {
1924 /* Only accept non-volatile MEMs. */
1925 if (!MEM_P (op) || MEM_VOLATILE_P (op))
1926 return 0;
1927
1928 if ((reload_completed || reload_in_progress)
59bc01b3 1929 ? !offsettable_memref_p (op) : !offsettable_nonstrict_memref_p (op))
875862bf 1930 return 0;
1931
1932 c = str[1];
1933 }
1934
1935 /* Check for non-literal-pool variants of memory constraints. */
1936 else if (c == 'B')
1937 {
1938 if (GET_CODE (op) != MEM)
1939 return 0;
1940 if (!s390_decompose_address (XEXP (op, 0), &addr))
1941 return 0;
05b58257 1942 if (addr.literal_pool)
875862bf 1943 return 0;
1944
1945 c = str[1];
1946 }
1947
1948 switch (c)
1949 {
1950 case 'Q':
1951 if (GET_CODE (op) != MEM)
1952 return 0;
1953 if (!s390_decompose_address (XEXP (op, 0), &addr))
1954 return 0;
1955 if (addr.indx)
1956 return 0;
1957
1958 if (TARGET_LONG_DISPLACEMENT)
1959 {
1960 if (!s390_short_displacement (addr.disp))
1961 return 0;
1962 }
1963 break;
1964
1965 case 'R':
1966 if (GET_CODE (op) != MEM)
1967 return 0;
1968
1969 if (TARGET_LONG_DISPLACEMENT)
1970 {
1971 if (!s390_decompose_address (XEXP (op, 0), &addr))
1972 return 0;
1973 if (!s390_short_displacement (addr.disp))
1974 return 0;
1975 }
1976 break;
1977
1978 case 'S':
1979 if (!TARGET_LONG_DISPLACEMENT)
1980 return 0;
1981 if (GET_CODE (op) != MEM)
1982 return 0;
1983 if (!s390_decompose_address (XEXP (op, 0), &addr))
1984 return 0;
1985 if (addr.indx)
1986 return 0;
1987 if (s390_short_displacement (addr.disp))
1988 return 0;
1989 break;
1990
1991 case 'T':
1992 if (!TARGET_LONG_DISPLACEMENT)
1993 return 0;
1994 if (GET_CODE (op) != MEM)
1995 return 0;
1996 /* Any invalid address here will be fixed up by reload,
1997 so accept it for the most generic constraint. */
1998 if (s390_decompose_address (XEXP (op, 0), &addr)
1999 && s390_short_displacement (addr.disp))
2000 return 0;
2001 break;
2002
2003 case 'U':
2004 if (TARGET_LONG_DISPLACEMENT)
2005 {
2006 if (!s390_decompose_address (op, &addr))
2007 return 0;
2008 if (!s390_short_displacement (addr.disp))
2009 return 0;
2010 }
2011 break;
2012
2013 case 'W':
2014 if (!TARGET_LONG_DISPLACEMENT)
2015 return 0;
2016 /* Any invalid address here will be fixed up by reload,
2017 so accept it for the most generic constraint. */
2018 if (s390_decompose_address (op, &addr)
2019 && s390_short_displacement (addr.disp))
2020 return 0;
2021 break;
2022
2023 case 'Y':
6d6be381 2024 /* Simply check for the basic form of a shift count. Reload will
2025 take care of making sure we have a proper base register. */
417cba42 2026 if (!s390_decompose_shift_count (op, NULL, NULL))
6d6be381 2027 return 0;
2028 break;
875862bf 2029
2030 default:
2031 return 0;
2032 }
2033
2034 return 1;
2035}
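/* Worked examples (illustrative, assuming TARGET_LONG_DISPLACEMENT):

     (mem (plus (reg %r2) (const_int 40)))    satisfies 'Q' and 'R'
                                              (short displacement, no index)
     (mem (plus (reg %r2) (const_int 8000)))  satisfies 'S' and 'T'
                                              (long displacement, no index)
     (mem (plus (plus (reg %r2) (reg %r3))
                (const_int 40)))              satisfies 'R' but not 'Q'
                                              (index register present)

   'U' and 'W' apply the same checks to a bare address instead of a MEM.  */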
2036
59bc01b3 2037
2038
2039/* Evaluates constraint strings starting with letter O. Input
 2040 parameter C is the letter following the "O" in the constraint
2041 string. Returns 1 if VALUE meets the respective constraint and 0
2042 otherwise. */
875862bf 2043
e863b008 2044int
59bc01b3 2045s390_O_constraint_str (const char c, HOST_WIDE_INT value)
e863b008 2046{
59bc01b3 2047 if (!TARGET_EXTIMM)
2048 return 0;
e863b008 2049
59bc01b3 2050 switch (c)
e863b008 2051 {
59bc01b3 2052 case 's':
2053 return trunc_int_for_mode (value, SImode) == value;
2054
2055 case 'p':
2056 return value == 0
2057 || s390_single_part (GEN_INT (value), DImode, SImode, 0) == 1;
2058
2059 case 'n':
2060 return value == -1
2061 || s390_single_part (GEN_INT (value), DImode, SImode, -1) == 1;
2062
e863b008 2063 default:
59bc01b3 2064 gcc_unreachable ();
e863b008 2065 }
2066}
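/* Worked examples (illustrative): with TARGET_EXTIMM,

     0x000000007fffffff  satisfies 'Os'  (sign-extends from SImode)
     0x0000000080000000  satisfies 'Op'  (upper 32 bits all zero)
     0xffffffff00000001  satisfies 'On'  (upper 32 bits all ones)

   i.e. values the extended-immediate facility can produce in a single
   instruction.  */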
2067
59bc01b3 2068
2069/* Evaluates constraint strings starting with letter N. Parameter STR
2070 contains the letters following letter "N" in the constraint string.
2071 Returns true if VALUE matches the constraint. */
e863b008 2072
875862bf 2073int
59bc01b3 2074s390_N_constraint_str (const char *str, HOST_WIDE_INT value)
875862bf 2075{
2076 enum machine_mode mode, part_mode;
2077 int def;
2078 int part, part_goal;
2079
875862bf 2080
59bc01b3 2081 if (str[0] == 'x')
2082 part_goal = -1;
2083 else
2084 part_goal = str[0] - '0';
875862bf 2085
59bc01b3 2086 switch (str[1])
2087 {
2088 case 'Q':
2089 part_mode = QImode;
875862bf 2090 break;
59bc01b3 2091 case 'H':
2092 part_mode = HImode;
163277cf 2093 break;
59bc01b3 2094 case 'S':
2095 part_mode = SImode;
2096 break;
2097 default:
2098 return 0;
2099 }
163277cf 2100
59bc01b3 2101 switch (str[2])
2102 {
2103 case 'H':
2104 mode = HImode;
2105 break;
2106 case 'S':
2107 mode = SImode;
2108 break;
2109 case 'D':
2110 mode = DImode;
2111 break;
2112 default:
2113 return 0;
2114 }
53239c89 2115
59bc01b3 2116 switch (str[3])
2117 {
2118 case '0':
2119 def = 0;
2120 break;
2121 case 'F':
2122 def = -1;
2123 break;
875862bf 2124 default:
2125 return 0;
2126 }
2127
59bc01b3 2128 if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
2129 return 0;
2130
2131 part = s390_single_part (GEN_INT (value), mode, part_mode, def);
2132 if (part < 0)
2133 return 0;
2134 if (part_goal != -1 && part_goal != part)
2135 return 0;
2136
875862bf 2137 return 1;
2138}
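/* Worked example (illustrative): for the constraint suffix "xHD0",
   str[0] = 'x' accepts any part number, the parts are HImode, the
   value is DImode and the default part value is 0.  The constant
   0x0000ff0000000000 has exactly one halfword (bits 47..32) that
   differs from zero, so it matches -- such a constant is loadable
   with a single llihl-type instruction.  */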
2139
59bc01b3 2140
2141/* Returns true if the input parameter VALUE is a float zero. */
2142
2143int
2144s390_float_const_zero_p (rtx value)
2145{
2146 return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
2147 && value == CONST0_RTX (GET_MODE (value)));
2148}
2149
2150
875862bf 2151/* Compute a (partial) cost for rtx X. Return true if the complete
2152 cost has been computed, and false if subexpressions should be
2153 scanned. In either case, *TOTAL contains the cost result.
2154 CODE contains GET_CODE (x), OUTER_CODE contains the code
2155 of the superexpression of x. */
2156
2157static bool
2158s390_rtx_costs (rtx x, int code, int outer_code, int *total)
fab7adbf 2159{
2160 switch (code)
2161 {
2162 case CONST:
fab7adbf 2163 case CONST_INT:
fab7adbf 2164 case LABEL_REF:
2165 case SYMBOL_REF:
2166 case CONST_DOUBLE:
3f074425 2167 case MEM:
fab7adbf 2168 *total = 0;
2169 return true;
2170
2171 case ASHIFT:
2172 case ASHIFTRT:
2173 case LSHIFTRT:
18925d38 2174 case ROTATE:
2175 case ROTATERT:
fab7adbf 2176 case AND:
2177 case IOR:
2178 case XOR:
fab7adbf 2179 case NEG:
2180 case NOT:
2181 *total = COSTS_N_INSNS (1);
18925d38 2182 return false;
fab7adbf 2183
9cd3f3e6 2184 case PLUS:
2185 case MINUS:
2186 /* Check for multiply and add. */
d95e38cf 2187 if ((GET_MODE (x) == DFmode || GET_MODE (x) == SFmode)
9cd3f3e6 2188 && GET_CODE (XEXP (x, 0)) == MULT
095798e3 2189 && TARGET_HARD_FLOAT && TARGET_FUSED_MADD)
9cd3f3e6 2190 {
2191 /* This is the multiply and add case. */
d95e38cf 2192 if (GET_MODE (x) == DFmode)
2193 *total = s390_cost->madbr;
2194 else
2195 *total = s390_cost->maebr;
2196 *total += rtx_cost (XEXP (XEXP (x, 0), 0), MULT)
9cd3f3e6 2197 + rtx_cost (XEXP (XEXP (x, 0), 1), MULT)
2198 + rtx_cost (XEXP (x, 1), code);
2199 return true; /* Do not do an additional recursive descent. */
2200 }
2201 *total = COSTS_N_INSNS (1);
2202 return false;
2203
18925d38 2204 case MULT:
2205 switch (GET_MODE (x))
2206 {
2207 case SImode:
9cd3f3e6 2208 {
18925d38 2209 rtx left = XEXP (x, 0);
2210 rtx right = XEXP (x, 1);
2211 if (GET_CODE (right) == CONST_INT
cb888f33 2212 && CONST_OK_FOR_K (INTVAL (right)))
18925d38 2213 *total = s390_cost->mhi;
2214 else if (GET_CODE (left) == SIGN_EXTEND)
2215 *total = s390_cost->mh;
2216 else
2217 *total = s390_cost->ms; /* msr, ms, msy */
2218 break;
2219 }
2220 case DImode:
2221 {
2222 rtx left = XEXP (x, 0);
2223 rtx right = XEXP (x, 1);
2224 if (TARGET_64BIT)
2225 {
2226 if (GET_CODE (right) == CONST_INT
cb888f33 2227 && CONST_OK_FOR_K (INTVAL (right)))
18925d38 2228 *total = s390_cost->mghi;
2229 else if (GET_CODE (left) == SIGN_EXTEND)
2230 *total = s390_cost->msgf;
2231 else
2232 *total = s390_cost->msg; /* msgr, msg */
2233 }
2234 else /* TARGET_31BIT */
2235 {
2236 if (GET_CODE (left) == SIGN_EXTEND
2237 && GET_CODE (right) == SIGN_EXTEND)
2238 /* mulsidi case: mr, m */
2239 *total = s390_cost->m;
9cd3f3e6 2240 else if (GET_CODE (left) == ZERO_EXTEND
2241 && GET_CODE (right) == ZERO_EXTEND
2242 && TARGET_CPU_ZARCH)
2243 /* umulsidi case: ml, mlr */
2244 *total = s390_cost->ml;
18925d38 2245 else
2246 /* Complex calculation is required. */
2247 *total = COSTS_N_INSNS (40);
2248 }
2249 break;
2250 }
2251 case SFmode:
2252 case DFmode:
2253 *total = s390_cost->mult_df;
2254 break;
429f9fdb 2255 case TFmode:
2256 *total = s390_cost->mxbr;
2257 break;
18925d38 2258 default:
2259 return false;
2260 }
2261 return false;
fab7adbf 2262
3f074425 2263 case UDIV:
2264 case UMOD:
2265 if (GET_MODE (x) == TImode) /* 128 bit division */
2266 *total = s390_cost->dlgr;
2267 else if (GET_MODE (x) == DImode)
2268 {
2269 rtx right = XEXP (x, 1);
2270 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2271 *total = s390_cost->dlr;
2272 else /* 64 by 64 bit division */
2273 *total = s390_cost->dlgr;
2274 }
2275 else if (GET_MODE (x) == SImode) /* 32 bit division */
2276 *total = s390_cost->dlr;
2277 return false;
2278
fab7adbf 2279 case DIV:
3f074425 2280 case MOD:
2281 if (GET_MODE (x) == DImode)
2282 {
2283 rtx right = XEXP (x, 1);
2284 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2285 if (TARGET_64BIT)
2286 *total = s390_cost->dsgfr;
2287 else
2288 *total = s390_cost->dr;
2289 else /* 64 by 64 bit division */
2290 *total = s390_cost->dsgr;
2291 }
2292 else if (GET_MODE (x) == SImode) /* 32 bit division */
2293 *total = s390_cost->dlr;
2294 else if (GET_MODE (x) == SFmode)
260075cc 2295 {
095798e3 2296 *total = s390_cost->debr;
260075cc 2297 }
2298 else if (GET_MODE (x) == DFmode)
2299 {
095798e3 2300 *total = s390_cost->ddbr;
260075cc 2301 }
429f9fdb 2302 else if (GET_MODE (x) == TFmode)
2303 {
095798e3 2304 *total = s390_cost->dxbr;
429f9fdb 2305 }
18925d38 2306 return false;
2307
9cd3f3e6 2308 case SQRT:
2309 if (GET_MODE (x) == SFmode)
2310 *total = s390_cost->sqebr;
429f9fdb 2311 else if (GET_MODE (x) == DFmode)
9cd3f3e6 2312 *total = s390_cost->sqdbr;
429f9fdb 2313 else /* TFmode */
2314 *total = s390_cost->sqxbr;
9cd3f3e6 2315 return false;
2316
18925d38 2317 case SIGN_EXTEND:
9cd3f3e6 2318 case ZERO_EXTEND:
3f074425 2319 if (outer_code == MULT || outer_code == DIV || outer_code == MOD
2320 || outer_code == PLUS || outer_code == MINUS
2321 || outer_code == COMPARE)
18925d38 2322 *total = 0;
2323 return false;
fab7adbf 2324
3f074425 2325 case COMPARE:
2326 *total = COSTS_N_INSNS (1);
2327 if (GET_CODE (XEXP (x, 0)) == AND
2328 && GET_CODE (XEXP (x, 1)) == CONST_INT
2329 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2330 {
2331 rtx op0 = XEXP (XEXP (x, 0), 0);
2332 rtx op1 = XEXP (XEXP (x, 0), 1);
2333 rtx op2 = XEXP (x, 1);
2334
2335 if (memory_operand (op0, GET_MODE (op0))
2336 && s390_tm_ccmode (op1, op2, 0) != VOIDmode)
2337 return true;
2338 if (register_operand (op0, GET_MODE (op0))
2339 && s390_tm_ccmode (op1, op2, 1) != VOIDmode)
2340 return true;
2341 }
2342 return false;
2343
fab7adbf 2344 default:
2345 return false;
2346 }
2347}
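/* Illustrative example: with TARGET_HARD_FLOAT and TARGET_FUSED_MADD,
   the DFmode expression

     (plus:DF (mult:DF (reg a) (reg b)) (reg c))

   is costed as one multiply-and-add (s390_cost->madbr) plus the costs
   of its three operands, rather than as a separate multiply and add.  */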
2348
ee9c19ee 2349/* Return the cost of an address rtx ADDR. */
2350
ec0457a8 2351static int
b40da9a7 2352s390_address_cost (rtx addr)
ee9c19ee 2353{
2354 struct s390_address ad;
2355 if (!s390_decompose_address (addr, &ad))
2356 return 1000;
2357
 2358 return ad.indx ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
2359}
2360
be00aaa8 2361/* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
2362 otherwise return 0. */
2363
2364int
edd89d66 2365tls_symbolic_operand (rtx op)
be00aaa8 2366{
be00aaa8 2367 if (GET_CODE (op) != SYMBOL_REF)
2368 return 0;
a3e33162 2369 return SYMBOL_REF_TLS_MODEL (op);
be00aaa8 2370}
4673c1a0 2371\f
923cf36d 2372/* Split DImode access register reference REG (on 64-bit) into its constituent
2373 low and high parts, and store them into LO and HI. Note that gen_lowpart/
2374 gen_highpart cannot be used as they assume all registers are word-sized,
2375 while our access registers have only half that size. */
2376
2377void
2378s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
2379{
2380 gcc_assert (TARGET_64BIT);
2381 gcc_assert (ACCESS_REG_P (reg));
2382 gcc_assert (GET_MODE (reg) == DImode);
2383 gcc_assert (!(REGNO (reg) & 1));
2384
2385 *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
2386 *hi = gen_rtx_REG (SImode, REGNO (reg));
2387}
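/* Illustrative example: for a DImode reference to the access-register
   pair (a0, a1), *hi becomes the SImode rtx for a0 (the high 32 bits)
   and *lo the one for a1 (the low 32 bits).  */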
4673c1a0 2388
56769981 2389/* Return true if OP contains a symbol reference. */
4673c1a0 2390
e5537457 2391bool
b40da9a7 2392symbolic_reference_mentioned_p (rtx op)
4673c1a0 2393{
edd89d66 2394 const char *fmt;
2395 int i;
4673c1a0 2396
2397 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
2398 return 1;
2399
2400 fmt = GET_RTX_FORMAT (GET_CODE (op));
2401 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2402 {
2403 if (fmt[i] == 'E')
2404 {
edd89d66 2405 int j;
4673c1a0 2406
2407 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2408 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2409 return 1;
2410 }
2411
2412 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
2413 return 1;
2414 }
2415
2416 return 0;
2417}
2418
be00aaa8 2419/* Return true if OP contains a reference to a thread-local symbol. */
2420
e5537457 2421bool
b40da9a7 2422tls_symbolic_reference_mentioned_p (rtx op)
be00aaa8 2423{
edd89d66 2424 const char *fmt;
2425 int i;
be00aaa8 2426
2427 if (GET_CODE (op) == SYMBOL_REF)
2428 return tls_symbolic_operand (op);
2429
2430 fmt = GET_RTX_FORMAT (GET_CODE (op));
2431 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2432 {
2433 if (fmt[i] == 'E')
2434 {
edd89d66 2435 int j;
be00aaa8 2436
2437 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2438 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
e5537457 2439 return true;
be00aaa8 2440 }
2441
2442 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
e5537457 2443 return true;
be00aaa8 2444 }
2445
e5537457 2446 return false;
be00aaa8 2447}
2448
4673c1a0 2449
f81e845f 2450/* Return true if OP is a legitimate general operand when
2451 generating PIC code. It is given that flag_pic is on
56769981 2452 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2453
4673c1a0 2454int
edd89d66 2455legitimate_pic_operand_p (rtx op)
4673c1a0 2456{
8b4a4127 2457 /* Accept all non-symbolic constants. */
4673c1a0 2458 if (!SYMBOLIC_CONST (op))
2459 return 1;
2460
f81e845f 2461 /* Reject everything else; must be handled
be00aaa8 2462 via emit_symbolic_move. */
4673c1a0 2463 return 0;
2464}
2465
56769981 2466/* Returns true if the constant value OP is a legitimate general operand.
2467 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2468
4673c1a0 2469int
edd89d66 2470legitimate_constant_p (rtx op)
4673c1a0 2471{
8b4a4127 2472 /* Accept all non-symbolic constants. */
4673c1a0 2473 if (!SYMBOLIC_CONST (op))
2474 return 1;
2475
be00aaa8 2476 /* Accept immediate LARL operands. */
dafc8d45 2477 if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
be00aaa8 2478 return 1;
2479
2480 /* Thread-local symbols are never legal constants. This is
2481 so that emit_call knows that computing such addresses
2482 might require a function call. */
2483 if (TLS_SYMBOLIC_CONST (op))
2484 return 0;
2485
4673c1a0 2486 /* In the PIC case, symbolic constants must *not* be
2487 forced into the literal pool. We accept them here,
be00aaa8 2488 so that they will be handled by emit_symbolic_move. */
4673c1a0 2489 if (flag_pic)
2490 return 1;
2491
4673c1a0 2492 /* All remaining non-PIC symbolic constants are
2493 forced into the literal pool. */
2494 return 0;
2495}
2496
be00aaa8 2497/* Determine if it's legal to put X into the constant pool. This
2498 is not possible if X contains the address of a symbol that is
2499 not constant (TLS) or not known at final link time (PIC). */
2500
2501static bool
b40da9a7 2502s390_cannot_force_const_mem (rtx x)
be00aaa8 2503{
2504 switch (GET_CODE (x))
2505 {
2506 case CONST_INT:
2507 case CONST_DOUBLE:
2508 /* Accept all non-symbolic constants. */
2509 return false;
2510
2511 case LABEL_REF:
2512 /* Labels are OK iff we are non-PIC. */
2513 return flag_pic != 0;
2514
2515 case SYMBOL_REF:
2516 /* 'Naked' TLS symbol references are never OK,
2517 non-TLS symbols are OK iff we are non-PIC. */
2518 if (tls_symbolic_operand (x))
2519 return true;
2520 else
2521 return flag_pic != 0;
2522
2523 case CONST:
2524 return s390_cannot_force_const_mem (XEXP (x, 0));
2525 case PLUS:
2526 case MINUS:
2527 return s390_cannot_force_const_mem (XEXP (x, 0))
2528 || s390_cannot_force_const_mem (XEXP (x, 1));
2529
2530 case UNSPEC:
2531 switch (XINT (x, 1))
2532 {
2533 /* Only lt-relative or GOT-relative UNSPECs are OK. */
12ef3745 2534 case UNSPEC_LTREL_OFFSET:
2535 case UNSPEC_GOT:
2536 case UNSPEC_GOTOFF:
2537 case UNSPEC_PLTOFF:
be00aaa8 2538 case UNSPEC_TLSGD:
2539 case UNSPEC_TLSLDM:
2540 case UNSPEC_NTPOFF:
2541 case UNSPEC_DTPOFF:
2542 case UNSPEC_GOTNTPOFF:
2543 case UNSPEC_INDNTPOFF:
2544 return false;
2545
d345b493 2546 /* If the literal pool shares the code section, we put
 2547 execute template placeholders into the pool as well. */
2548 case UNSPEC_INSN:
2549 return TARGET_CPU_ZARCH;
2550
be00aaa8 2551 default:
2552 return true;
2553 }
2554 break;
2555
2556 default:
32eda510 2557 gcc_unreachable ();
be00aaa8 2558 }
2559}
2560
8b4a4127 2561/* Returns true if the constant value OP is a legitimate general
f81e845f 2562 operand during and after reload. It differs from
8b4a4127 2563 legitimate_constant_p in that it will not accept a constant
 2564 that would need to be forced into the literal pool before it
 2565 can be used as an operand. */
2566
e5537457 2567bool
edd89d66 2568legitimate_reload_constant_p (rtx op)
8b4a4127 2569{
51aa1e9c 2570 /* Accept la(y) operands. */
f81e845f 2571 if (GET_CODE (op) == CONST_INT
51aa1e9c 2572 && DISP_IN_RANGE (INTVAL (op)))
e5537457 2573 return true;
51aa1e9c 2574
163277cf 2575 /* Accept l(g)hi/l(g)fi operands. */
8b4a4127 2576 if (GET_CODE (op) == CONST_INT
163277cf 2577 && (CONST_OK_FOR_K (INTVAL (op)) || CONST_OK_FOR_Os (INTVAL (op))))
e5537457 2578 return true;
8b4a4127 2579
2580 /* Accept lliXX operands. */
dafc8d45 2581 if (TARGET_ZARCH
53239c89 2582 && GET_CODE (op) == CONST_INT
2583 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2584 && s390_single_part (op, word_mode, HImode, 0) >= 0)
e5537457 2585 return true;
8b4a4127 2586
163277cf 2587 if (TARGET_EXTIMM
2588 && GET_CODE (op) == CONST_INT
2589 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2590 && s390_single_part (op, word_mode, SImode, 0) >= 0)
2591 return true;
2592
8b4a4127 2593 /* Accept larl operands. */
dafc8d45 2594 if (TARGET_CPU_ZARCH
8b4a4127 2595 && larl_operand (op, VOIDmode))
e5537457 2596 return true;
8b4a4127 2597
e863b008 2598 /* Accept lzXX operands. */
2599 if (GET_CODE (op) == CONST_DOUBLE
2600 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', "G"))
2601 return true;
2602
53239c89 2603 /* Accept double-word operands that can be split. */
2604 if (GET_CODE (op) == CONST_INT
2605 && trunc_int_for_mode (INTVAL (op), word_mode) != INTVAL (op))
2606 {
2607 enum machine_mode dword_mode = word_mode == SImode ? DImode : TImode;
2608 rtx hi = operand_subword (op, 0, 0, dword_mode);
2609 rtx lo = operand_subword (op, 1, 0, dword_mode);
2610 return legitimate_reload_constant_p (hi)
2611 && legitimate_reload_constant_p (lo);
2612 }
2613
8b4a4127 2614 /* Everything else cannot be handled without reload. */
e5537457 2615 return false;
8b4a4127 2616}
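/* Illustrative example for the double-word case: on a 31-bit target
   the DImode constant 0x0001000200030004 does not survive truncation
   to word_mode (SImode), so it is split into the subwords 0x00010002
   and 0x00030004 and accepted only if each half is itself
   reload-legal (which depends on flags such as TARGET_EXTIMM or
   TARGET_LONG_DISPLACEMENT).  */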
2617
2618/* Given an rtx OP being reloaded into a reg required to be in class CLASS,
2619 return the class of reg to actually use. */
2620
2621enum reg_class
b40da9a7 2622s390_preferred_reload_class (rtx op, enum reg_class class)
8b4a4127 2623{
8b4a4127 2624 switch (GET_CODE (op))
2625 {
2626 /* Constants we cannot reload must be forced into the
990553d7 2627 literal pool. */
2628
8b4a4127 2629 case CONST_DOUBLE:
2630 case CONST_INT:
990553d7 2631 if (legitimate_reload_constant_p (op))
8b4a4127 2632 return class;
990553d7 2633 else
2634 return NO_REGS;
8b4a4127 2635
2636 /* If a symbolic constant or a PLUS is reloaded,
0b300c86 2637 it is most likely being used as an address, so
2638 prefer ADDR_REGS. If 'class' is not a superset
2639 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
8b4a4127 2640 case PLUS:
2641 case LABEL_REF:
2642 case SYMBOL_REF:
2643 case CONST:
0b300c86 2644 if (reg_class_subset_p (ADDR_REGS, class))
2645 return ADDR_REGS;
2646 else
2647 return NO_REGS;
8b4a4127 2648
2649 default:
2650 break;
2651 }
2652
2653 return class;
2654}
4673c1a0 2655
328d5423 2656/* Inform reload about cases where moving X with a mode MODE to a register in
2657 CLASS requires an extra scratch or immediate register. Return the class
2658 needed for the immediate register. */
429f9fdb 2659
328d5423 2660static enum reg_class
2661s390_secondary_reload (bool in_p, rtx x, enum reg_class class,
2662 enum machine_mode mode, secondary_reload_info *sri)
2663{
2664 /* Intermediate register needed. */
dd16a4bd 2665 if (reg_classes_intersect_p (CC_REGS, class))
bcbf02a5 2666 return GENERAL_REGS;
2667
328d5423 2668 /* We need a scratch register when loading a PLUS expression which
2669 is not a legitimate operand of the LOAD ADDRESS instruction. */
2670 if (in_p && s390_plus_operand (x, mode))
2671 sri->icode = (TARGET_64BIT ?
2672 CODE_FOR_reloaddi_plus : CODE_FOR_reloadsi_plus);
2673
efec32e0 2674 /* When performing a multiword move from or to memory, we have to make
328d5423 2675 sure the second chunk in memory is addressable without causing a
 2676 displacement overflow. If it is not, we calculate the address in
 2677 a scratch register. */
2678 if (MEM_P (x)
2679 && GET_CODE (XEXP (x, 0)) == PLUS
2680 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2681 && !DISP_IN_RANGE (INTVAL (XEXP (XEXP (x, 0), 1))
6938bdf8 2682 + GET_MODE_SIZE (mode) - 1))
328d5423 2683 {
efec32e0 2684 /* For GENERAL_REGS a displacement overflow is no problem if it occurs
328d5423 2685 in an s_operand address, since we may fall back to lm/stm. So we only
2686 have to care about overflows in the b+i+d case. */
2687 if ((reg_classes_intersect_p (GENERAL_REGS, class)
2688 && s390_class_max_nregs (GENERAL_REGS, mode) > 1
2689 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS)
2690 /* For FP_REGS no lm/stm is available so this check is triggered
2691 for displacement overflows in b+i+d and b+d like addresses. */
2692 || (reg_classes_intersect_p (FP_REGS, class)
2693 && s390_class_max_nregs (FP_REGS, mode) > 1))
2694 {
2695 if (in_p)
2696 sri->icode = (TARGET_64BIT ?
2697 CODE_FOR_reloaddi_nonoffmem_in :
2698 CODE_FOR_reloadsi_nonoffmem_in);
2699 else
2700 sri->icode = (TARGET_64BIT ?
2701 CODE_FOR_reloaddi_nonoffmem_out :
2702 CODE_FOR_reloadsi_nonoffmem_out);
2703 }
2704 }
bcbf02a5 2705
328d5423 2706 /* Either scratch or no register needed. */
66795431 2707 return NO_REGS;
2708}
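/* Worked example (illustrative): on a 31-bit target without long
   displacements, reloading a DImode value from

     (mem:DI (plus (plus (reg %r2) (reg %r3)) (const_int 4090)))

   touches bytes 4090..4097, so the second word lies beyond the
   0..4095 displacement range.  lm/stm cannot help because an index
   register is involved, hence reload is directed to
   CODE_FOR_reloadsi_nonoffmem_in/_out, which compute the address in a
   scratch register first.  */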
2709
64f977d6 2710/* Generate code to load SRC, which is a PLUS that is not a
 2711 legitimate operand for the LA instruction, into TARGET.
 2712 SCRATCH may be used as a scratch register. */
2713
2714void
edd89d66 2715s390_expand_plus_operand (rtx target, rtx src,
2716 rtx scratch)
64f977d6 2717{
e7f0624a 2718 rtx sum1, sum2;
8ba34dcd 2719 struct s390_address ad;
dc4951d9 2720
dc4951d9 2721 /* src must be a PLUS; get its two operands. */
32eda510 2722 gcc_assert (GET_CODE (src) == PLUS);
2723 gcc_assert (GET_MODE (src) == Pmode);
64f977d6 2724
c10847b9 2725 /* Check if any of the two operands is already scheduled
2726 for replacement by reload. This can happen e.g. when
2727 float registers occur in an address. */
2728 sum1 = find_replacement (&XEXP (src, 0));
2729 sum2 = find_replacement (&XEXP (src, 1));
a5004c3d 2730 src = gen_rtx_PLUS (Pmode, sum1, sum2);
a5004c3d 2731
e7f0624a 2732 /* If the address is already strictly valid, there's nothing to do. */
2733 if (!s390_decompose_address (src, &ad)
1e280623 2734 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
2735 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
64f977d6 2736 {
e7f0624a 2737 /* Otherwise, one of the operands cannot be an address register;
2738 we reload its value into the scratch register. */
2739 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
2740 {
2741 emit_move_insn (scratch, sum1);
2742 sum1 = scratch;
2743 }
2744 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
2745 {
2746 emit_move_insn (scratch, sum2);
2747 sum2 = scratch;
2748 }
64f977d6 2749
e7f0624a 2750 /* According to the way these invalid addresses are generated
2751 in reload.c, it should never happen (at least on s390) that
2752 *neither* of the PLUS components, after find_replacements
2753 was applied, is an address register. */
2754 if (sum1 == scratch && sum2 == scratch)
2755 {
2756 debug_rtx (src);
32eda510 2757 gcc_unreachable ();
e7f0624a 2758 }
64f977d6 2759
e7f0624a 2760 src = gen_rtx_PLUS (Pmode, sum1, sum2);
64f977d6 2761 }
2762
2763 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
2764 is only ever performed on addresses, so we can mark the
2765 sum as legitimate for LA in any case. */
4fbc4db5 2766 s390_load_address (target, src);
64f977d6 2767}
2768
2769
e5537457 2770/* Return true if ADDR is a valid memory address.
875862bf 2771 STRICT specifies whether strict register checking applies. */
4673c1a0 2772
e5537457 2773bool
875862bf 2774legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
edd89d66 2775 rtx addr, int strict)
4673c1a0 2776{
875862bf 2777 struct s390_address ad;
2778 if (!s390_decompose_address (addr, &ad))
e5537457 2779 return false;
8ba34dcd 2780
2781 if (strict)
2782 {
1e280623 2783 if (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
e5537457 2784 return false;
1e280623 2785
2786 if (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx)))
e5537457 2787 return false;
8ba34dcd 2788 }
2789 else
2790 {
1e280623 2791 if (ad.base
2792 && !(REGNO (ad.base) >= FIRST_PSEUDO_REGISTER
2793 || REGNO_REG_CLASS (REGNO (ad.base)) == ADDR_REGS))
e5537457 2794 return false;
1e280623 2795
2796 if (ad.indx
2797 && !(REGNO (ad.indx) >= FIRST_PSEUDO_REGISTER
2798 || REGNO_REG_CLASS (REGNO (ad.indx)) == ADDR_REGS))
2799 return false;
8ba34dcd 2800 }
e5537457 2801 return true;
4673c1a0 2802}
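/* Illustrative example: before reload, (plus (reg 99) (const_int 8))
   with pseudo 99 passes only the non-strict check, since any pseudo
   may still end up in an address register; the strict variant
   consults the actual hard register assignment and accepts the
   address only once the base really is an address register.  */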
2803
e5537457 2804/* Return true if OP is a valid operand for the LA instruction.
2eb8fe23 2805 In 31-bit, we need to prove that the result is used as an
2806 address, as LA performs only a 31-bit addition. */
2807
e5537457 2808bool
edd89d66 2809legitimate_la_operand_p (rtx op)
2eb8fe23 2810{
2811 struct s390_address addr;
8ba34dcd 2812 if (!s390_decompose_address (op, &addr))
e5537457 2813 return false;
2eb8fe23 2814
e5537457 2815 return (TARGET_64BIT || addr.pointer);
64f977d6 2816}
2eb8fe23 2817
e5537457 2818/* Return true if it is valid *and* preferable to use LA to
c6061690 2819 compute the sum of OP1 and OP2. */
f81e845f 2820
e5537457 2821bool
c6061690 2822preferred_la_operand_p (rtx op1, rtx op2)
a40b2054 2823{
2824 struct s390_address addr;
c6061690 2825
2826 if (op2 != const0_rtx)
2827 op1 = gen_rtx_PLUS (Pmode, op1, op2);
2828
2829 if (!s390_decompose_address (op1, &addr))
e5537457 2830 return false;
1e280623 2831 if (addr.base && !REGNO_OK_FOR_BASE_P (REGNO (addr.base)))
e5537457 2832 return false;
1e280623 2833 if (addr.indx && !REGNO_OK_FOR_INDEX_P (REGNO (addr.indx)))
e5537457 2834 return false;
a40b2054 2835
2836 if (!TARGET_64BIT && !addr.pointer)
e5537457 2837 return false;
a40b2054 2838
2839 if (addr.pointer)
e5537457 2840 return true;
a40b2054 2841
ec3b9583 2842 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2843 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
e5537457 2844 return true;
a40b2054 2845
e5537457 2846 return false;
a40b2054 2847}
2848
4fbc4db5 2849/* Emit a forced load-address operation to load SRC into DST.
2850 This will use the LOAD ADDRESS instruction even in situations
2851 where legitimate_la_operand_p (SRC) returns false. */
2eb8fe23 2852
4fbc4db5 2853void
b40da9a7 2854s390_load_address (rtx dst, rtx src)
64f977d6 2855{
4fbc4db5 2856 if (TARGET_64BIT)
2857 emit_move_insn (dst, src);
2858 else
2859 emit_insn (gen_force_la_31 (dst, src));
2eb8fe23 2860}
2861
4673c1a0 2862/* Return a legitimate reference for ORIG (an address) using the
2863 register REG. If REG is 0, a new pseudo is generated.
2864
2865 There are two types of references that must be handled:
2866
2867 1. Global data references must load the address from the GOT, via
2868 the PIC reg. An insn is emitted to do this load, and the reg is
2869 returned.
2870
2871 2. Static data references, constant pool addresses, and code labels
2872 compute the address as an offset from the GOT, whose base is in
a3e33162 2873 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
4673c1a0 2874 differentiate them from global data objects. The returned
2875 address is the PIC reg + an unspec constant.
2876
2877 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2878 reg also appears in the address. */
2879
2880rtx
b40da9a7 2881legitimize_pic_address (rtx orig, rtx reg)
4673c1a0 2882{
2883 rtx addr = orig;
2884 rtx new = orig;
2885 rtx base;
2886
1ed004b7 2887 gcc_assert (!TLS_SYMBOLIC_CONST (addr));
2888
4673c1a0 2889 if (GET_CODE (addr) == LABEL_REF
a3e33162 2890 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
4673c1a0 2891 {
2892 /* This is a local symbol. */
dafc8d45 2893 if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
4673c1a0 2894 {
f81e845f 2895 /* Access local symbols PC-relative via LARL.
2896 This is the same as in the non-PIC case, so it is
0c034860 2897 handled automatically ... */
4673c1a0 2898 }
2899 else
2900 {
12ef3745 2901 /* Access local symbols relative to the GOT. */
4673c1a0 2902
2903 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2904
12ef3745 2905 if (reload_in_progress || reload_completed)
3072d30e 2906 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
12ef3745 2907
2908 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
525d1294 2909 addr = gen_rtx_CONST (Pmode, addr);
2910 addr = force_const_mem (Pmode, addr);
4673c1a0 2911 emit_move_insn (temp, addr);
2912
12ef3745 2913 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
4673c1a0 2914 if (reg != 0)
2915 {
35d04dd3 2916 s390_load_address (reg, new);
4673c1a0 2917 new = reg;
2918 }
2919 }
2920 }
2921 else if (GET_CODE (addr) == SYMBOL_REF)
2922 {
2923 if (reg == 0)
2924 reg = gen_reg_rtx (Pmode);
2925
2926 if (flag_pic == 1)
2927 {
2928 /* Assume GOT offset < 4k. This is handled the same way
12ef3745 2929 in both 31- and 64-bit code (@GOT). */
4673c1a0 2930
9a2a66ae 2931 if (reload_in_progress || reload_completed)
3072d30e 2932 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
4673c1a0 2933
12ef3745 2934 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
4673c1a0 2935 new = gen_rtx_CONST (Pmode, new);
2936 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
e265a6da 2937 new = gen_const_mem (Pmode, new);
4673c1a0 2938 emit_move_insn (reg, new);
2939 new = reg;
2940 }
dafc8d45 2941 else if (TARGET_CPU_ZARCH)
4673c1a0 2942 {
2943 /* If the GOT offset might be >= 4k, we determine the position
2944 of the GOT entry via a PC-relative LARL (@GOTENT). */
2945
2946 rtx temp = gen_reg_rtx (Pmode);
2947
12ef3745 2948 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
4673c1a0 2949 new = gen_rtx_CONST (Pmode, new);
2950 emit_move_insn (temp, new);
2951
e265a6da 2952 new = gen_const_mem (Pmode, temp);
4673c1a0 2953 emit_move_insn (reg, new);
2954 new = reg;
2955 }
2956 else
2957 {
f81e845f 2958 /* If the GOT offset might be >= 4k, we have to load it
4673c1a0 2959 from the literal pool (@GOT). */
2960
2961 rtx temp = gen_reg_rtx (Pmode);
2962
9a2a66ae 2963 if (reload_in_progress || reload_completed)
3072d30e 2964 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
4673c1a0 2965
12ef3745 2966 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
525d1294 2967 addr = gen_rtx_CONST (Pmode, addr);
2968 addr = force_const_mem (Pmode, addr);
4673c1a0 2969 emit_move_insn (temp, addr);
2970
2971 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
e265a6da 2972 new = gen_const_mem (Pmode, new);
4673c1a0 2973 emit_move_insn (reg, new);
2974 new = reg;
2975 }
f81e845f 2976 }
4673c1a0 2977 else
2978 {
2979 if (GET_CODE (addr) == CONST)
2980 {
2981 addr = XEXP (addr, 0);
2982 if (GET_CODE (addr) == UNSPEC)
2983 {
32eda510 2984 gcc_assert (XVECLEN (addr, 0) == 1);
4673c1a0 2985 switch (XINT (addr, 1))
2986 {
12ef3745 2987 /* If someone moved a GOT-relative UNSPEC
4673c1a0 2988 out of the literal pool, force them back in. */
12ef3745 2989 case UNSPEC_GOTOFF:
2990 case UNSPEC_PLTOFF:
525d1294 2991 new = force_const_mem (Pmode, orig);
4673c1a0 2992 break;
2993
12ef3745 2994 /* @GOT is OK as is if small. */
2995 case UNSPEC_GOT:
2996 if (flag_pic == 2)
2997 new = force_const_mem (Pmode, orig);
2998 break;
2999
4673c1a0 3000 /* @GOTENT is OK as is. */
12ef3745 3001 case UNSPEC_GOTENT:
4673c1a0 3002 break;
3003
3004 /* @PLT is OK as is on 64-bit, must be converted to
12ef3745 3005 GOT-relative @PLTOFF on 31-bit. */
3006 case UNSPEC_PLT:
dafc8d45 3007 if (!TARGET_CPU_ZARCH)
4673c1a0 3008 {
3009 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3010
12ef3745 3011 if (reload_in_progress || reload_completed)
3072d30e 3012 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
12ef3745 3013
4673c1a0 3014 addr = XVECEXP (addr, 0, 0);
f81e845f 3015 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
12ef3745 3016 UNSPEC_PLTOFF);
525d1294 3017 addr = gen_rtx_CONST (Pmode, addr);
3018 addr = force_const_mem (Pmode, addr);
4673c1a0 3019 emit_move_insn (temp, addr);
3020
12ef3745 3021 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
4673c1a0 3022 if (reg != 0)
3023 {
35d04dd3 3024 s390_load_address (reg, new);
4673c1a0 3025 new = reg;
3026 }
3027 }
3028 break;
3029
3030 /* Everything else cannot happen. */
3031 default:
32eda510 3032 gcc_unreachable ();
4673c1a0 3033 }
3034 }
32eda510 3035 else
3036 gcc_assert (GET_CODE (addr) == PLUS);
4673c1a0 3037 }
3038 if (GET_CODE (addr) == PLUS)
3039 {
3040 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
1ed004b7 3041
3042 gcc_assert (!TLS_SYMBOLIC_CONST (op0));
3043 gcc_assert (!TLS_SYMBOLIC_CONST (op1));
3044
f81e845f 3045 /* Check first to see if this is a constant offset
4673c1a0 3046 from a local symbol reference. */
3047 if ((GET_CODE (op0) == LABEL_REF
a3e33162 3048 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
4673c1a0 3049 && GET_CODE (op1) == CONST_INT)
3050 {
240fd4a0 3051 if (TARGET_CPU_ZARCH
3052 && larl_operand (op0, VOIDmode)
3053 && INTVAL (op1) < (HOST_WIDE_INT)1 << 31
3054 && INTVAL (op1) >= -((HOST_WIDE_INT)1 << 31))
4673c1a0 3055 {
3056 if (INTVAL (op1) & 1)
3057 {
f81e845f 3058 /* LARL can't handle odd offsets, so emit a
4673c1a0 3059 pair of LARL and LA. */
3060 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3061
51aa1e9c 3062 if (!DISP_IN_RANGE (INTVAL (op1)))
4673c1a0 3063 {
240fd4a0 3064 HOST_WIDE_INT even = INTVAL (op1) - 1;
4673c1a0 3065 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
800949fb 3066 op0 = gen_rtx_CONST (Pmode, op0);
bcd9bd66 3067 op1 = const1_rtx;
4673c1a0 3068 }
3069
3070 emit_move_insn (temp, op0);
3071 new = gen_rtx_PLUS (Pmode, temp, op1);
3072
3073 if (reg != 0)
3074 {
35d04dd3 3075 s390_load_address (reg, new);
4673c1a0 3076 new = reg;
3077 }
3078 }
3079 else
3080 {
3081 /* If the offset is even, we can just use LARL.
3082 This will happen automatically. */
3083 }
3084 }
3085 else
3086 {
12ef3745 3087 /* Access local symbols relative to the GOT. */
4673c1a0 3088
3089 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3090
12ef3745 3091 if (reload_in_progress || reload_completed)
3072d30e 3092 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
12ef3745 3093
f81e845f 3094 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
12ef3745 3095 UNSPEC_GOTOFF);
525d1294 3096 addr = gen_rtx_PLUS (Pmode, addr, op1);
3097 addr = gen_rtx_CONST (Pmode, addr);
3098 addr = force_const_mem (Pmode, addr);
f4286f7a 3099 emit_move_insn (temp, addr);
4673c1a0 3100
12ef3745 3101 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
4673c1a0 3102 if (reg != 0)
3103 {
35d04dd3 3104 s390_load_address (reg, new);
4673c1a0 3105 new = reg;
3106 }
3107 }
3108 }
3109
12ef3745 3110 /* Now, check whether it is a GOT relative symbol plus offset
4673c1a0 3111 that was pulled out of the literal pool. Force it back in. */
3112
3113 else if (GET_CODE (op0) == UNSPEC
f4286f7a 3114 && GET_CODE (op1) == CONST_INT
3115 && XINT (op0, 1) == UNSPEC_GOTOFF)
4673c1a0 3116 {
32eda510 3117 gcc_assert (XVECLEN (op0, 0) == 1);
4673c1a0 3118
525d1294 3119 new = force_const_mem (Pmode, orig);
4673c1a0 3120 }
3121
3122 /* Otherwise, compute the sum. */
3123 else
3124 {
3125 base = legitimize_pic_address (XEXP (addr, 0), reg);
3126 new = legitimize_pic_address (XEXP (addr, 1),
3127 base == reg ? NULL_RTX : reg);
3128 if (GET_CODE (new) == CONST_INT)
3129 new = plus_constant (base, INTVAL (new));
3130 else
3131 {
3132 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
3133 {
3134 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
3135 new = XEXP (new, 1);
3136 }
3137 new = gen_rtx_PLUS (Pmode, base, new);
3138 }
3139
3140 if (GET_CODE (new) == CONST)
3141 new = XEXP (new, 0);
3142 new = force_operand (new, 0);
3143 }
3144 }
3145 }
3146 return new;
3147}
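/* Illustrative example: with flag_pic == 2 on TARGET_CPU_ZARCH, a
   global symbol FOO is legitimized roughly as

     larl  <temp>, FOO@GOTENT
     lg/l  <reg>, 0(<temp>)

   i.e. the address of the GOT slot is computed PC-relative and the
   final address is loaded from it, without a literal-pool entry.  */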
3148
be00aaa8 3149/* Load the thread pointer into a register. */
3150
cc87d0c5 3151rtx
3152s390_get_thread_pointer (void)
be00aaa8 3153{
923cf36d 3154 rtx tp = gen_reg_rtx (Pmode);
be00aaa8 3155
923cf36d 3156 emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
be00aaa8 3157 mark_reg_pointer (tp, BITS_PER_WORD);
3158
3159 return tp;
3160}
3161
7346ca58 3162/* Emit a TLS call insn. The call target is the SYMBOL_REF stored
 3163 in s390_tls_symbol, which always refers to __tls_get_offset.
 3164 The returned offset is written to RESULT_REG and a USE rtx is
 3165 generated for TLS_CALL. */
be00aaa8 3166
3167static GTY(()) rtx s390_tls_symbol;
7346ca58 3168
3169static void
3170s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
be00aaa8 3171{
7346ca58 3172 rtx insn;
f588eb9f 3173
32eda510 3174 gcc_assert (flag_pic);
7346ca58 3175
be00aaa8 3176 if (!s390_tls_symbol)
3177 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
3178
f588eb9f 3179 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
3180 gen_rtx_REG (Pmode, RETURN_REGNUM));
7346ca58 3181
3182 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
3183 CONST_OR_PURE_CALL_P (insn) = 1;
be00aaa8 3184}
3185
3186/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3187 this (thread-local) address. REG may be used as temporary. */
3188
3189static rtx
b40da9a7 3190legitimize_tls_address (rtx addr, rtx reg)
be00aaa8 3191{
3192 rtx new, tls_call, temp, base, r2, insn;
3193
3194 if (GET_CODE (addr) == SYMBOL_REF)
3195 switch (tls_symbolic_operand (addr))
3196 {
3197 case TLS_MODEL_GLOBAL_DYNAMIC:
3198 start_sequence ();
3199 r2 = gen_rtx_REG (Pmode, 2);
3200 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
3201 new = gen_rtx_CONST (Pmode, tls_call);
3202 new = force_const_mem (Pmode, new);
3203 emit_move_insn (r2, new);
7346ca58 3204 s390_emit_tls_call_insn (r2, tls_call);
be00aaa8 3205 insn = get_insns ();
3206 end_sequence ();
3207
3208 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3209 temp = gen_reg_rtx (Pmode);
3210 emit_libcall_block (insn, temp, r2, new);
3211
cc87d0c5 3212 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
be00aaa8 3213 if (reg != 0)
3214 {
3215 s390_load_address (reg, new);
3216 new = reg;
3217 }
3218 break;
3219
3220 case TLS_MODEL_LOCAL_DYNAMIC:
3221 start_sequence ();
3222 r2 = gen_rtx_REG (Pmode, 2);
3223 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
3224 new = gen_rtx_CONST (Pmode, tls_call);
3225 new = force_const_mem (Pmode, new);
3226 emit_move_insn (r2, new);
7346ca58 3227 s390_emit_tls_call_insn (r2, tls_call);
be00aaa8 3228 insn = get_insns ();
3229 end_sequence ();
3230
3231 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
3232 temp = gen_reg_rtx (Pmode);
3233 emit_libcall_block (insn, temp, r2, new);
3234
cc87d0c5 3235 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
be00aaa8 3236 base = gen_reg_rtx (Pmode);
3237 s390_load_address (base, new);
3238
3239 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
3240 new = gen_rtx_CONST (Pmode, new);
3241 new = force_const_mem (Pmode, new);
3242 temp = gen_reg_rtx (Pmode);
3243 emit_move_insn (temp, new);
3244
3245 new = gen_rtx_PLUS (Pmode, base, temp);
3246 if (reg != 0)
3247 {
3248 s390_load_address (reg, new);
3249 new = reg;
3250 }
3251 break;
3252
3253 case TLS_MODEL_INITIAL_EXEC:
3254 if (flag_pic == 1)
3255 {
3256 /* Assume GOT offset < 4k. This is handled the same way
3257 in both 31- and 64-bit code. */
3258
3259 if (reload_in_progress || reload_completed)
3072d30e 3260 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
be00aaa8 3261
3262 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3263 new = gen_rtx_CONST (Pmode, new);
3264 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
e265a6da 3265 new = gen_const_mem (Pmode, new);
be00aaa8 3266 temp = gen_reg_rtx (Pmode);
3267 emit_move_insn (temp, new);
3268 }
dafc8d45 3269 else if (TARGET_CPU_ZARCH)
be00aaa8 3270 {
3271 /* If the GOT offset might be >= 4k, we determine the position
3272 of the GOT entry via a PC-relative LARL. */
3273
3274 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3275 new = gen_rtx_CONST (Pmode, new);
3276 temp = gen_reg_rtx (Pmode);
3277 emit_move_insn (temp, new);
3278
e265a6da 3279 new = gen_const_mem (Pmode, temp);
be00aaa8 3280 temp = gen_reg_rtx (Pmode);
3281 emit_move_insn (temp, new);
3282 }
3283 else if (flag_pic)
3284 {
f81e845f 3285 /* If the GOT offset might be >= 4k, we have to load it
be00aaa8 3286 from the literal pool. */
3287
3288 if (reload_in_progress || reload_completed)
3072d30e 3289 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
be00aaa8 3290
3291 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3292 new = gen_rtx_CONST (Pmode, new);
3293 new = force_const_mem (Pmode, new);
3294 temp = gen_reg_rtx (Pmode);
3295 emit_move_insn (temp, new);
3296
3297 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
e265a6da 3298 new = gen_const_mem (Pmode, new);
be00aaa8 3299
3300 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3301 temp = gen_reg_rtx (Pmode);
3302 emit_insn (gen_rtx_SET (Pmode, temp, new));
3303 }
3304 else
3305 {
3306 /* In position-dependent code, load the absolute address of
3307 the GOT entry from the literal pool. */
3308
3309 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3310 new = gen_rtx_CONST (Pmode, new);
3311 new = force_const_mem (Pmode, new);
3312 temp = gen_reg_rtx (Pmode);
3313 emit_move_insn (temp, new);
3314
3315 new = temp;
e265a6da 3316 new = gen_const_mem (Pmode, new);
be00aaa8 3317 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3318 temp = gen_reg_rtx (Pmode);
3319 emit_insn (gen_rtx_SET (Pmode, temp, new));
3320 }
3321
cc87d0c5 3322 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
be00aaa8 3323 if (reg != 0)
3324 {
3325 s390_load_address (reg, new);
3326 new = reg;
3327 }
3328 break;
3329
3330 case TLS_MODEL_LOCAL_EXEC:
3331 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3332 new = gen_rtx_CONST (Pmode, new);
3333 new = force_const_mem (Pmode, new);
3334 temp = gen_reg_rtx (Pmode);
3335 emit_move_insn (temp, new);
3336
cc87d0c5 3337 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
be00aaa8 3338 if (reg != 0)
3339 {
3340 s390_load_address (reg, new);
3341 new = reg;
3342 }
3343 break;
3344
3345 default:
32eda510 3346 gcc_unreachable ();
be00aaa8 3347 }
3348
3349 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
3350 {
3351 switch (XINT (XEXP (addr, 0), 1))
3352 {
3353 case UNSPEC_INDNTPOFF:
32eda510 3354 gcc_assert (TARGET_CPU_ZARCH);
3355 new = addr;
be00aaa8 3356 break;
3357
3358 default:
32eda510 3359 gcc_unreachable ();
be00aaa8 3360 }
3361 }
3362
b7ace65c 3363 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3364 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3365 {
3366 new = XEXP (XEXP (addr, 0), 0);
3367 if (GET_CODE (new) != SYMBOL_REF)
3368 new = gen_rtx_CONST (Pmode, new);
3369
3370 new = legitimize_tls_address (new, reg);
3371 new = plus_constant (new, INTVAL (XEXP (XEXP (addr, 0), 1)));
3372 new = force_operand (new, 0);
3373 }
3374
be00aaa8 3375 else
32eda510 3376 gcc_unreachable (); /* for now ... */
be00aaa8 3377
3378 return new;
3379}
3380
4673c1a0 3381/* Emit insns to move operands[1] into operands[0]. */
3382
3383void
b40da9a7 3384emit_symbolic_move (rtx *operands)
4673c1a0 3385{
e1ba4a27 3386 rtx temp = !can_create_pseudo_p () ? operands[0] : gen_reg_rtx (Pmode);
4673c1a0 3387
be00aaa8 3388 if (GET_CODE (operands[0]) == MEM)
4673c1a0 3389 operands[1] = force_reg (Pmode, operands[1]);
be00aaa8 3390 else if (TLS_SYMBOLIC_CONST (operands[1]))
3391 operands[1] = legitimize_tls_address (operands[1], temp);
3392 else if (flag_pic)
4673c1a0 3393 operands[1] = legitimize_pic_address (operands[1], temp);
3394}
3395
56769981 3396/* Try machine-dependent ways of modifying an illegitimate address X
4673c1a0 3397 to be legitimate. If we find one, return the new, valid address.
4673c1a0 3398
3399 OLDX is the address as it was before break_out_memory_refs was called.
3400 In some cases it is useful to look at this to decide what needs to be done.
3401
56769981 3402 MODE is the mode of the operand pointed to by X.
4673c1a0 3403
3404 When -fpic is used, special handling is needed for symbolic references.
3405 See comments by legitimize_pic_address for details. */
3406
3407rtx
edd89d66 3408legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
b40da9a7 3409 enum machine_mode mode ATTRIBUTE_UNUSED)
4673c1a0 3410{
2eb8fe23 3411 rtx constant_term = const0_rtx;
4673c1a0 3412
be00aaa8 3413 if (TLS_SYMBOLIC_CONST (x))
3414 {
3415 x = legitimize_tls_address (x, 0);
3416
3417 if (legitimate_address_p (mode, x, FALSE))
3418 return x;
3419 }
1ed004b7 3420 else if (GET_CODE (x) == PLUS
3421 && (TLS_SYMBOLIC_CONST (XEXP (x, 0))
3422 || TLS_SYMBOLIC_CONST (XEXP (x, 1))))
3423 {
3424 return x;
3425 }
be00aaa8 3426 else if (flag_pic)
4673c1a0 3427 {
2eb8fe23 3428 if (SYMBOLIC_CONST (x)
f81e845f 3429 || (GET_CODE (x) == PLUS
3430 && (SYMBOLIC_CONST (XEXP (x, 0))
2eb8fe23 3431 || SYMBOLIC_CONST (XEXP (x, 1)))))
3432 x = legitimize_pic_address (x, 0);
3433
3434 if (legitimate_address_p (mode, x, FALSE))
3435 return x;
4673c1a0 3436 }
4673c1a0 3437
2eb8fe23 3438 x = eliminate_constant_term (x, &constant_term);
56769981 3439
de84f805 3440 /* Optimize loading of large displacements by splitting them
3441 into the multiple of 4K and the rest; this allows the
f81e845f 3442 former to be CSE'd if possible.
de84f805 3443
3444 Don't do this if the displacement is added to a register
3445 pointing into the stack frame, as the offsets will
3446 change later anyway. */
3447
3448 if (GET_CODE (constant_term) == CONST_INT
51aa1e9c 3449 && !TARGET_LONG_DISPLACEMENT
3450 && !DISP_IN_RANGE (INTVAL (constant_term))
de84f805 3451 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
3452 {
3453 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
3454 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
3455
3456 rtx temp = gen_reg_rtx (Pmode);
3457 rtx val = force_operand (GEN_INT (upper), temp);
3458 if (val != temp)
3459 emit_move_insn (temp, val);
3460
3461 x = gen_rtx_PLUS (Pmode, x, temp);
3462 constant_term = GEN_INT (lower);
3463 }
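 /* Worked example (illustrative): a constant term of 0x12345 is split
 into upper = 0x12000, added to X through a new pseudo, and
 lower = 0x345, which remains as an in-range displacement; the
 0x12000 part can then be CSE'd across nearby accesses.  */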
3464
2eb8fe23 3465 if (GET_CODE (x) == PLUS)
4673c1a0 3466 {
2eb8fe23 3467 if (GET_CODE (XEXP (x, 0)) == REG)
3468 {
edd89d66 3469 rtx temp = gen_reg_rtx (Pmode);
3470 rtx val = force_operand (XEXP (x, 1), temp);
2eb8fe23 3471 if (val != temp)
3472 emit_move_insn (temp, val);
3473
3474 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
3475 }
3476
3477 else if (GET_CODE (XEXP (x, 1)) == REG)
3478 {
edd89d66 3479 rtx temp = gen_reg_rtx (Pmode);
3480 rtx val = force_operand (XEXP (x, 0), temp);
2eb8fe23 3481 if (val != temp)
3482 emit_move_insn (temp, val);
3483
3484 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
3485 }
4673c1a0 3486 }
2eb8fe23 3487
3488 if (constant_term != const0_rtx)
3489 x = gen_rtx_PLUS (Pmode, x, constant_term);
3490
3491 return x;
4673c1a0 3492}
3493
e4542435 3494/* Try a machine-dependent way of reloading an illegitimate address AD
 3495 operand. If we find one, push the reload and return the new address.
3496
3497 MODE is the mode of the enclosing MEM. OPNUM is the operand number
3498 and TYPE is the reload type of the current reload. */
3499
3500rtx
3501legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
3502 int opnum, int type)
3503{
3504 if (!optimize || TARGET_LONG_DISPLACEMENT)
3505 return NULL_RTX;
3506
3507 if (GET_CODE (ad) == PLUS)
3508 {
3509 rtx tem = simplify_binary_operation (PLUS, Pmode,
3510 XEXP (ad, 0), XEXP (ad, 1));
3511 if (tem)
3512 ad = tem;
3513 }
3514
3515 if (GET_CODE (ad) == PLUS
3516 && GET_CODE (XEXP (ad, 0)) == REG
3517 && GET_CODE (XEXP (ad, 1)) == CONST_INT
3518 && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
3519 {
3520 HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
3521 HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
3522 rtx cst, tem, new;
3523
3524 cst = GEN_INT (upper);
3525 if (!legitimate_reload_constant_p (cst))
3526 cst = force_const_mem (Pmode, cst);
3527
3528 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
3529 new = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));
3530
3531 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
3532 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3533 opnum, (enum reload_type) type);
3534 return new;
3535 }
3536
3537 return NULL_RTX;
3538}
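/* Worked example (illustrative): without long displacements, the
   address (plus (reg %r2) (const_int 0x12345)) is rewritten as

     (plus (plus (reg %r2) (const_int 0x12000)) (const_int 0x345))

   and a reload of the constant 0x12000 into an address register is
   pushed; the remaining displacement 0x345 fits the 12-bit field.  */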
3539
4fbc4db5 3540/* Emit code to move LEN bytes from SRC to DST. */
3541
3542void
008c057d 3543s390_expand_movmem (rtx dst, rtx src, rtx len)
4fbc4db5 3544{
4fbc4db5 3545 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3546 {
3547 if (INTVAL (len) > 0)
008c057d 3548 emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
4fbc4db5 3549 }
3550
3551 else if (TARGET_MVCLE)
3552 {
008c057d 3553 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
4fbc4db5 3554 }
3555
3556 else
3557 {
3558 rtx dst_addr, src_addr, count, blocks, temp;
7746964e 3559 rtx loop_start_label = gen_label_rtx ();
4ee9c684 3560 rtx loop_end_label = gen_label_rtx ();
4fbc4db5 3561 rtx end_label = gen_label_rtx ();
3562 enum machine_mode mode;
4fbc4db5 3563
3564 mode = GET_MODE (len);
3565 if (mode == VOIDmode)
31838f66 3566 mode = Pmode;
4fbc4db5 3567
4fbc4db5 3568 dst_addr = gen_reg_rtx (Pmode);
3569 src_addr = gen_reg_rtx (Pmode);
3570 count = gen_reg_rtx (mode);
3571 blocks = gen_reg_rtx (mode);
3572
3573 convert_move (count, len, 1);
f81e845f 3574 emit_cmp_and_jump_insns (count, const0_rtx,
4fbc4db5 3575 EQ, NULL_RTX, mode, 1, end_label);
3576
3577 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3578 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
3579 dst = change_address (dst, VOIDmode, dst_addr);
3580 src = change_address (src, VOIDmode, src_addr);
f81e845f 3581
4fbc4db5 3582 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3583 if (temp != count)
3584 emit_move_insn (count, temp);
3585
1ed6fd08 3586 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
4fbc4db5 3587 if (temp != blocks)
3588 emit_move_insn (blocks, temp);
3589
4ee9c684 3590 emit_cmp_and_jump_insns (blocks, const0_rtx,
3591 EQ, NULL_RTX, mode, 1, loop_end_label);
7746964e 3592
3593 emit_label (loop_start_label);
4fbc4db5 3594
008c057d 3595 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
f81e845f 3596 s390_load_address (dst_addr,
4fbc4db5 3597 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
f81e845f 3598 s390_load_address (src_addr,
4fbc4db5 3599 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
f81e845f 3600
4fbc4db5 3601 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3602 if (temp != blocks)
3603 emit_move_insn (blocks, temp);
3604
4ee9c684 3605 emit_cmp_and_jump_insns (blocks, const0_rtx,
3606 EQ, NULL_RTX, mode, 1, loop_end_label);
7746964e 3607
3608 emit_jump (loop_start_label);
4ee9c684 3609 emit_label (loop_end_label);
4fbc4db5 3610
008c057d 3611 emit_insn (gen_movmem_short (dst, src,
31838f66 3612 convert_to_mode (Pmode, count, 1)));
4fbc4db5 3613 emit_label (end_label);
3614 }
3615}
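/* Worked example (illustrative): for a run-time length of 600 bytes
   the loop above performs two 256-byte block moves (count = 599,
   blocks = 599 >> 8 = 2), and the trailing movmem_short moves the
   remaining 88 bytes: its EXECUTE-based expansion uses only the low
   byte of count (599 & 0xff = 87, encoding an 88-byte move).  */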
3616
805a133b 3617/* Emit code to set LEN bytes at DST to VAL.
3618 Make use of clrmem if VAL is zero. */
4fbc4db5 3619
3620void
805a133b 3621s390_expand_setmem (rtx dst, rtx len, rtx val)
4fbc4db5 3622{
1ed6fd08 3623 if (GET_CODE (len) == CONST_INT && INTVAL (len) == 0)
3624 return;
3625
805a133b 3626 gcc_assert (GET_CODE (val) == CONST_INT || GET_MODE (val) == QImode);
3627
1ed6fd08 3628 if (GET_CODE (len) == CONST_INT && INTVAL (len) > 0 && INTVAL (len) <= 257)
4fbc4db5 3629 {
805a133b 3630 if (val == const0_rtx && INTVAL (len) <= 256)
008c057d 3631 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
805a133b 3632 else
3633 {
3634 /* Initialize memory by storing the first byte. */
3635 emit_move_insn (adjust_address (dst, QImode, 0), val);
3636
3637 if (INTVAL (len) > 1)
3638 {
 3639	      /* Initiate a move with a one-byte overlap:
 3640	         the first byte of DST is propagated through DSTP1.
 3641	         Prepare a movmem for:  DST+1 = DST (length = LEN - 1).
 3642	         DST's size is set to 1 so the rest of the memory location
 3643	         does not count as a source operand.  */
3644 rtx dstp1 = adjust_address (dst, VOIDmode, 1);
3645 set_mem_size (dst, const1_rtx);
3646
3647 emit_insn (gen_movmem_short (dstp1, dst,
3648 GEN_INT (INTVAL (len) - 2)));
3649 }
3650 }
4fbc4db5 3651 }
3652
3653 else if (TARGET_MVCLE)
3654 {
805a133b 3655 val = force_not_mem (convert_modes (Pmode, QImode, val, 1));
3656 emit_insn (gen_setmem_long (dst, convert_to_mode (Pmode, len, 1), val));
4fbc4db5 3657 }
3658
3659 else
3660 {
805a133b 3661 rtx dst_addr, src_addr, count, blocks, temp, dstp1 = NULL_RTX;
7746964e 3662 rtx loop_start_label = gen_label_rtx ();
4ee9c684 3663 rtx loop_end_label = gen_label_rtx ();
4fbc4db5 3664 rtx end_label = gen_label_rtx ();
3665 enum machine_mode mode;
4fbc4db5 3666
3667 mode = GET_MODE (len);
3668 if (mode == VOIDmode)
31838f66 3669 mode = Pmode;
4fbc4db5 3670
4fbc4db5 3671 dst_addr = gen_reg_rtx (Pmode);
3672 src_addr = gen_reg_rtx (Pmode);
3673 count = gen_reg_rtx (mode);
3674 blocks = gen_reg_rtx (mode);
3675
3676 convert_move (count, len, 1);
f81e845f 3677 emit_cmp_and_jump_insns (count, const0_rtx,
4fbc4db5 3678 EQ, NULL_RTX, mode, 1, end_label);
3679
3680 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3681 dst = change_address (dst, VOIDmode, dst_addr);
f81e845f 3682
805a133b 3683 if (val == const0_rtx)
3684 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3685 else
3686 {
3687 dstp1 = adjust_address (dst, VOIDmode, 1);
3688 set_mem_size (dst, const1_rtx);
3689
3690 /* Initialize memory by storing the first byte. */
3691 emit_move_insn (adjust_address (dst, QImode, 0), val);
3692
3693 /* If count is 1 we are done. */
3694 emit_cmp_and_jump_insns (count, const1_rtx,
3695 EQ, NULL_RTX, mode, 1, end_label);
3696
3697 temp = expand_binop (mode, add_optab, count, GEN_INT (-2), count, 1, 0);
3698 }
4fbc4db5 3699 if (temp != count)
3700 emit_move_insn (count, temp);
3701
1ed6fd08 3702 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
4fbc4db5 3703 if (temp != blocks)
3704 emit_move_insn (blocks, temp);
3705
4ee9c684 3706 emit_cmp_and_jump_insns (blocks, const0_rtx,
3707 EQ, NULL_RTX, mode, 1, loop_end_label);
7746964e 3708
3709 emit_label (loop_start_label);
4fbc4db5 3710
805a133b 3711 if (val == const0_rtx)
3712 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
3713 else
3714 emit_insn (gen_movmem_short (dstp1, dst, GEN_INT (255)));
f81e845f 3715 s390_load_address (dst_addr,
4fbc4db5 3716 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
f81e845f 3717
4fbc4db5 3718 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3719 if (temp != blocks)
3720 emit_move_insn (blocks, temp);
3721
4ee9c684 3722 emit_cmp_and_jump_insns (blocks, const0_rtx,
3723 EQ, NULL_RTX, mode, 1, loop_end_label);
7746964e 3724
3725 emit_jump (loop_start_label);
4ee9c684 3726 emit_label (loop_end_label);
4fbc4db5 3727
805a133b 3728 if (val == const0_rtx)
3729 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
3730 else
3731 emit_insn (gen_movmem_short (dstp1, dst, convert_to_mode (Pmode, count, 1)));
4fbc4db5 3732 emit_label (end_label);
3733 }
3734}
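/* The overlapping-move trick used above, as a sketch: after storing VAL
   to the first byte, copying DST -> DST+1 with length LEN - 1 propagates
   that byte through the whole region, since MVC copies strictly left to
   right one byte at a time:

     dst[0] = val;
     mvc (dst + 1, dst, len - 1);     /* byte i+1 is copied from byte i */
*/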
3735
3736/* Emit code to compare LEN bytes at OP0 with those at OP1,
3737 and return the result in TARGET. */
3738
3739void
b40da9a7 3740s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
4fbc4db5 3741{
80b53886 3742 rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
dd16a4bd 3743 rtx tmp;
3744
3745 /* As the result of CMPINT is inverted compared to what we need,
3746 we have to swap the operands. */
3747 tmp = op0; op0 = op1; op1 = tmp;
4fbc4db5 3748
4fbc4db5 3749 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3750 {
3751 if (INTVAL (len) > 0)
3752 {
31838f66 3753 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
dd16a4bd 3754 emit_insn (gen_cmpint (target, ccreg));
4fbc4db5 3755 }
3756 else
3757 emit_move_insn (target, const0_rtx);
3758 }
bcbf02a5 3759 else if (TARGET_MVCLE)
4fbc4db5 3760 {
31838f66 3761 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
dd16a4bd 3762 emit_insn (gen_cmpint (target, ccreg));
4fbc4db5 3763 }
4fbc4db5 3764 else
3765 {
3766 rtx addr0, addr1, count, blocks, temp;
7746964e 3767 rtx loop_start_label = gen_label_rtx ();
4ee9c684 3768 rtx loop_end_label = gen_label_rtx ();
4fbc4db5 3769 rtx end_label = gen_label_rtx ();
3770 enum machine_mode mode;
4fbc4db5 3771
3772 mode = GET_MODE (len);
3773 if (mode == VOIDmode)
31838f66 3774 mode = Pmode;
4fbc4db5 3775
4fbc4db5 3776 addr0 = gen_reg_rtx (Pmode);
3777 addr1 = gen_reg_rtx (Pmode);
3778 count = gen_reg_rtx (mode);
3779 blocks = gen_reg_rtx (mode);
3780
3781 convert_move (count, len, 1);
f81e845f 3782 emit_cmp_and_jump_insns (count, const0_rtx,
4fbc4db5 3783 EQ, NULL_RTX, mode, 1, end_label);
3784
3785 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
3786 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
3787 op0 = change_address (op0, VOIDmode, addr0);
3788 op1 = change_address (op1, VOIDmode, addr1);
f81e845f 3789
4fbc4db5 3790 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3791 if (temp != count)
3792 emit_move_insn (count, temp);
3793
1ed6fd08 3794 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
4fbc4db5 3795 if (temp != blocks)
3796 emit_move_insn (blocks, temp);
3797
4ee9c684 3798 emit_cmp_and_jump_insns (blocks, const0_rtx,
3799 EQ, NULL_RTX, mode, 1, loop_end_label);
7746964e 3800
3801 emit_label (loop_start_label);
4fbc4db5 3802
31838f66 3803 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
80b53886 3804 temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
f81e845f 3805 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
4fbc4db5 3806 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
3807 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
3808 emit_jump_insn (temp);
3809
f81e845f 3810 s390_load_address (addr0,
4fbc4db5 3811 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
f81e845f 3812 s390_load_address (addr1,
4fbc4db5 3813 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
f81e845f 3814
4fbc4db5 3815 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3816 if (temp != blocks)
3817 emit_move_insn (blocks, temp);
3818
4ee9c684 3819 emit_cmp_and_jump_insns (blocks, const0_rtx,
3820 EQ, NULL_RTX, mode, 1, loop_end_label);
7746964e 3821
3822 emit_jump (loop_start_label);
4ee9c684 3823 emit_label (loop_end_label);
4fbc4db5 3824
f588eb9f 3825 emit_insn (gen_cmpmem_short (op0, op1,
31838f66 3826 convert_to_mode (Pmode, count, 1)));
4fbc4db5 3827 emit_label (end_label);
3828
dd16a4bd 3829 emit_insn (gen_cmpint (target, ccreg));
4fbc4db5 3830 }
3831}
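/* A sketch of the constant-length case above (hypothetical operands):

     clc   0(LEN,%r_op0),0(%r_op1)   ; compare, sets CCU
     <cmpint>                        ; TARGET = -1 / 0 / +1 from the CC

   with OP0 and OP1 pre-swapped because CMPINT's sign is inverted
   relative to the memcmp convention.  */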
3832
3b699fc7 3833
3834/* Expand conditional increment or decrement using alc/slb instructions.
3835 Should generate code setting DST to either SRC or SRC + INCREMENT,
3836 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
eeba5f25 3837 Returns true if successful, false otherwise.
3838
3839 That makes it possible to implement some if-constructs without jumps e.g.:
3840 (borrow = CC0 | CC1 and carry = CC2 | CC3)
3841 unsigned int a, b, c;
3842 if (a < b) c++; -> CCU b > a -> CC2; c += carry;
3843 if (a < b) c--; -> CCL3 a - b -> borrow; c -= borrow;
3844 if (a <= b) c++; -> CCL3 b - a -> borrow; c += carry;
3845 if (a <= b) c--; -> CCU a <= b -> borrow; c -= borrow;
3846
3847 Checks for EQ and NE with a nonzero value need an additional xor e.g.:
3848 if (a == b) c++; -> CCL3 a ^= b; 0 - a -> borrow; c += carry;
3849 if (a == b) c--; -> CCU a ^= b; a <= 0 -> CC0 | CC1; c -= borrow;
3850 if (a != b) c++; -> CCU a ^= b; a > 0 -> CC2; c += carry;
3851 if (a != b) c--; -> CCL3 a ^= b; 0 - a -> borrow; c -= borrow; */
3b699fc7 3852
3853bool
3854s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
3855 rtx dst, rtx src, rtx increment)
3856{
3857 enum machine_mode cmp_mode;
3858 enum machine_mode cc_mode;
3859 rtx op_res;
3860 rtx insn;
3861 rtvec p;
32eda510 3862 int ret;
3b699fc7 3863
3864 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
3865 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
3866 cmp_mode = SImode;
3867 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
3868 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
3869 cmp_mode = DImode;
3870 else
3871 return false;
3872
3873 /* Try ADD LOGICAL WITH CARRY. */
3874 if (increment == const1_rtx)
3875 {
3876 /* Determine CC mode to use. */
3877 if (cmp_code == EQ || cmp_code == NE)
3878 {
3879 if (cmp_op1 != const0_rtx)
3880 {
3881 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3882 NULL_RTX, 0, OPTAB_WIDEN);
3883 cmp_op1 = const0_rtx;
3884 }
3885
3886 cmp_code = cmp_code == EQ ? LEU : GTU;
3887 }
3888
3889 if (cmp_code == LTU || cmp_code == LEU)
3890 {
3891 rtx tem = cmp_op0;
3892 cmp_op0 = cmp_op1;
3893 cmp_op1 = tem;
3894 cmp_code = swap_condition (cmp_code);
3895 }
3896
3897 switch (cmp_code)
3898 {
3899 case GTU:
3900 cc_mode = CCUmode;
3901 break;
3902
3903 case GEU:
3904 cc_mode = CCL3mode;
3905 break;
3906
3907 default:
3908 return false;
3909 }
3910
3911 /* Emit comparison instruction pattern. */
3912 if (!register_operand (cmp_op0, cmp_mode))
3913 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3914
3915 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3916 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3917 /* We use insn_invalid_p here to add clobbers if required. */
32eda510 3918 ret = insn_invalid_p (emit_insn (insn));
3919 gcc_assert (!ret);
3b699fc7 3920
3921 /* Emit ALC instruction pattern. */
3922 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3923 gen_rtx_REG (cc_mode, CC_REGNUM),
3924 const0_rtx);
3925
3926 if (src != const0_rtx)
3927 {
3928 if (!register_operand (src, GET_MODE (dst)))
3929 src = force_reg (GET_MODE (dst), src);
3930
3931 src = gen_rtx_PLUS (GET_MODE (dst), src, const0_rtx);
3932 op_res = gen_rtx_PLUS (GET_MODE (dst), src, op_res);
3933 }
3934
3935 p = rtvec_alloc (2);
3936 RTVEC_ELT (p, 0) =
3937 gen_rtx_SET (VOIDmode, dst, op_res);
3938 RTVEC_ELT (p, 1) =
3939 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3940 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3941
3942 return true;
3943 }
3944
3945 /* Try SUBTRACT LOGICAL WITH BORROW. */
3946 if (increment == constm1_rtx)
3947 {
3948 /* Determine CC mode to use. */
3949 if (cmp_code == EQ || cmp_code == NE)
3950 {
3951 if (cmp_op1 != const0_rtx)
3952 {
3953 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3954 NULL_RTX, 0, OPTAB_WIDEN);
3955 cmp_op1 = const0_rtx;
3956 }
3957
3958 cmp_code = cmp_code == EQ ? LEU : GTU;
3959 }
3960
3961 if (cmp_code == GTU || cmp_code == GEU)
3962 {
3963 rtx tem = cmp_op0;
3964 cmp_op0 = cmp_op1;
3965 cmp_op1 = tem;
3966 cmp_code = swap_condition (cmp_code);
3967 }
3968
3969 switch (cmp_code)
3970 {
3971 case LEU:
3972 cc_mode = CCUmode;
3973 break;
3974
3975 case LTU:
3976 cc_mode = CCL3mode;
3977 break;
3978
3979 default:
3980 return false;
3981 }
3982
3983 /* Emit comparison instruction pattern. */
3984 if (!register_operand (cmp_op0, cmp_mode))
3985 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3986
3987 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3988 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3989 /* We use insn_invalid_p here to add clobbers if required. */
32eda510 3990 ret = insn_invalid_p (emit_insn (insn));
3991 gcc_assert (!ret);
3b699fc7 3992
3993 /* Emit SLB instruction pattern. */
3994 if (!register_operand (src, GET_MODE (dst)))
3995 src = force_reg (GET_MODE (dst), src);
3996
3997 op_res = gen_rtx_MINUS (GET_MODE (dst),
3998 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
3999 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
4000 gen_rtx_REG (cc_mode, CC_REGNUM),
4001 const0_rtx));
4002 p = rtvec_alloc (2);
4003 RTVEC_ELT (p, 0) =
4004 gen_rtx_SET (VOIDmode, dst, op_res);
4005 RTVEC_ELT (p, 1) =
4006 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
4007 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
4008
4009 return true;
4010 }
4011
4012 return false;
4013}
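/* A sketch of what the ALC path produces for `if (a < b) c++;' with
   unsigned SImode operands (register assignments are hypothetical):

     lhi  %r0,0          ; zero for the ALCR addend
     clr  %r3,%r2        ; compare b with a; CC2 <=> b > a <=> carry
     alcr %r4,%r0        ; c = c + 0 + carry

   i.e. DST = SRC + (CMP_OP0 < CMP_OP1) without a branch.  */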
4014
0349cc73 4015/* Expand code for the insv template. Return true if successful, false otherwise. */
4016
4017bool
4018s390_expand_insv (rtx dest, rtx op1, rtx op2, rtx src)
4019{
4020 int bitsize = INTVAL (op1);
4021 int bitpos = INTVAL (op2);
4022
7063afc3 4023 /* We need byte alignment. */
0349cc73 4024 if (bitsize % BITS_PER_UNIT)
4025 return false;
4026
4027 if (bitpos == 0
4028 && memory_operand (dest, VOIDmode)
4029 && (register_operand (src, word_mode)
4030 || const_int_operand (src, VOIDmode)))
4031 {
4032 /* Emit standard pattern if possible. */
4033 enum machine_mode mode = smallest_mode_for_size (bitsize, MODE_INT);
4034 if (GET_MODE_BITSIZE (mode) == bitsize)
4035 emit_move_insn (adjust_address (dest, mode, 0), gen_lowpart (mode, src));
4036
4037 /* (set (ze (mem)) (const_int)). */
4038 else if (const_int_operand (src, VOIDmode))
4039 {
4040 int size = bitsize / BITS_PER_UNIT;
4041 rtx src_mem = adjust_address (force_const_mem (word_mode, src), BLKmode,
4042 GET_MODE_SIZE (word_mode) - size);
4043
4044 dest = adjust_address (dest, BLKmode, 0);
4045 set_mem_size (dest, GEN_INT (size));
4046 s390_expand_movmem (dest, src_mem, GEN_INT (size));
4047 }
4048
4049 /* (set (ze (mem)) (reg)). */
4050 else if (register_operand (src, word_mode))
4051 {
4052 if (bitsize <= GET_MODE_BITSIZE (SImode))
4053 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, op1,
4054 const0_rtx), src);
4055 else
4056 {
4057 /* Emit st,stcmh sequence. */
4058 int stcmh_width = bitsize - GET_MODE_BITSIZE (SImode);
4059 int size = stcmh_width / BITS_PER_UNIT;
4060
4061 emit_move_insn (adjust_address (dest, SImode, size),
4062 gen_lowpart (SImode, src));
4063 set_mem_size (dest, GEN_INT (size));
4064 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, GEN_INT
4065 (stcmh_width), const0_rtx),
4066 gen_rtx_LSHIFTRT (word_mode, src, GEN_INT
4067 (GET_MODE_BITSIZE (SImode))));
4068 }
4069 }
4070 else
4071 return false;
4072
4073 return true;
4074 }
4075
4076 /* (set (ze (reg)) (const_int)). */
4077 if (TARGET_ZARCH
4078 && register_operand (dest, word_mode)
4079 && (bitpos % 16) == 0
4080 && (bitsize % 16) == 0
4081 && const_int_operand (src, VOIDmode))
4082 {
4083 HOST_WIDE_INT val = INTVAL (src);
4084 int regpos = bitpos + bitsize;
4085
4086 while (regpos > bitpos)
4087 {
4088 enum machine_mode putmode;
4089 int putsize;
4090
4091 if (TARGET_EXTIMM && (regpos % 32 == 0) && (regpos >= bitpos + 32))
4092 putmode = SImode;
4093 else
4094 putmode = HImode;
4095
4096 putsize = GET_MODE_BITSIZE (putmode);
4097 regpos -= putsize;
4098 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
4099 GEN_INT (putsize),
4100 GEN_INT (regpos)),
4101 gen_int_mode (val, putmode));
4102 val >>= putsize;
4103 }
4104 gcc_assert (regpos == bitpos);
4105 return true;
4106 }
4107
4108 return false;
4109}
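/* Two examples of the memory path above (a sketch): a 16-bit insertion
   at bit position 0 of a MEM simply becomes an HImode store of the low
   part of SRC, while a 48-bit insertion is split into an SImode store
   of the low 32 bits plus a `stcmh' covering the remaining high 16
   bits.  */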
3b699fc7 4110
7cc66daf 4111/* A subroutine of s390_expand_cs_hqi and s390_expand_atomic which returns a
4112 register that holds VAL of mode MODE shifted by COUNT bits. */
182f815e 4113
4114static inline rtx
4115s390_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
4116{
4117 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
4118 NULL_RTX, 1, OPTAB_DIRECT);
4119 return expand_simple_binop (SImode, ASHIFT, val, count,
4120 NULL_RTX, 1, OPTAB_DIRECT);
4121}
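/* E.g. for MODE == QImode, VAL == 0x12 and COUNT == 16 this returns a
   register holding (0x12 & 0xff) << 16 == 0x00120000, i.e. the byte
   placed at its bit position within the containing SImode word.  */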
4122
4123/* Structure to hold the initial parameters for a compare_and_swap operation
4124 in HImode and QImode. */
4125
4126struct alignment_context
4127{
4128 rtx memsi; /* SI aligned memory location. */
4129 rtx shift; /* Bit offset with regard to lsb. */
4130 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */
4131 rtx modemaski; /* ~modemask */
191ec5a2 4132 bool aligned; /* True if memory is aligned, false otherwise. */
182f815e 4133};
4134
7cc66daf 4135/* A subroutine of s390_expand_cs_hqi and s390_expand_atomic to initialize
 4136 the structure AC, transparently simplifying the access if the memory
 4137 alignment is known to be at least 32 bits.  MEM is the memory location
 4138 for the actual operation and MODE its mode. */
182f815e 4139
4140static void
4141init_alignment_context (struct alignment_context *ac, rtx mem,
4142 enum machine_mode mode)
4143{
4144 ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
4145 ac->aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
4146
4147 if (ac->aligned)
4148 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */
4149 else
4150 {
4151 /* Alignment is unknown. */
4152 rtx byteoffset, addr, align;
4153
4154 /* Force the address into a register. */
4155 addr = force_reg (Pmode, XEXP (mem, 0));
4156
4157 /* Align it to SImode. */
4158 align = expand_simple_binop (Pmode, AND, addr,
4159 GEN_INT (-GET_MODE_SIZE (SImode)),
4160 NULL_RTX, 1, OPTAB_DIRECT);
4161 /* Generate MEM. */
4162 ac->memsi = gen_rtx_MEM (SImode, align);
4163 MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
bd1da572 4164 set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
182f815e 4165 set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));
4166
4167 /* Calculate shiftcount. */
4168 byteoffset = expand_simple_binop (Pmode, AND, addr,
4169 GEN_INT (GET_MODE_SIZE (SImode) - 1),
4170 NULL_RTX, 1, OPTAB_DIRECT);
4171 /* As we already have some offset, evaluate the remaining distance. */
4172 ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
4173 NULL_RTX, 1, OPTAB_DIRECT);
4174
4175 }
4176 /* Shift is the byte count, but we need the bitcount. */
4177 ac->shift = expand_simple_binop (SImode, MULT, ac->shift, GEN_INT (BITS_PER_UNIT),
4178 NULL_RTX, 1, OPTAB_DIRECT);
4179 /* Calculate masks. */
4180 ac->modemask = expand_simple_binop (SImode, ASHIFT,
4181 GEN_INT (GET_MODE_MASK (mode)), ac->shift,
4182 NULL_RTX, 1, OPTAB_DIRECT);
4183 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1);
4184}
4185
4186/* Expand an atomic compare and swap operation for HImode and QImode. MEM is
4187 the memory location, CMP the old value to compare MEM with and NEW the value
4188 to set if CMP == MEM.
4189 CMP is never in memory for compare_and_swap_cc because
4190 expand_bool_compare_and_swap puts it into a register for later compare. */
4191
4192void
4193s390_expand_cs_hqi (enum machine_mode mode, rtx target, rtx mem, rtx cmp, rtx new)
4194{
4195 struct alignment_context ac;
4196 rtx cmpv, newv, val, resv, cc;
4197 rtx res = gen_reg_rtx (SImode);
4198 rtx csloop = gen_label_rtx ();
4199 rtx csend = gen_label_rtx ();
4200
4201 gcc_assert (register_operand (target, VOIDmode));
4202 gcc_assert (MEM_P (mem));
4203
4204 init_alignment_context (&ac, mem, mode);
4205
4206 /* Shift the values to the correct bit positions. */
4207 if (!(ac.aligned && MEM_P (cmp)))
4208 cmp = s390_expand_mask_and_shift (cmp, mode, ac.shift);
4209 if (!(ac.aligned && MEM_P (new)))
4210 new = s390_expand_mask_and_shift (new, mode, ac.shift);
4211
4212 /* Load full word. Subsequent loads are performed by CS. */
4213 val = expand_simple_binop (SImode, AND, ac.memsi, ac.modemaski,
4214 NULL_RTX, 1, OPTAB_DIRECT);
4215
4216 /* Start CS loop. */
4217 emit_label (csloop);
 4218  /* val = "<mem>00..0<mem>"
 4219     cmp = "00..0<cmp>00..0"
 4220     new = "00..0<new>00..0"  */
4222
4223 /* Patch cmp and new with val at correct position. */
4224 if (ac.aligned && MEM_P (cmp))
4225 {
4226 cmpv = force_reg (SImode, val);
4227 store_bit_field (cmpv, GET_MODE_BITSIZE (mode), 0, SImode, cmp);
4228 }
4229 else
4230 cmpv = force_reg (SImode, expand_simple_binop (SImode, IOR, cmp, val,
4231 NULL_RTX, 1, OPTAB_DIRECT));
4232 if (ac.aligned && MEM_P (new))
4233 {
4234 newv = force_reg (SImode, val);
4235 store_bit_field (newv, GET_MODE_BITSIZE (mode), 0, SImode, new);
4236 }
4237 else
4238 newv = force_reg (SImode, expand_simple_binop (SImode, IOR, new, val,
4239 NULL_RTX, 1, OPTAB_DIRECT));
4240
182f815e 4241 /* Jump to end if we're done (likely?). */
db1f11e3 4242 s390_emit_jump (csend, s390_emit_compare_and_swap (EQ, res, ac.memsi,
4243 cmpv, newv));
182f815e 4244
4245 /* Check for changes outside mode. */
4246 resv = expand_simple_binop (SImode, AND, res, ac.modemaski,
4247 NULL_RTX, 1, OPTAB_DIRECT);
4248 cc = s390_emit_compare (NE, resv, val);
4249 emit_move_insn (val, resv);
4250 /* Loop internal if so. */
4251 s390_emit_jump (csloop, cc);
4252
4253 emit_label (csend);
4254
4255 /* Return the correct part of the bitfield. */
4256 convert_move (target, expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
4257 NULL_RTX, 1, OPTAB_DIRECT), 1);
4258}
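/* The loop above in pseudo-C (a sketch; CS returns the old contents,
   and CMP/NEW have already been shifted into place):

     val = *memsi & ~modemask;
     for (;;)
       {
         cmpv = val | cmp;
         newv = val | new;
         res = CS (memsi, cmpv, newv);
         if (res == cmpv)
           break;                        /* swap succeeded */
         if ((res & ~modemask) == val)
           break;                        /* our field itself differed */
         val = res & ~modemask;          /* outside bits changed; retry */
       }
     target = (res >> shift) & GET_MODE_MASK (mode);
*/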
4259
7cc66daf 4260/* Expand an atomic operation CODE of mode MODE. MEM is the memory location
85694bac 4261 and VAL the second operand of the operation.  If AFTER is true then
7cc66daf 4262 store the value MEM holds after the operation; if AFTER is false then
 4263 store the value MEM holds before the operation.  If TARGET is zero then
 4264 discard that value, else store it to TARGET. */
4265
4266void
4267s390_expand_atomic (enum machine_mode mode, enum rtx_code code,
4268 rtx target, rtx mem, rtx val, bool after)
4269{
4270 struct alignment_context ac;
4271 rtx cmp;
4272 rtx new = gen_reg_rtx (SImode);
4273 rtx orig = gen_reg_rtx (SImode);
4274 rtx csloop = gen_label_rtx ();
4275
4276 gcc_assert (!target || register_operand (target, VOIDmode));
4277 gcc_assert (MEM_P (mem));
4278
4279 init_alignment_context (&ac, mem, mode);
4280
4281 /* Shift val to the correct bit positions.
4282 Preserve "icm", but prevent "ex icm". */
4283 if (!(ac.aligned && code == SET && MEM_P (val)))
4284 val = s390_expand_mask_and_shift (val, mode, ac.shift);
4285
4286 /* Further preparation insns. */
4287 if (code == PLUS || code == MINUS)
4288 emit_move_insn (orig, val);
4289 else if (code == MULT || code == AND) /* val = "11..1<val>11..1" */
4290 val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
4291 NULL_RTX, 1, OPTAB_DIRECT);
4292
4293 /* Load full word. Subsequent loads are performed by CS. */
4294 cmp = force_reg (SImode, ac.memsi);
4295
4296 /* Start CS loop. */
4297 emit_label (csloop);
4298 emit_move_insn (new, cmp);
4299
4300 /* Patch new with val at correct position. */
4301 switch (code)
4302 {
4303 case PLUS:
4304 case MINUS:
4305 val = expand_simple_binop (SImode, code, new, orig,
4306 NULL_RTX, 1, OPTAB_DIRECT);
4307 val = expand_simple_binop (SImode, AND, val, ac.modemask,
4308 NULL_RTX, 1, OPTAB_DIRECT);
4309 /* FALLTHRU */
4310 case SET:
4311 if (ac.aligned && MEM_P (val))
4312 store_bit_field (new, GET_MODE_BITSIZE (mode), 0, SImode, val);
4313 else
4314 {
4315 new = expand_simple_binop (SImode, AND, new, ac.modemaski,
4316 NULL_RTX, 1, OPTAB_DIRECT);
4317 new = expand_simple_binop (SImode, IOR, new, val,
4318 NULL_RTX, 1, OPTAB_DIRECT);
4319 }
4320 break;
4321 case AND:
4322 case IOR:
4323 case XOR:
4324 new = expand_simple_binop (SImode, code, new, val,
4325 NULL_RTX, 1, OPTAB_DIRECT);
4326 break;
4327 case MULT: /* NAND */
4328 new = expand_simple_binop (SImode, XOR, new, ac.modemask,
4329 NULL_RTX, 1, OPTAB_DIRECT);
4330 new = expand_simple_binop (SImode, AND, new, val,
4331 NULL_RTX, 1, OPTAB_DIRECT);
4332 break;
4333 default:
4334 gcc_unreachable ();
4335 }
7cc66daf 4336
db1f11e3 4337 s390_emit_jump (csloop, s390_emit_compare_and_swap (NE, cmp,
4338 ac.memsi, cmp, new));
7cc66daf 4339
4340 /* Return the correct part of the bitfield. */
4341 if (target)
4342 convert_move (target, expand_simple_binop (SImode, LSHIFTRT,
4343 after ? new : cmp, ac.shift,
4344 NULL_RTX, 1, OPTAB_DIRECT), 1);
4345}
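/* A sketch of the loop above for CODE == PLUS (pseudo-C; the CS
   instruction reloads CMP with the current memory contents on failure):

     cmp = *memsi;
     do
       {
         t   = (cmp + orig) & modemask;      /* add within the field */
         new = (cmp & ~modemask) | t;        /* splice the field back in */
       }
     while (!CS (memsi, &cmp, new));
     result = ((after ? new : cmp) >> shift) & GET_MODE_MASK (mode);
*/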
4346
40af64cc 4347/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
03c118d5 4348 We need to emit DTP-relative relocations. */
4349
40af64cc 4350static void s390_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
4351
4352static void
b40da9a7 4353s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
03c118d5 4354{
4355 switch (size)
4356 {
4357 case 4:
4358 fputs ("\t.long\t", file);
4359 break;
4360 case 8:
4361 fputs ("\t.quad\t", file);
4362 break;
4363 default:
32eda510 4364 gcc_unreachable ();
03c118d5 4365 }
4366 output_addr_const (file, x);
4367 fputs ("@DTPOFF", file);
4368}
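/* E.g. for SIZE == 8 and X a SYMBOL_REF for `foo' this emits

     .quad   foo@DTPOFF
*/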
4369
4257b08a 4370#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
eddcdde1 4371/* Implement TARGET_MANGLE_TYPE. */
4257b08a 4372
4373static const char *
eddcdde1 4374s390_mangle_type (tree type)
4257b08a 4375{
4376 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
4377 && TARGET_LONG_DOUBLE_128)
4378 return "g";
4379
4380 /* For all other types, use normal C++ mangling. */
4381 return NULL;
4382}
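/* Thus, with -mlong-double-128, `void f (long double)' mangles as
   `_Z1fg' rather than the default `_Z1fe' (a sketch; `g' is the
   Itanium C++ ABI code used for the 128-bit type).  */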
4383#endif
4384
e93986bb 4385/* In the name of slightly smaller debug output, and to cater to
06b27565 4386 general assembler lossage, recognize various UNSPEC sequences
e93986bb 4387 and turn them back into a direct symbol reference. */
4388
07576557 4389static rtx
b40da9a7 4390s390_delegitimize_address (rtx orig_x)
e93986bb 4391{
4392 rtx x = orig_x, y;
4393
4394 if (GET_CODE (x) != MEM)
4395 return orig_x;
4396
4397 x = XEXP (x, 0);
4398 if (GET_CODE (x) == PLUS
4399 && GET_CODE (XEXP (x, 1)) == CONST
4400 && GET_CODE (XEXP (x, 0)) == REG
4401 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
4402 {
4403 y = XEXP (XEXP (x, 1), 0);
4404 if (GET_CODE (y) == UNSPEC
12ef3745 4405 && XINT (y, 1) == UNSPEC_GOT)
e93986bb 4406 return XVECEXP (y, 0, 0);
4407 return orig_x;
4408 }
4409
4410 if (GET_CODE (x) == CONST)
4411 {
4412 y = XEXP (x, 0);
4413 if (GET_CODE (y) == UNSPEC
12ef3745 4414 && XINT (y, 1) == UNSPEC_GOTENT)
e93986bb 4415 return XVECEXP (y, 0, 0);
4416 return orig_x;
4417 }
4418
f81e845f 4419 return orig_x;
e93986bb 4420}
2eb8fe23 4421
805a133b 4422/* Output shift count operand OP to stdio stream FILE.
 4423 OP is an address (register + offset) which is not used to address data;
 4424 instead its rightmost bits are interpreted as the shift count value. */
63ebd742 4425
4426static void
4427print_shift_count_operand (FILE *file, rtx op)
4428{
6d6be381 4429 HOST_WIDE_INT offset;
4430 rtx base;
9a09ba70 4431
6d6be381 4432 /* Extract base register and offset. */
417cba42 4433 if (!s390_decompose_shift_count (op, &base, &offset))
6d6be381 4434 gcc_unreachable ();
63ebd742 4435
4436 /* Sanity check. */
6d6be381 4437 if (base)
32eda510 4438 {
6d6be381 4439 gcc_assert (GET_CODE (base) == REG);
4440 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
4441 gcc_assert (REGNO_REG_CLASS (REGNO (base)) == ADDR_REGS);
32eda510 4442 }
63ebd742 4443
805a133b 4444 /* Offsets are restricted to twelve bits. */
4445 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & ((1 << 12) - 1));
6d6be381 4446 if (base)
4447 fprintf (file, "(%s)", reg_names[REGNO (base)]);
63ebd742 4448}
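/* E.g. a shift count operand of the form (plus (reg) (const_int 4100)),
   with the register allocated to %r1, prints as `4(%r1)': only the low
   twelve bits of the offset (4100 & 4095 == 4) are significant.  */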
4449
875862bf 4450/* See 'get_some_local_dynamic_name'. */
be00aaa8 4451
4452static int
b40da9a7 4453get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
be00aaa8 4454{
4455 rtx x = *px;
4456
4457 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
4458 {
4459 x = get_pool_constant (x);
4460 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
4461 }
4462
4463 if (GET_CODE (x) == SYMBOL_REF
4464 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
4465 {
4466 cfun->machine->some_ld_name = XSTR (x, 0);
4467 return 1;
4468 }
4469
4470 return 0;
4471}
4472
875862bf 4473/* Locate some local-dynamic symbol still in use by this function
4474 so that we can print its name in local-dynamic base patterns. */
4475
4476static const char *
4477get_some_local_dynamic_name (void)
4478{
4479 rtx insn;
4480
4481 if (cfun->machine->some_ld_name)
4482 return cfun->machine->some_ld_name;
4483
4484 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
4485 if (INSN_P (insn)
4486 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
4487 return cfun->machine->some_ld_name;
4488
32eda510 4489 gcc_unreachable ();
875862bf 4490}
4491
f588eb9f 4492/* Output machine-dependent UNSPECs occurring in address constant X
74d2529d 4493 in assembler syntax to stdio stream FILE. Returns true if the
4494 constant X could be recognized, false otherwise. */
4673c1a0 4495
74d2529d 4496bool
4497s390_output_addr_const_extra (FILE *file, rtx x)
4673c1a0 4498{
74d2529d 4499 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
4500 switch (XINT (x, 1))
4501 {
4502 case UNSPEC_GOTENT:
4503 output_addr_const (file, XVECEXP (x, 0, 0));
4504 fprintf (file, "@GOTENT");
4505 return true;
4506 case UNSPEC_GOT:
4507 output_addr_const (file, XVECEXP (x, 0, 0));
4508 fprintf (file, "@GOT");
4509 return true;
4510 case UNSPEC_GOTOFF:
4511 output_addr_const (file, XVECEXP (x, 0, 0));
4512 fprintf (file, "@GOTOFF");
4513 return true;
4514 case UNSPEC_PLT:
4515 output_addr_const (file, XVECEXP (x, 0, 0));
4516 fprintf (file, "@PLT");
4517 return true;
4518 case UNSPEC_PLTOFF:
4519 output_addr_const (file, XVECEXP (x, 0, 0));
4520 fprintf (file, "@PLTOFF");
4521 return true;
4522 case UNSPEC_TLSGD:
4523 output_addr_const (file, XVECEXP (x, 0, 0));
4524 fprintf (file, "@TLSGD");
4525 return true;
4526 case UNSPEC_TLSLDM:
4527 assemble_name (file, get_some_local_dynamic_name ());
4528 fprintf (file, "@TLSLDM");
4529 return true;
4530 case UNSPEC_DTPOFF:
4531 output_addr_const (file, XVECEXP (x, 0, 0));
4532 fprintf (file, "@DTPOFF");
4533 return true;
4534 case UNSPEC_NTPOFF:
4535 output_addr_const (file, XVECEXP (x, 0, 0));
4536 fprintf (file, "@NTPOFF");
4537 return true;
4538 case UNSPEC_GOTNTPOFF:
4539 output_addr_const (file, XVECEXP (x, 0, 0));
4540 fprintf (file, "@GOTNTPOFF");
4541 return true;
4542 case UNSPEC_INDNTPOFF:
4543 output_addr_const (file, XVECEXP (x, 0, 0));
4544 fprintf (file, "@INDNTPOFF");
4545 return true;
4546 }
4673c1a0 4547
74d2529d 4548 return false;
4673c1a0 4549}
4550
f81e845f 4551/* Output address operand ADDR in assembler syntax to
56769981 4552 stdio stream FILE. */
4673c1a0 4553
4554void
b40da9a7 4555print_operand_address (FILE *file, rtx addr)
4673c1a0 4556{
4557 struct s390_address ad;
4558
8ba34dcd 4559 if (!s390_decompose_address (addr, &ad)
1e280623 4560 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
4561 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
3284a242 4562 output_operand_lossage ("cannot decompose address");
f81e845f 4563
4673c1a0 4564 if (ad.disp)
74d2529d 4565 output_addr_const (file, ad.disp);
4673c1a0 4566 else
4567 fprintf (file, "0");
4568
4569 if (ad.base && ad.indx)
4570 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
4571 reg_names[REGNO (ad.base)]);
4572 else if (ad.base)
4573 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4574}
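/* E.g. an address with displacement 100, index register %r3 and base
   register %r2 is printed as `100(%r3,%r2)'; a base-only address with
   no displacement prints as `0(%r2)'.  */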
4575
f81e845f 4576/* Output operand X in assembler syntax to stdio stream FILE.
4577 CODE specified the format flag. The following format flags
56769981 4578 are recognized:
4579
4580 'C': print opcode suffix for branch condition.
4581 'D': print opcode suffix for inverse branch condition.
be00aaa8 4582 'J': print tls_load/tls_gdcall/tls_ldcall suffix
cc87d0c5 4583 'G': print the size of the operand in bytes.
56769981 4584 'O': print only the displacement of a memory reference.
4585 'R': print only the base register of a memory reference.
0574acbe 4586 'S': print S-type memory reference (base+displacement).
56769981 4587 'N': print the second word of a DImode operand.
4588 'M': print the second word of a TImode operand.
63ebd742 4589 'Y': print shift count operand.
56769981 4590
45981c0a 4591 'b': print integer X as if it's an unsigned byte.
b9059d39 4592 'x': print integer X as if it's an unsigned halfword.
4593 'h': print integer X as if it's a signed halfword.
64a1078f 4594 'i': print the first nonzero HImode part of X.
b9059d39 4595 'j': print the first HImode part unequal to -1 of X.
4596 'k': print the first nonzero SImode part of X.
4597 'm': print the first SImode part unequal to -1 of X.
 4598 'o': print integer X as if it's an unsigned 32-bit word. */
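/* For instance, given (const_int 0x1234f234):
     'x' prints 62004   (0xf234, the low halfword zero-extended),
     'h' prints -3532   (0xf234 sign-extended),
     'b' prints 52      (0x34, the low byte).  */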
4673c1a0 4599
4600void
b40da9a7 4601print_operand (FILE *file, rtx x, int code)
4673c1a0 4602{
4603 switch (code)
4604 {
4605 case 'C':
2eb8fe23 4606 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
4673c1a0 4607 return;
4608
4609 case 'D':
2eb8fe23 4610 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
4673c1a0 4611 return;
4612
be00aaa8 4613 case 'J':
4614 if (GET_CODE (x) == SYMBOL_REF)
4615 {
4616 fprintf (file, "%s", ":tls_load:");
4617 output_addr_const (file, x);
4618 }
4619 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
4620 {
4621 fprintf (file, "%s", ":tls_gdcall:");
4622 output_addr_const (file, XVECEXP (x, 0, 0));
4623 }
4624 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
4625 {
4626 fprintf (file, "%s", ":tls_ldcall:");
4627 assemble_name (file, get_some_local_dynamic_name ());
4628 }
4629 else
32eda510 4630 gcc_unreachable ();
be00aaa8 4631 return;
4632
cc87d0c5 4633 case 'G':
4634 fprintf (file, "%u", GET_MODE_SIZE (GET_MODE (x)));
4635 return;
4636
4673c1a0 4637 case 'O':
4638 {
4639 struct s390_address ad;
32eda510 4640 int ret;
4673c1a0 4641
32eda510 4642 gcc_assert (GET_CODE (x) == MEM);
4643 ret = s390_decompose_address (XEXP (x, 0), &ad);
4644 gcc_assert (ret);
1e280623 4645 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
32eda510 4646 gcc_assert (!ad.indx);
4673c1a0 4647
4648 if (ad.disp)
74d2529d 4649 output_addr_const (file, ad.disp);
4673c1a0 4650 else
4651 fprintf (file, "0");
4652 }
4653 return;
4654
4655 case 'R':
4656 {
4657 struct s390_address ad;
32eda510 4658 int ret;
4673c1a0 4659
32eda510 4660 gcc_assert (GET_CODE (x) == MEM);
4661 ret = s390_decompose_address (XEXP (x, 0), &ad);
4662 gcc_assert (ret);
1e280623 4663 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
32eda510 4664 gcc_assert (!ad.indx);
4673c1a0 4665
4666 if (ad.base)
4667 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
4668 else
4669 fprintf (file, "0");
4670 }
4671 return;
4672
0574acbe 4673 case 'S':
4674 {
4675 struct s390_address ad;
32eda510 4676 int ret;
0574acbe 4677
32eda510 4678 gcc_assert (GET_CODE (x) == MEM);
4679 ret = s390_decompose_address (XEXP (x, 0), &ad);
4680 gcc_assert (ret);
1e280623 4681 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
32eda510 4682 gcc_assert (!ad.indx);
0574acbe 4683
4684 if (ad.disp)
4685 output_addr_const (file, ad.disp);
4686 else
4687 fprintf (file, "0");
4688
4689 if (ad.base)
4690 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4691 }
4692 return;
4693
4673c1a0 4694 case 'N':
4695 if (GET_CODE (x) == REG)
4696 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4697 else if (GET_CODE (x) == MEM)
4698 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
4699 else
32eda510 4700 gcc_unreachable ();
4673c1a0 4701 break;
4702
4703 case 'M':
4704 if (GET_CODE (x) == REG)
4705 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4706 else if (GET_CODE (x) == MEM)
4707 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
4708 else
32eda510 4709 gcc_unreachable ();
4673c1a0 4710 break;
63ebd742 4711
4712 case 'Y':
4713 print_shift_count_operand (file, x);
4714 return;
4673c1a0 4715 }
4716
4717 switch (GET_CODE (x))
4718 {
4719 case REG:
4720 fprintf (file, "%s", reg_names[REGNO (x)]);
4721 break;
4722
4723 case MEM:
4724 output_address (XEXP (x, 0));
4725 break;
4726
4727 case CONST:
4728 case CODE_LABEL:
4729 case LABEL_REF:
4730 case SYMBOL_REF:
74d2529d 4731 output_addr_const (file, x);
4673c1a0 4732 break;
4733
4734 case CONST_INT:
4735 if (code == 'b')
8b4a4127 4736 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
4737 else if (code == 'x')
4738 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
4739 else if (code == 'h')
4740 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
64a1078f 4741 else if (code == 'i')
f588eb9f 4742 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
64a1078f 4743 s390_extract_part (x, HImode, 0));
4744 else if (code == 'j')
f588eb9f 4745 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4746 s390_extract_part (x, HImode, -1));
163277cf 4747 else if (code == 'k')
4748 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4749 s390_extract_part (x, SImode, 0));
4750 else if (code == 'm')
4751 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4752 s390_extract_part (x, SImode, -1));
4753 else if (code == 'o')
4754 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffffffff);
8b4a4127 4755 else
4756 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
4757 break;
4758
4759 case CONST_DOUBLE:
32eda510 4760 gcc_assert (GET_MODE (x) == VOIDmode);
8b4a4127 4761 if (code == 'b')
4762 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
4673c1a0 4763 else if (code == 'x')
8b4a4127 4764 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
4673c1a0 4765 else if (code == 'h')
8b4a4127 4766 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
4673c1a0 4767 else
32eda510 4768 gcc_unreachable ();
4673c1a0 4769 break;
4770
4771 default:
4772 fatal_insn ("UNKNOWN in print_operand !?", x);
4773 break;
4774 }
4775}
4776
58356836 4777/* Target hook for assembling integer objects. We need to define it
 4778 here to work around a bug in some versions of GAS, which couldn't
4779 handle values smaller than INT_MIN when printed in decimal. */
4780
4781static bool
b40da9a7 4782s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
58356836 4783{
4784 if (size == 8 && aligned_p
4785 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
4786 {
4840a03a 4787 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
4788 INTVAL (x));
58356836 4789 return true;
4790 }
4791 return default_assemble_integer (x, size, aligned_p);
4792}
4793
f81e845f 4794/* Returns true if register REGNO is used for forming
56769981 4795 a memory address in expression X. */
4673c1a0 4796
e5537457 4797static bool
b40da9a7 4798reg_used_in_mem_p (int regno, rtx x)
4673c1a0 4799{
4800 enum rtx_code code = GET_CODE (x);
4801 int i, j;
4802 const char *fmt;
f81e845f 4803
4673c1a0 4804 if (code == MEM)
4805 {
4806 if (refers_to_regno_p (regno, regno+1,
4807 XEXP (x, 0), 0))
e5537457 4808 return true;
4673c1a0 4809 }
f81e845f 4810 else if (code == SET
8b4a4127 4811 && GET_CODE (SET_DEST (x)) == PC)
4812 {
4813 if (refers_to_regno_p (regno, regno+1,
4814 SET_SRC (x), 0))
e5537457 4815 return true;
8b4a4127 4816 }
4673c1a0 4817
4818 fmt = GET_RTX_FORMAT (code);
4819 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4820 {
4821 if (fmt[i] == 'e'
4822 && reg_used_in_mem_p (regno, XEXP (x, i)))
e5537457 4823 return true;
f81e845f 4824
4673c1a0 4825 else if (fmt[i] == 'E')
4826 for (j = 0; j < XVECLEN (x, i); j++)
4827 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
e5537457 4828 return true;
4673c1a0 4829 }
e5537457 4830 return false;
4673c1a0 4831}
4832
0c034860 4833/* Returns true if expression DEP_RTX sets an address register
56769981 4834 used by instruction INSN to address memory. */
4673c1a0 4835
e5537457 4836static bool
b40da9a7 4837addr_generation_dependency_p (rtx dep_rtx, rtx insn)
4673c1a0 4838{
8b4a4127 4839 rtx target, pat;
4673c1a0 4840
71343e6b 4841 if (GET_CODE (dep_rtx) == INSN)
4842 dep_rtx = PATTERN (dep_rtx);
4843
4673c1a0 4844 if (GET_CODE (dep_rtx) == SET)
4845 {
4846 target = SET_DEST (dep_rtx);
147b6a2d 4847 if (GET_CODE (target) == STRICT_LOW_PART)
4848 target = XEXP (target, 0);
4849 while (GET_CODE (target) == SUBREG)
4850 target = SUBREG_REG (target);
4851
4673c1a0 4852 if (GET_CODE (target) == REG)
4853 {
4854 int regno = REGNO (target);
4855
71343e6b 4856 if (s390_safe_attr_type (insn) == TYPE_LA)
8b4a4127 4857 {
4858 pat = PATTERN (insn);
4859 if (GET_CODE (pat) == PARALLEL)
4860 {
32eda510 4861 gcc_assert (XVECLEN (pat, 0) == 2);
8b4a4127 4862 pat = XVECEXP (pat, 0, 0);
4863 }
32eda510 4864 gcc_assert (GET_CODE (pat) == SET);
4865 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
8b4a4127 4866 }
71343e6b 4867 else if (get_attr_atype (insn) == ATYPE_AGEN)
8b4a4127 4868 return reg_used_in_mem_p (regno, PATTERN (insn));
4869 }
4673c1a0 4870 }
e5537457 4871 return false;
4673c1a0 4872}
4873
71343e6b 4874/* Return 1 if DEP_INSN sets a register used by INSN in the agen unit. */
4875
f81e845f 4876int
b40da9a7 4877s390_agen_dep_p (rtx dep_insn, rtx insn)
f81e845f 4878{
71343e6b 4879 rtx dep_rtx = PATTERN (dep_insn);
4880 int i;
f81e845f 4881
4882 if (GET_CODE (dep_rtx) == SET
71343e6b 4883 && addr_generation_dependency_p (dep_rtx, insn))
4884 return 1;
4885 else if (GET_CODE (dep_rtx) == PARALLEL)
4886 {
4887 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
4888 {
4889 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
4890 return 1;
4891 }
4892 }
4893 return 0;
4894}
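/* An example of the address-generation interlock this detects
   (hypothetical registers):

     lr   %r1,%r2        ; DEP_INSN sets %r1
     l    %r3,0(%r1)     ; INSN uses %r1 in the agen unit

   The load must wait for %r1, so the scheduler accounts for the
   extra latency.  */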
4895
e51ae8ff 4896/* A C statement (sans semicolon) to update the integer scheduling priority
4897 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
4898 reduce the priority to execute INSN later. Do not define this macro if
f81e845f 4899 you do not need to adjust the scheduling priorities of insns.
e51ae8ff 4900
f81e845f 4901 A STD instruction should be scheduled earlier,
e51ae8ff 4902 in order to use the bypass. */
4903
4904static int
b40da9a7 4905s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
e51ae8ff 4906{
4907 if (! INSN_P (insn))
4908 return priority;
4909
163277cf 4910 if (s390_tune != PROCESSOR_2084_Z990
4911 && s390_tune != PROCESSOR_2094_Z9_109)
e51ae8ff 4912 return priority;
4913
4914 switch (s390_safe_attr_type (insn))
4915 {
11f88fec 4916 case TYPE_FSTOREDF:
4917 case TYPE_FSTORESF:
e51ae8ff 4918 priority = priority << 3;
4919 break;
4920 case TYPE_STORE:
76dbb8df 4921 case TYPE_STM:
e51ae8ff 4922 priority = priority << 1;
4923 break;
4924 default:
4925 break;
4926 }
4927 return priority;
4928}
369293ed 4929
71343e6b 4930/* The number of instructions that can be issued per cycle. */
369293ed 4931
71343e6b 4932static int
b40da9a7 4933s390_issue_rate (void)
71343e6b 4934{
163277cf 4935 if (s390_tune == PROCESSOR_2084_Z990
4936 || s390_tune == PROCESSOR_2094_Z9_109)
e51ae8ff 4937 return 3;
71343e6b 4938 return 1;
4939}
369293ed 4940
e51ae8ff 4941static int
b40da9a7 4942s390_first_cycle_multipass_dfa_lookahead (void)
e51ae8ff 4943{
a65ea517 4944 return 4;
e51ae8ff 4945}
4946
e51ae8ff 4947
20074f87 4948/* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
4949 Fix up MEMs as required. */
4950
4951static void
4952annotate_constant_pool_refs (rtx *x)
4953{
4954 int i, j;
4955 const char *fmt;
4956
32eda510 4957 gcc_assert (GET_CODE (*x) != SYMBOL_REF
4958 || !CONSTANT_POOL_ADDRESS_P (*x));
20074f87 4959
4960 /* Literal pool references can only occur inside a MEM ... */
4961 if (GET_CODE (*x) == MEM)
4962 {
4963 rtx memref = XEXP (*x, 0);
4964
4965 if (GET_CODE (memref) == SYMBOL_REF
4966 && CONSTANT_POOL_ADDRESS_P (memref))
4967 {
4968 rtx base = cfun->machine->base_reg;
4969 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
4970 UNSPEC_LTREF);
4971
4972 *x = replace_equiv_address (*x, addr);
4973 return;
4974 }
4975
4976 if (GET_CODE (memref) == CONST
4977 && GET_CODE (XEXP (memref, 0)) == PLUS
4978 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
4979 && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
4980 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
4981 {
4982 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
4983 rtx sym = XEXP (XEXP (memref, 0), 0);
4984 rtx base = cfun->machine->base_reg;
4985 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4986 UNSPEC_LTREF);
4987
4988 *x = replace_equiv_address (*x, plus_constant (addr, off));
4989 return;
4990 }
4991 }
4992
4993 /* ... or a load-address type pattern. */
4994 if (GET_CODE (*x) == SET)
4995 {
4996 rtx addrref = SET_SRC (*x);
4997
4998 if (GET_CODE (addrref) == SYMBOL_REF
4999 && CONSTANT_POOL_ADDRESS_P (addrref))
5000 {
5001 rtx base = cfun->machine->base_reg;
5002 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
5003 UNSPEC_LTREF);
5004
5005 SET_SRC (*x) = addr;
5006 return;
5007 }
5008
5009 if (GET_CODE (addrref) == CONST
5010 && GET_CODE (XEXP (addrref, 0)) == PLUS
5011 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
5012 && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
5013 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
5014 {
5015 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
5016 rtx sym = XEXP (XEXP (addrref, 0), 0);
5017 rtx base = cfun->machine->base_reg;
5018 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
5019 UNSPEC_LTREF);
5020
5021 SET_SRC (*x) = plus_constant (addr, off);
5022 return;
5023 }
5024 }
5025
5026 /* Annotate LTREL_BASE as well. */
5027 if (GET_CODE (*x) == UNSPEC
5028 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
5029 {
5030 rtx base = cfun->machine->base_reg;
5031 *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
5032 UNSPEC_LTREL_BASE);
5033 return;
5034 }
5035
5036 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5037 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5038 {
5039 if (fmt[i] == 'e')
5040 {
5041 annotate_constant_pool_refs (&XEXP (*x, i));
5042 }
5043 else if (fmt[i] == 'E')
5044 {
5045 for (j = 0; j < XVECLEN (*x, i); j++)
5046 annotate_constant_pool_refs (&XVECEXP (*x, i, j));
5047 }
5048 }
5049}
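/* Sketch: a literal pool reference such as

     (mem (symbol_ref "LC0"))

   is rewritten by the code above into

     (mem (unspec [(symbol_ref "LC0") (reg base)] UNSPEC_LTREF))

   making the dependency on the pool base register explicit.  */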
5050
875862bf 5051/* Split all branches that exceed the maximum distance.
5052 Returns true if this created a new literal pool entry. */
5053
5054static int
5055s390_split_branches (void)
5056{
5057 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
32eda510 5058 int new_literal = 0, ret;
875862bf 5059 rtx insn, pat, tmp, target;
5060 rtx *label;
5061
5062 /* We need correct insn addresses. */
5063
5064 shorten_branches (get_insns ());
5065
5066 /* Find all branches that exceed 64KB, and split them. */
5067
5068 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5069 {
5070 if (GET_CODE (insn) != JUMP_INSN)
5071 continue;
5072
5073 pat = PATTERN (insn);
5074 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
5075 pat = XVECEXP (pat, 0, 0);
5076 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
5077 continue;
5078
5079 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
5080 {
5081 label = &SET_SRC (pat);
5082 }
5083 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
5084 {
5085 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
5086 label = &XEXP (SET_SRC (pat), 1);
5087 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
5088 label = &XEXP (SET_SRC (pat), 2);
5089 else
5090 continue;
5091 }
5092 else
5093 continue;
5094
5095 if (get_attr_length (insn) <= 4)
5096 continue;
5097
77beec48 5098 /* We are going to use the return register as a scratch register;
 5099 make sure it will be saved/restored by the prologue/epilogue. */
5100 cfun_frame_layout.save_return_addr_p = 1;
5101
875862bf 5102 if (!flag_pic)
5103 {
5104 new_literal = 1;
5105 tmp = force_const_mem (Pmode, *label);
5106 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
5107 INSN_ADDRESSES_NEW (tmp, -1);
5108 annotate_constant_pool_refs (&PATTERN (tmp));
5109
5110 target = temp_reg;
5111 }
5112 else
5113 {
5114 new_literal = 1;
5115 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
5116 UNSPEC_LTREL_OFFSET);
5117 target = gen_rtx_CONST (Pmode, target);
5118 target = force_const_mem (Pmode, target);
5119 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
5120 INSN_ADDRESSES_NEW (tmp, -1);
5121 annotate_constant_pool_refs (&PATTERN (tmp));
5122
5123 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
5124 cfun->machine->base_reg),
5125 UNSPEC_LTREL_BASE);
5126 target = gen_rtx_PLUS (Pmode, temp_reg, target);
5127 }
5128
32eda510 5129 ret = validate_change (insn, label, target, 0);
5130 gcc_assert (ret);
875862bf 5131 }
5132
5133 return new_literal;
5134}
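/* Sketch of the non-PIC transformation above (hypothetical labels):
   a conditional branch whose target is more than 64KB away,

     jne  .Lfar

   becomes a pool load of the target address into the return register
   plus an indirect branch:

     l    %r14,<literal-pool slot holding .Lfar>
     bner %r14
*/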
5135
0756cebb 5136
20074f87 5137/* Find an annotated literal pool symbol referenced in RTX X,
5138 and store it at REF. Will abort if X contains references to
5139 more than one such pool symbol; multiple references to the same
5140 symbol are allowed, however.
0756cebb 5141
f81e845f 5142 The rtx pointed to by REF must be initialized to NULL_RTX
0756cebb 5143 by the caller before calling this routine. */
5144
5145static void
b40da9a7 5146find_constant_pool_ref (rtx x, rtx *ref)
0756cebb 5147{
5148 int i, j;
5149 const char *fmt;
5150
12ef3745 5151 /* Ignore LTREL_BASE references. */
5152 if (GET_CODE (x) == UNSPEC
5153 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5154 return;
c2c1332a 5155 /* Likewise POOL_ENTRY insns. */
5156 if (GET_CODE (x) == UNSPEC_VOLATILE
5157 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
5158 return;
12ef3745 5159
32eda510 5160 gcc_assert (GET_CODE (x) != SYMBOL_REF
5161 || !CONSTANT_POOL_ADDRESS_P (x));
20074f87 5162
5163 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
0756cebb 5164 {
20074f87 5165 rtx sym = XVECEXP (x, 0, 0);
32eda510 5166 gcc_assert (GET_CODE (sym) == SYMBOL_REF
5167 && CONSTANT_POOL_ADDRESS_P (sym));
20074f87 5168
0756cebb 5169 if (*ref == NULL_RTX)
20074f87 5170 *ref = sym;
32eda510 5171 else
5172 gcc_assert (*ref == sym);
20074f87 5173
5174 return;
0756cebb 5175 }
5176
5177 fmt = GET_RTX_FORMAT (GET_CODE (x));
5178 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5179 {
5180 if (fmt[i] == 'e')
5181 {
5182 find_constant_pool_ref (XEXP (x, i), ref);
5183 }
5184 else if (fmt[i] == 'E')
5185 {
5186 for (j = 0; j < XVECLEN (x, i); j++)
5187 find_constant_pool_ref (XVECEXP (x, i, j), ref);
5188 }
5189 }
5190}
5191
20074f87 5192/* Replace every reference to the annotated literal pool
5193 symbol REF in X by its base plus OFFSET. */
0756cebb 5194
5195static void
20074f87 5196replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
0756cebb 5197{
5198 int i, j;
5199 const char *fmt;
5200
32eda510 5201 gcc_assert (*x != ref);
0756cebb 5202
20074f87 5203 if (GET_CODE (*x) == UNSPEC
5204 && XINT (*x, 1) == UNSPEC_LTREF
5205 && XVECEXP (*x, 0, 0) == ref)
0756cebb 5206 {
20074f87 5207 *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
5208 return;
0756cebb 5209 }
5210
20074f87 5211 if (GET_CODE (*x) == PLUS
5212 && GET_CODE (XEXP (*x, 1)) == CONST_INT
5213 && GET_CODE (XEXP (*x, 0)) == UNSPEC
5214 && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
5215 && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
0756cebb 5216 {
20074f87 5217 rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
5218 *x = plus_constant (addr, INTVAL (XEXP (*x, 1)));
5219 return;
0756cebb 5220 }
5221
5222 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5223 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5224 {
5225 if (fmt[i] == 'e')
5226 {
20074f87 5227 replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
0756cebb 5228 }
5229 else if (fmt[i] == 'E')
5230 {
5231 for (j = 0; j < XVECLEN (*x, i); j++)
20074f87 5232 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
0756cebb 5233 }
5234 }
5235}
5236
f81e845f 5237/* Check whether X contains an UNSPEC_LTREL_BASE.
12ef3745 5238 Return its constant pool symbol if found, NULL_RTX otherwise. */
96be3ab6 5239
12ef3745 5240static rtx
b40da9a7 5241find_ltrel_base (rtx x)
96be3ab6 5242{
96be3ab6 5243 int i, j;
5244 const char *fmt;
5245
12ef3745 5246 if (GET_CODE (x) == UNSPEC
5247 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5248 return XVECEXP (x, 0, 0);
96be3ab6 5249
5250 fmt = GET_RTX_FORMAT (GET_CODE (x));
5251 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5252 {
5253 if (fmt[i] == 'e')
5254 {
12ef3745 5255 rtx fnd = find_ltrel_base (XEXP (x, i));
5256 if (fnd)
5257 return fnd;
96be3ab6 5258 }
5259 else if (fmt[i] == 'E')
5260 {
5261 for (j = 0; j < XVECLEN (x, i); j++)
12ef3745 5262 {
5263 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
5264 if (fnd)
5265 return fnd;
5266 }
96be3ab6 5267 }
5268 }
5269
12ef3745 5270 return NULL_RTX;
96be3ab6 5271}
5272
20074f87 5273/* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
96be3ab6 5274
5275static void
20074f87 5276replace_ltrel_base (rtx *x)
96be3ab6 5277{
12ef3745 5278 int i, j;
96be3ab6 5279 const char *fmt;
5280
12ef3745 5281 if (GET_CODE (*x) == UNSPEC
5282 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
96be3ab6 5283 {
20074f87 5284 *x = XVECEXP (*x, 0, 1);
12ef3745 5285 return;
96be3ab6 5286 }
5287
5288 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5289 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5290 {
5291 if (fmt[i] == 'e')
5292 {
20074f87 5293 replace_ltrel_base (&XEXP (*x, i));
96be3ab6 5294 }
5295 else if (fmt[i] == 'E')
5296 {
5297 for (j = 0; j < XVECLEN (*x, i); j++)
20074f87 5298 replace_ltrel_base (&XVECEXP (*x, i, j));
96be3ab6 5299 }
5300 }
5301}
5302
5303
12ef3745 5304/* We keep a list of constants which we have to add to internal
0756cebb 5305 constant tables in the middle of large functions. */
5306
36868490 5307#define NR_C_MODES 11
f81e845f 5308enum machine_mode constant_modes[NR_C_MODES] =
0756cebb 5309{
36868490 5310 TFmode, TImode, TDmode,
5311 DFmode, DImode, DDmode,
5312 SFmode, SImode, SDmode,
0756cebb 5313 HImode,
5314 QImode
5315};
5316
0756cebb 5317struct constant
5318{
5319 struct constant *next;
5320 rtx value;
5321 rtx label;
5322};
5323
5324struct constant_pool
5325{
5326 struct constant_pool *next;
5327 rtx first_insn;
96be3ab6 5328 rtx pool_insn;
5329 bitmap insns;
0756cebb 5330
5331 struct constant *constants[NR_C_MODES];
d345b493 5332 struct constant *execute;
0756cebb 5333 rtx label;
5334 int size;
5335};
5336
875862bf 5337/* Allocate new constant_pool structure. */
5338
5339static struct constant_pool *
5340s390_alloc_pool (void)
5341{
5342 struct constant_pool *pool;
5343 int i;
5344
5345 pool = (struct constant_pool *) xmalloc (sizeof *pool);
5346 pool->next = NULL;
5347 for (i = 0; i < NR_C_MODES; i++)
5348 pool->constants[i] = NULL;
5349
5350 pool->execute = NULL;
5351 pool->label = gen_label_rtx ();
5352 pool->first_insn = NULL_RTX;
5353 pool->pool_insn = NULL_RTX;
5354 pool->insns = BITMAP_ALLOC (NULL);
5355 pool->size = 0;
5356
5357 return pool;
5358}
0756cebb 5359
5360/* Create new constant pool covering instructions starting at INSN
5361 and chain it to the end of POOL_LIST. */
5362
5363static struct constant_pool *
b40da9a7 5364s390_start_pool (struct constant_pool **pool_list, rtx insn)
0756cebb 5365{
5366 struct constant_pool *pool, **prev;
0756cebb 5367
c2c1332a 5368 pool = s390_alloc_pool ();
0756cebb 5369 pool->first_insn = insn;
96be3ab6 5370
0756cebb 5371 for (prev = pool_list; *prev; prev = &(*prev)->next)
5372 ;
5373 *prev = pool;
5374
5375 return pool;
5376}
5377
96be3ab6 5378/* End range of instructions covered by POOL at INSN and emit
5379 placeholder insn representing the pool. */
0756cebb 5380
5381static void
b40da9a7 5382s390_end_pool (struct constant_pool *pool, rtx insn)
0756cebb 5383{
96be3ab6 5384 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
5385
5386 if (!insn)
5387 insn = get_last_insn ();
5388
5389 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
5390 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5391}
5392
5393/* Add INSN to the list of insns covered by POOL. */
5394
5395static void
b40da9a7 5396s390_add_pool_insn (struct constant_pool *pool, rtx insn)
96be3ab6 5397{
5398 bitmap_set_bit (pool->insns, INSN_UID (insn));
0756cebb 5399}
5400
5401/* Return pool out of POOL_LIST that covers INSN. */
5402
5403static struct constant_pool *
b40da9a7 5404s390_find_pool (struct constant_pool *pool_list, rtx insn)
0756cebb 5405{
0756cebb 5406 struct constant_pool *pool;
5407
0756cebb 5408 for (pool = pool_list; pool; pool = pool->next)
96be3ab6 5409 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
0756cebb 5410 break;
5411
5412 return pool;
5413}
5414
96be3ab6 5415/* Add constant VAL of mode MODE to the constant pool POOL. */
0756cebb 5416
96be3ab6 5417static void
b40da9a7 5418s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
0756cebb 5419{
5420 struct constant *c;
0756cebb 5421 int i;
5422
5423 for (i = 0; i < NR_C_MODES; i++)
5424 if (constant_modes[i] == mode)
5425 break;
32eda510 5426 gcc_assert (i != NR_C_MODES);
0756cebb 5427
5428 for (c = pool->constants[i]; c != NULL; c = c->next)
5429 if (rtx_equal_p (val, c->value))
5430 break;
5431
5432 if (c == NULL)
5433 {
5434 c = (struct constant *) xmalloc (sizeof *c);
5435 c->value = val;
5436 c->label = gen_label_rtx ();
5437 c->next = pool->constants[i];
5438 pool->constants[i] = c;
5439 pool->size += GET_MODE_SIZE (mode);
5440 }
96be3ab6 5441}
0756cebb 5442
96be3ab6 5443/* Find constant VAL of mode MODE in the constant pool POOL.
5444 Return an RTX describing the distance from the start of
5445 the pool to the location of the new constant. */
f81e845f 5446
96be3ab6 5447static rtx
b40da9a7 5448s390_find_constant (struct constant_pool *pool, rtx val,
5449 enum machine_mode mode)
96be3ab6 5450{
5451 struct constant *c;
5452 rtx offset;
5453 int i;
f81e845f 5454
96be3ab6 5455 for (i = 0; i < NR_C_MODES; i++)
5456 if (constant_modes[i] == mode)
5457 break;
32eda510 5458 gcc_assert (i != NR_C_MODES);
f81e845f 5459
96be3ab6 5460 for (c = pool->constants[i]; c != NULL; c = c->next)
5461 if (rtx_equal_p (val, c->value))
5462 break;
f81e845f 5463
32eda510 5464 gcc_assert (c);
f81e845f 5465
96be3ab6 5466 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
5467 gen_rtx_LABEL_REF (Pmode, pool->label));
0756cebb 5468 offset = gen_rtx_CONST (Pmode, offset);
5469 return offset;
5470}
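/* For example, for a constant labeled L2 in a pool with base label L1,
   the RTX returned above has the shape
     (const (minus (label_ref L2) (label_ref L1)))
   in Pmode, i.e. the byte offset of the entry from the pool base.  */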
5471
875862bf 5472/* Check whether INSN is an execute. Return the label_ref to its
5473 execute target template if so, NULL_RTX otherwise. */
5474
5475static rtx
5476s390_execute_label (rtx insn)
5477{
5478 if (GET_CODE (insn) == INSN
5479 && GET_CODE (PATTERN (insn)) == PARALLEL
5480 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
5481 && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
5482 return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);
5483
5484 return NULL_RTX;
5485}
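/* The pattern recognized above has the shape
     (parallel [(unspec [...] UNSPEC_EXECUTE) ...])
   with operand 2 of the UNSPEC being a label_ref to the out-of-line
   target template (or const0_rtx; cf. s390_cannot_copy_insn_p below).  */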
5486
d345b493 5487/* Add execute target for INSN to the constant pool POOL. */
5488
5489static void
5490s390_add_execute (struct constant_pool *pool, rtx insn)
5491{
5492 struct constant *c;
5493
5494 for (c = pool->execute; c != NULL; c = c->next)
5495 if (INSN_UID (insn) == INSN_UID (c->value))
5496 break;
5497
5498 if (c == NULL)
5499 {
d345b493 5500 c = (struct constant *) xmalloc (sizeof *c);
5501 c->value = insn;
babfdedf 5502 c->label = gen_label_rtx ();
d345b493 5503 c->next = pool->execute;
5504 pool->execute = c;
babfdedf 5505 pool->size += 6;
d345b493 5506 }
5507}
5508
5509/* Find execute target for INSN in the constant pool POOL.
5510 Return an RTX describing the distance from the start of
5511 the pool to the location of the execute target. */
5512
5513static rtx
5514s390_find_execute (struct constant_pool *pool, rtx insn)
5515{
5516 struct constant *c;
5517 rtx offset;
5518
5519 for (c = pool->execute; c != NULL; c = c->next)
5520 if (INSN_UID (insn) == INSN_UID (c->value))
5521 break;
5522
32eda510 5523 gcc_assert (c);
d345b493 5524
5525 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
5526 gen_rtx_LABEL_REF (Pmode, pool->label));
5527 offset = gen_rtx_CONST (Pmode, offset);
5528 return offset;
5529}
5530
875862bf 5531/* For an execute INSN, extract the execute target template. */
d345b493 5532
5533static rtx
875862bf 5534s390_execute_target (rtx insn)
d345b493 5535{
875862bf 5536 rtx pattern = PATTERN (insn);
5537 gcc_assert (s390_execute_label (insn));
d345b493 5538
5539 if (XVECLEN (pattern, 0) == 2)
5540 {
5541 pattern = copy_rtx (XVECEXP (pattern, 0, 1));
5542 }
5543 else
5544 {
5545 rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
5546 int i;
5547
5548 for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
5549 RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));
5550
5551 pattern = gen_rtx_PARALLEL (VOIDmode, vec);
5552 }
5553
5554 return pattern;
5555}
5556
5557/* Indicate that INSN cannot be duplicated. This is the case for
5558 execute insns that carry a unique label. */
5559
5560static bool
5561s390_cannot_copy_insn_p (rtx insn)
5562{
5563 rtx label = s390_execute_label (insn);
5564 return label && label != const0_rtx;
5565}
5566
c2c1332a 5567/* Dump out the constants in POOL. If REMOTE_LABEL is true,
5568 do not emit the pool base label. */
0756cebb 5569
d345b493 5570static void
c2c1332a 5571s390_dump_pool (struct constant_pool *pool, bool remote_label)
0756cebb 5572{
5573 struct constant *c;
d345b493 5574 rtx insn = pool->pool_insn;
0756cebb 5575 int i;
5576
d345b493 5577 /* Switch to rodata section. */
5578 if (TARGET_CPU_ZARCH)
5579 {
5580 insn = emit_insn_after (gen_pool_section_start (), insn);
5581 INSN_ADDRESSES_NEW (insn, -1);
5582 }
5583
5584 /* Ensure minimum pool alignment. */
dafc8d45 5585 if (TARGET_CPU_ZARCH)
d345b493 5586 insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
0756cebb 5587 else
d345b493 5588 insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
0756cebb 5589 INSN_ADDRESSES_NEW (insn, -1);
5590
d345b493 5591 /* Emit pool base label. */
c2c1332a 5592 if (!remote_label)
5593 {
5594 insn = emit_label_after (pool->label, insn);
5595 INSN_ADDRESSES_NEW (insn, -1);
5596 }
0756cebb 5597
5598 /* Dump constants in descending alignment requirement order,
5599 ensuring proper alignment for every constant. */
5600 for (i = 0; i < NR_C_MODES; i++)
5601 for (c = pool->constants[i]; c; c = c->next)
5602 {
12ef3745 5603 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
96be3ab6 5604 rtx value = c->value;
5605 if (GET_CODE (value) == CONST
5606 && GET_CODE (XEXP (value, 0)) == UNSPEC
12ef3745 5607 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
96be3ab6 5608 && XVECLEN (XEXP (value, 0), 0) == 1)
5609 {
5610 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
b40da9a7 5611 gen_rtx_LABEL_REF (VOIDmode, pool->label));
96be3ab6 5612 value = gen_rtx_CONST (VOIDmode, value);
5613 }
5614
0756cebb 5615 insn = emit_label_after (c->label, insn);
5616 INSN_ADDRESSES_NEW (insn, -1);
df82fb76 5617
f588eb9f 5618 value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
df82fb76 5619 gen_rtvec (1, value),
5620 UNSPECV_POOL_ENTRY);
5621 insn = emit_insn_after (value, insn);
0756cebb 5622 INSN_ADDRESSES_NEW (insn, -1);
5623 }
5624
d345b493 5625 /* Ensure minimum alignment for instructions. */
5626 insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
0756cebb 5627 INSN_ADDRESSES_NEW (insn, -1);
5628
d345b493 5629 /* Output in-pool execute template insns. */
5630 for (c = pool->execute; c; c = c->next)
5631 {
d345b493 5632 insn = emit_label_after (c->label, insn);
5633 INSN_ADDRESSES_NEW (insn, -1);
5634
5635 insn = emit_insn_after (s390_execute_target (c->value), insn);
5636 INSN_ADDRESSES_NEW (insn, -1);
5637 }
5638
5639 /* Switch back to previous section. */
5640 if (TARGET_CPU_ZARCH)
5641 {
5642 insn = emit_insn_after (gen_pool_section_end (), insn);
5643 INSN_ADDRESSES_NEW (insn, -1);
5644 }
5645
0756cebb 5646 insn = emit_barrier_after (insn);
5647 INSN_ADDRESSES_NEW (insn, -1);
5648
96be3ab6 5649 /* Remove placeholder insn. */
5650 remove_insn (pool->pool_insn);
d345b493 5651}
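/* Sketch of the insn sequence emitted above (TARGET_CPU_ZARCH case):
     <switch to .rodata>
     <align 8>
   Lpool:                  (unless REMOTE_LABEL)
     <constants, largest modes first, each preceded by its label>
     <align 2>
     <execute target templates, each preceded by its label>
     <switch back to previous section>
     <barrier>
   On pre-z architectures the section switches are omitted and the pool
   is aligned to 4 instead of 8.  */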
5652
0756cebb 5653/* Free all memory used by POOL. */
5654
5655static void
b40da9a7 5656s390_free_pool (struct constant_pool *pool)
0756cebb 5657{
d345b493 5658 struct constant *c, *next;
0756cebb 5659 int i;
5660
5661 for (i = 0; i < NR_C_MODES; i++)
d345b493 5662 for (c = pool->constants[i]; c; c = next)
5663 {
5664 next = c->next;
5665 free (c);
5666 }
5667
5668 for (c = pool->execute; c; c = next)
0756cebb 5669 {
d345b493 5670 next = c->next;
5671 free (c);
0756cebb 5672 }
5673
4d6e8511 5674 BITMAP_FREE (pool->insns);
0756cebb 5675 free (pool);
f81e845f 5676}
0756cebb 5677
0756cebb 5678
c2c1332a 5679/* Collect main literal pool. Return NULL on overflow. */
5680
5681static struct constant_pool *
5682s390_mainpool_start (void)
5683{
5684 struct constant_pool *pool;
5685 rtx insn;
5686
5687 pool = s390_alloc_pool ();
5688
5689 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5690 {
5691 if (GET_CODE (insn) == INSN
20074f87 5692 && GET_CODE (PATTERN (insn)) == SET
5693 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
5694 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
c2c1332a 5695 {
32eda510 5696 gcc_assert (!pool->pool_insn);
c2c1332a 5697 pool->pool_insn = insn;
5698 }
5699
babfdedf 5700 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
d345b493 5701 {
5702 s390_add_execute (pool, insn);
5703 }
5704 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
c2c1332a 5705 {
5706 rtx pool_ref = NULL_RTX;
5707 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5708 if (pool_ref)
5709 {
5710 rtx constant = get_pool_constant (pool_ref);
5711 enum machine_mode mode = get_pool_mode (pool_ref);
5712 s390_add_constant (pool, constant, mode);
5713 }
5714 }
5715 }
5716
32eda510 5717 gcc_assert (pool->pool_insn || pool->size == 0);
c2c1332a 5718
5719 if (pool->size >= 4096)
5720 {
7de9f7aa 5721 /* We're going to chunkify the pool, so remove the main
5722 pool placeholder insn. */
5723 remove_insn (pool->pool_insn);
5724
c2c1332a 5725 s390_free_pool (pool);
5726 pool = NULL;
5727 }
5728
5729 return pool;
5730}
5731
5732/* POOL holds the main literal pool as collected by s390_mainpool_start.
5733 Modify the current function to output the pool constants as well as
20074f87 5734 the pool register setup instruction. */
c2c1332a 5735
5736static void
20074f87 5737s390_mainpool_finish (struct constant_pool *pool)
c2c1332a 5738{
4fed3f99 5739 rtx base_reg = cfun->machine->base_reg;
c2c1332a 5740 rtx insn;
5741
5742 /* If the pool is empty, we're done. */
5743 if (pool->size == 0)
5744 {
4fed3f99 5745 /* We don't actually need a base register after all. */
5746 cfun->machine->base_reg = NULL_RTX;
5747
5748 if (pool->pool_insn)
5749 remove_insn (pool->pool_insn);
c2c1332a 5750 s390_free_pool (pool);
5751 return;
5752 }
5753
5754 /* We need correct insn addresses. */
5755 shorten_branches (get_insns ());
5756
dafc8d45 5757 /* On zSeries, we use a LARL to load the pool register. The pool is
c2c1332a 5758 located in the .rodata section, so we emit it after the function. */
dafc8d45 5759 if (TARGET_CPU_ZARCH)
c2c1332a 5760 {
5761 insn = gen_main_base_64 (base_reg, pool->label);
5762 insn = emit_insn_after (insn, pool->pool_insn);
5763 INSN_ADDRESSES_NEW (insn, -1);
5764 remove_insn (pool->pool_insn);
f588eb9f 5765
5766 insn = get_last_insn ();
c2c1332a 5767 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5768 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5769
5770 s390_dump_pool (pool, 0);
5771 }
5772
dafc8d45 5773 /* On S/390, if the total size of the function's code plus literal pool
c2c1332a 5774 does not exceed 4096 bytes, we use BASR to set up a function base
5775 pointer, and emit the literal pool at the end of the function. */
5776 else if (INSN_ADDRESSES (INSN_UID (get_last_insn ()))
5777 + pool->size + 8 /* alignment slop */ < 4096)
5778 {
5779 insn = gen_main_base_31_small (base_reg, pool->label);
5780 insn = emit_insn_after (insn, pool->pool_insn);
5781 INSN_ADDRESSES_NEW (insn, -1);
5782 remove_insn (pool->pool_insn);
5783
5784 insn = emit_label_after (pool->label, insn);
5785 INSN_ADDRESSES_NEW (insn, -1);
5786
5787 insn = get_last_insn ();
5788 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5789 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5790
5791 s390_dump_pool (pool, 1);
5792 }
5793
5794 /* Otherwise, we emit an inline literal pool and use BASR to branch
5795 over it, setting up the pool register at the same time. */
5796 else
5797 {
5798 rtx pool_end = gen_label_rtx ();
5799
5800 insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
5801 insn = emit_insn_after (insn, pool->pool_insn);
5802 INSN_ADDRESSES_NEW (insn, -1);
5803 remove_insn (pool->pool_insn);
5804
5805 insn = emit_label_after (pool->label, insn);
5806 INSN_ADDRESSES_NEW (insn, -1);
5807
5808 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5809 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5810
5811 insn = emit_label_after (pool_end, pool->pool_insn);
5812 INSN_ADDRESSES_NEW (insn, -1);
5813
5814 s390_dump_pool (pool, 1);
5815 }
5816
5817
5818 /* Replace all literal pool references. */
5819
5820 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5821 {
5822 if (INSN_P (insn))
20074f87 5823 replace_ltrel_base (&PATTERN (insn));
c2c1332a 5824
5825 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5826 {
5827 rtx addr, pool_ref = NULL_RTX;
5828 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5829 if (pool_ref)
5830 {
d345b493 5831 if (s390_execute_label (insn))
5832 addr = s390_find_execute (pool, insn);
5833 else
5834 addr = s390_find_constant (pool, get_pool_constant (pool_ref),
5835 get_pool_mode (pool_ref));
5836
c2c1332a 5837 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
5838 INSN_CODE (insn) = -1;
5839 }
5840 }
5841 }
5842
5843
5844 /* Free the pool. */
5845 s390_free_pool (pool);
5846}
5847
5848/* POOL holds the main literal pool as collected by s390_mainpool_start.
5849 We have decided we cannot use this pool, so revert all changes
5850 to the current function that were done by s390_mainpool_start. */
5851static void
5852s390_mainpool_cancel (struct constant_pool *pool)
5853{
5854 /* We didn't actually change the instruction stream, so simply
5855 free the pool memory. */
5856 s390_free_pool (pool);
5857}
5858
5859
20074f87 5860/* Chunkify the literal pool. */
4673c1a0 5861
0756cebb 5862#define S390_POOL_CHUNK_MIN 0xc00
5863#define S390_POOL_CHUNK_MAX 0xe00
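/* Both limits are kept well below the 4096-byte displacement range so
   that code emitted between two chunk checks -- including the worst-case
   base register reload insns accounted for in extra_size below -- cannot
   push a chunk out of reach of its base register.  */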
5864
f81e845f 5865static struct constant_pool *
20074f87 5866s390_chunkify_start (void)
4673c1a0 5867{
0756cebb 5868 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
5869 int extra_size = 0;
5870 bitmap far_labels;
12ef3745 5871 rtx pending_ltrel = NULL_RTX;
479ca6e8 5872 rtx insn;
4673c1a0 5873
b40da9a7 5874 rtx (*gen_reload_base) (rtx, rtx) =
dafc8d45 5875 TARGET_CPU_ZARCH ? gen_reload_base_64 : gen_reload_base_31;
96be3ab6 5876
5877
9a2a66ae 5878 /* We need correct insn addresses. */
5879
5880 shorten_branches (get_insns ());
5881
12ef3745 5882 /* Scan all insns and move literals to pool chunks. */
479ca6e8 5883
479ca6e8 5884 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4673c1a0 5885 {
12ef3745 5886 /* Check for pending LTREL_BASE. */
5887 if (INSN_P (insn))
5888 {
5889 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
5890 if (ltrel_base)
5891 {
32eda510 5892 gcc_assert (ltrel_base == pending_ltrel);
5893 pending_ltrel = NULL_RTX;
12ef3745 5894 }
5895 }
5896
babfdedf 5897 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
d345b493 5898 {
5899 if (!curr_pool)
5900 curr_pool = s390_start_pool (&pool_list, insn);
5901
5902 s390_add_execute (curr_pool, insn);
5903 s390_add_pool_insn (curr_pool, insn);
5904 }
5905 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
0756cebb 5906 {
96be3ab6 5907 rtx pool_ref = NULL_RTX;
0756cebb 5908 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5909 if (pool_ref)
5910 {
12ef3745 5911 rtx constant = get_pool_constant (pool_ref);
5912 enum machine_mode mode = get_pool_mode (pool_ref);
5913
0756cebb 5914 if (!curr_pool)
5915 curr_pool = s390_start_pool (&pool_list, insn);
5916
12ef3745 5917 s390_add_constant (curr_pool, constant, mode);
96be3ab6 5918 s390_add_pool_insn (curr_pool, insn);
96be3ab6 5919
12ef3745 5920 /* Don't split the pool chunk between a LTREL_OFFSET load
5921 and the corresponding LTREL_BASE. */
5922 if (GET_CODE (constant) == CONST
5923 && GET_CODE (XEXP (constant, 0)) == UNSPEC
5924 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
5925 {
32eda510 5926 gcc_assert (!pending_ltrel);
12ef3745 5927 pending_ltrel = pool_ref;
5928 }
0756cebb 5929 }
5930 }
5931
96be3ab6 5932 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
12ef3745 5933 {
5934 if (curr_pool)
5935 s390_add_pool_insn (curr_pool, insn);
5936 /* An LTREL_BASE must follow within the same basic block. */
32eda510 5937 gcc_assert (!pending_ltrel);
12ef3745 5938 }
96be3ab6 5939
f81e845f 5940 if (!curr_pool
0756cebb 5941 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
5942 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
4673c1a0 5943 continue;
479ca6e8 5944
dafc8d45 5945 if (TARGET_CPU_ZARCH)
4673c1a0 5946 {
0756cebb 5947 if (curr_pool->size < S390_POOL_CHUNK_MAX)
5948 continue;
479ca6e8 5949
96be3ab6 5950 s390_end_pool (curr_pool, NULL_RTX);
0756cebb 5951 curr_pool = NULL;
5952 }
5953 else
4673c1a0 5954 {
0756cebb 5955 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
b40da9a7 5956 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
0756cebb 5957 + extra_size;
5958
5959 /* We will later have to insert base register reload insns.
5960 Those will have an effect on code size, which we need to
5961 consider here. This calculation makes rather pessimistic
5962 worst-case assumptions. */
96be3ab6 5963 if (GET_CODE (insn) == CODE_LABEL)
0756cebb 5964 extra_size += 6;
0756cebb 5965
5966 if (chunk_size < S390_POOL_CHUNK_MIN
5967 && curr_pool->size < S390_POOL_CHUNK_MIN)
5968 continue;
5969
5970 /* Pool chunks can only be inserted after BARRIERs ... */
5971 if (GET_CODE (insn) == BARRIER)
5972 {
5973 s390_end_pool (curr_pool, insn);
5974 curr_pool = NULL;
5975 extra_size = 0;
5976 }
5977
5978 /* ... so if we don't find one in time, create one. */
5979 else if ((chunk_size > S390_POOL_CHUNK_MAX
96be3ab6 5980 || curr_pool->size > S390_POOL_CHUNK_MAX))
0756cebb 5981 {
0756cebb 5982 rtx label, jump, barrier;
5983
96be3ab6 5984 /* We can insert the barrier only after a 'real' insn. */
5985 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
5986 continue;
5987 if (get_attr_length (insn) == 0)
5988 continue;
5989
f81e845f 5990 /* Don't separate LTREL_BASE from the corresponding
12ef3745 5991 LTREL_OFFSET load. */
5992 if (pending_ltrel)
96be3ab6 5993 continue;
5994
b40da9a7 5995 label = gen_label_rtx ();
0756cebb 5996 jump = emit_jump_insn_after (gen_jump (label), insn);
5997 barrier = emit_barrier_after (jump);
5998 insn = emit_label_after (label, barrier);
5999 JUMP_LABEL (jump) = label;
6000 LABEL_NUSES (label) = 1;
6001
96be3ab6 6002 INSN_ADDRESSES_NEW (jump, -1);
6003 INSN_ADDRESSES_NEW (barrier, -1);
0756cebb 6004 INSN_ADDRESSES_NEW (insn, -1);
6005
6006 s390_end_pool (curr_pool, barrier);
6007 curr_pool = NULL;
6008 extra_size = 0;
6009 }
479ca6e8 6010 }
4673c1a0 6011 }
9fa6d5d9 6012
96be3ab6 6013 if (curr_pool)
6014 s390_end_pool (curr_pool, NULL_RTX);
32eda510 6015 gcc_assert (!pending_ltrel);
0756cebb 6016
f81e845f 6017 /* Find all labels that are branched into
479ca6e8 6018 from an insn belonging to a different chunk. */
9fa6d5d9 6019
4d6e8511 6020 far_labels = BITMAP_ALLOC (NULL);
a8ef833a 6021
479ca6e8 6022 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4673c1a0 6023 {
0756cebb 6024 /* Labels marked with LABEL_PRESERVE_P can be the target
6025 of non-local jumps, so we have to mark them.
6026 The same holds for named labels.
6027
6028 Don't do that, however, if it is the label before
6029 a jump table. */
6030
f81e845f 6031 if (GET_CODE (insn) == CODE_LABEL
0756cebb 6032 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
6033 {
6034 rtx vec_insn = next_real_insn (insn);
f81e845f 6035 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
0756cebb 6036 PATTERN (vec_insn) : NULL_RTX;
6037 if (!vec_pat
6038 || !(GET_CODE (vec_pat) == ADDR_VEC
6039 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
6040 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
6041 }
6042
6043 /* If we have a direct jump (conditional or unconditional)
6044 or a casesi jump, check all potential targets. */
f81e845f 6045 else if (GET_CODE (insn) == JUMP_INSN)
479ca6e8 6046 {
6047 rtx pat = PATTERN (insn);
3c482144 6048 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
6049 pat = XVECEXP (pat, 0, 0);
6050
f81e845f 6051 if (GET_CODE (pat) == SET)
479ca6e8 6052 {
96be3ab6 6053 rtx label = JUMP_LABEL (insn);
479ca6e8 6054 if (label)
6055 {
f81e845f 6056 if (s390_find_pool (pool_list, label)
0756cebb 6057 != s390_find_pool (pool_list, insn))
6058 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
479ca6e8 6059 }
f81e845f 6060 }
0756cebb 6061 else if (GET_CODE (pat) == PARALLEL
6062 && XVECLEN (pat, 0) == 2
6063 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
6064 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
6065 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
6066 {
6067 /* Find the jump table used by this casesi jump. */
6068 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
6069 rtx vec_insn = next_real_insn (vec_label);
f81e845f 6070 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
0756cebb 6071 PATTERN (vec_insn) : NULL_RTX;
6072 if (vec_pat
6073 && (GET_CODE (vec_pat) == ADDR_VEC
6074 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
6075 {
6076 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
479ca6e8 6077
0756cebb 6078 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
6079 {
6080 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
479ca6e8 6081
f81e845f 6082 if (s390_find_pool (pool_list, label)
0756cebb 6083 != s390_find_pool (pool_list, insn))
6084 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
6085 }
6086 }
6087 }
479ca6e8 6088 }
4673c1a0 6089 }
9fa6d5d9 6090
0756cebb 6091 /* Insert base register reload insns before every pool. */
6092
6093 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
96be3ab6 6094 {
20074f87 6095 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
6096 curr_pool->label);
96be3ab6 6097 rtx insn = curr_pool->first_insn;
6098 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
6099 }
0756cebb 6100
6101 /* Insert base register reload insns at every far label. */
479ca6e8 6102
479ca6e8 6103 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
f81e845f 6104 if (GET_CODE (insn) == CODE_LABEL
0756cebb 6105 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
6106 {
6107 struct constant_pool *pool = s390_find_pool (pool_list, insn);
6108 if (pool)
6109 {
20074f87 6110 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
6111 pool->label);
96be3ab6 6112 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
0756cebb 6113 }
6114 }
6115
96be3ab6 6116
4d6e8511 6117 BITMAP_FREE (far_labels);
479ca6e8 6118
479ca6e8 6119
6120 /* Recompute insn addresses. */
6121
6122 init_insn_lengths ();
6123 shorten_branches (get_insns ());
4673c1a0 6124
96be3ab6 6125 return pool_list;
6126}
4673c1a0 6127
96be3ab6 6128/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
f81e845f 6129 After we have decided to use this list, finish implementing
20074f87 6130 all changes to the current function as required. */
f81e845f 6131
96be3ab6 6132static void
20074f87 6133s390_chunkify_finish (struct constant_pool *pool_list)
96be3ab6 6134{
96be3ab6 6135 struct constant_pool *curr_pool = NULL;
6136 rtx insn;
f81e845f 6137
6138
96be3ab6 6139 /* Replace all literal pool references. */
6140
f81e845f 6141 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
96be3ab6 6142 {
12ef3745 6143 if (INSN_P (insn))
20074f87 6144 replace_ltrel_base (&PATTERN (insn));
12ef3745 6145
96be3ab6 6146 curr_pool = s390_find_pool (pool_list, insn);
6147 if (!curr_pool)
6148 continue;
6149
6150 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6151 {
6152 rtx addr, pool_ref = NULL_RTX;
6153 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6154 if (pool_ref)
6155 {
d345b493 6156 if (s390_execute_label (insn))
6157 addr = s390_find_execute (curr_pool, insn);
6158 else
6159 addr = s390_find_constant (curr_pool,
6160 get_pool_constant (pool_ref),
6161 get_pool_mode (pool_ref));
6162
96be3ab6 6163 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
6164 INSN_CODE (insn) = -1;
6165 }
96be3ab6 6166 }
6167 }
6168
6169 /* Dump out all literal pools. */
f81e845f 6170
96be3ab6 6171 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
c2c1332a 6172 s390_dump_pool (curr_pool, 0);
f81e845f 6173
96be3ab6 6174 /* Free pool list. */
6175
6176 while (pool_list)
6177 {
6178 struct constant_pool *next = pool_list->next;
6179 s390_free_pool (pool_list);
6180 pool_list = next;
6181 }
6182}
6183
6184/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
6185 We have decided we cannot use this list, so revert all changes
6186 to the current function that were done by s390_chunkify_start. */
f81e845f 6187
96be3ab6 6188static void
b40da9a7 6189s390_chunkify_cancel (struct constant_pool *pool_list)
96be3ab6 6190{
6191 struct constant_pool *curr_pool = NULL;
6192 rtx insn;
6193
6194 /* Remove all pool placeholder insns. */
6195
6196 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6197 {
6198 /* Did we insert an extra barrier? Remove it. */
6199 rtx barrier = PREV_INSN (curr_pool->pool_insn);
6200 rtx jump = barrier ? PREV_INSN (barrier) : NULL_RTX;
6201 rtx label = NEXT_INSN (curr_pool->pool_insn);
6202
6203 if (jump && GET_CODE (jump) == JUMP_INSN
6204 && barrier && GET_CODE (barrier) == BARRIER
6205 && label && GET_CODE (label) == CODE_LABEL
6206 && GET_CODE (PATTERN (jump)) == SET
6207 && SET_DEST (PATTERN (jump)) == pc_rtx
6208 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
6209 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
6210 {
6211 remove_insn (jump);
6212 remove_insn (barrier);
6213 remove_insn (label);
0756cebb 6214 }
4673c1a0 6215
96be3ab6 6216 remove_insn (curr_pool->pool_insn);
6217 }
6218
12ef3745 6219 /* Remove all base register reload insns. */
96be3ab6 6220
6221 for (insn = get_insns (); insn; )
6222 {
6223 rtx next_insn = NEXT_INSN (insn);
6224
6225 if (GET_CODE (insn) == INSN
6226 && GET_CODE (PATTERN (insn)) == SET
6227 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
12ef3745 6228 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
96be3ab6 6229 remove_insn (insn);
4673c1a0 6230
96be3ab6 6231 insn = next_insn;
6232 }
6233
6234 /* Free pool list. */
4673c1a0 6235
0756cebb 6236 while (pool_list)
4673c1a0 6237 {
0756cebb 6238 struct constant_pool *next = pool_list->next;
6239 s390_free_pool (pool_list);
6240 pool_list = next;
4673c1a0 6241 }
4673c1a0 6242}
6243
0756cebb 6244
74d2529d 6245/* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
df82fb76 6246
6247void
74d2529d 6248s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
df82fb76 6249{
6250 REAL_VALUE_TYPE r;
6251
6252 switch (GET_MODE_CLASS (mode))
6253 {
6254 case MODE_FLOAT:
36868490 6255 case MODE_DECIMAL_FLOAT:
32eda510 6256 gcc_assert (GET_CODE (exp) == CONST_DOUBLE);
df82fb76 6257
6258 REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
6259 assemble_real (r, mode, align);
6260 break;
6261
6262 case MODE_INT:
74d2529d 6263 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
df82fb76 6264 break;
6265
6266 default:
32eda510 6267 gcc_unreachable ();
df82fb76 6268 }
6269}
6270
6271
875862bf 6272/* Return an RTL expression representing the value of the return address
6273 for the frame COUNT steps up from the current frame. FRAME is the
6274 frame pointer of that frame. */
0756cebb 6275
875862bf 6276rtx
6277s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
0756cebb 6278{
875862bf 6279 int offset;
6280 rtx addr;
96be3ab6 6281
875862bf 6282 /* Without backchain, we fail for all but the current frame. */
9a2a66ae 6283
875862bf 6284 if (!TARGET_BACKCHAIN && count > 0)
6285 return NULL_RTX;
9a2a66ae 6286
875862bf 6287 /* For the current frame, we need to make sure the initial
6288 value of RETURN_REGNUM is actually saved. */
9a2a66ae 6289
875862bf 6290 if (count == 0)
9a2a66ae 6291 {
1e639cb0 6292 /* On non-z architectures branch splitting could overwrite r14. */
6293 if (TARGET_CPU_ZARCH)
6294 return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
6295 else
6296 {
6297 cfun_frame_layout.save_return_addr_p = true;
6298 return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
6299 }
875862bf 6300 }
9a2a66ae 6301
875862bf 6302 if (TARGET_PACKED_STACK)
6303 offset = -2 * UNITS_PER_WORD;
6304 else
6305 offset = RETURN_REGNUM * UNITS_PER_WORD;
9a2a66ae 6306
875862bf 6307 addr = plus_constant (frame, offset);
6308 addr = memory_address (Pmode, addr);
6309 return gen_rtx_MEM (Pmode, addr);
6310}
9a2a66ae 6311
875862bf 6312/* Return an RTL expression representing the back chain stored in
6313 the current stack frame. */
5fe74ca1 6314
875862bf 6315rtx
6316s390_back_chain_rtx (void)
6317{
6318 rtx chain;
5fe74ca1 6319
875862bf 6320 gcc_assert (TARGET_BACKCHAIN);
5fe74ca1 6321
875862bf 6322 if (TARGET_PACKED_STACK)
6323 chain = plus_constant (stack_pointer_rtx,
6324 STACK_POINTER_OFFSET - UNITS_PER_WORD);
6325 else
6326 chain = stack_pointer_rtx;
5fe74ca1 6327
875862bf 6328 chain = gen_rtx_MEM (Pmode, chain);
6329 return chain;
6330}
9a2a66ae 6331
875862bf 6332/* Find first call clobbered register unused in a function.
6333 This could be used as base register in a leaf function
6334 or for holding the return address before epilogue. */
9a2a66ae 6335
875862bf 6336static int
6337find_unused_clobbered_reg (void)
6338{
6339 int i;
6340 for (i = 0; i < 6; i++)
3072d30e 6341 if (!df_regs_ever_live_p (i))
875862bf 6342 return i;
6343 return 0;
6344}
9a2a66ae 6345
1e639cb0 6346
6347/* Helper function for s390_regs_ever_clobbered.  Sets the entries in DATA
6348 for all hard regs clobbered by SETREG.  */
6349
6350static void
81a410b1 6351s390_reg_clobbered_rtx (rtx setreg, const_rtx set_insn ATTRIBUTE_UNUSED, void *data)
1e639cb0 6352{
6353 int *regs_ever_clobbered = (int *)data;
6354 unsigned int i, regno;
6355 enum machine_mode mode = GET_MODE (setreg);
6356
6357 if (GET_CODE (setreg) == SUBREG)
6358 {
6359 rtx inner = SUBREG_REG (setreg);
6360 if (!GENERAL_REG_P (inner))
6361 return;
6362 regno = subreg_regno (setreg);
6363 }
6364 else if (GENERAL_REG_P (setreg))
6365 regno = REGNO (setreg);
6366 else
6367 return;
6368
6369 for (i = regno;
6370 i < regno + HARD_REGNO_NREGS (regno, mode);
6371 i++)
6372 regs_ever_clobbered[i] = 1;
6373}
6374
6375/* Walks through all basic blocks of the current function looking
6376 for clobbered hard regs using s390_reg_clobbered_rtx. The fields
6377 of the passed integer array REGS_EVER_CLOBBERED are set to one for
6378 each of those regs. */
6379
6380static void
6381s390_regs_ever_clobbered (int *regs_ever_clobbered)
6382{
6383 basic_block cur_bb;
6384 rtx cur_insn;
6385 unsigned int i;
6386
6387 memset (regs_ever_clobbered, 0, 16 * sizeof (int));
6388
6389 /* For non-leaf functions we have to consider all call clobbered regs to be
6390 clobbered. */
6391 if (!current_function_is_leaf)
6392 {
6393 for (i = 0; i < 16; i++)
6394 regs_ever_clobbered[i] = call_really_used_regs[i];
6395 }
6396
6397 /* Make the "magic" eh_return registers live if necessary. For regs_ever_live
6398 this work is done by liveness analysis (mark_regs_live_at_end).
6399 Special care is needed for functions containing landing pads. Landing pads
6400 may use the eh registers, but the code which sets these registers is not
6401 contained in that function. Hence s390_regs_ever_clobbered is not able to
6402 deal with this automatically. */
6403 if (current_function_calls_eh_return || cfun->machine->has_landing_pad_p)
6404 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++)
220be973 6405 if (current_function_calls_eh_return
6406 || (cfun->machine->has_landing_pad_p
3072d30e 6407 && df_regs_ever_live_p (EH_RETURN_DATA_REGNO (i))))
220be973 6408 regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1;
1e639cb0 6409
6410 /* For nonlocal gotos all call-saved registers have to be saved.
6411 This flag is also set for the unwinding code in libgcc.
6412 See expand_builtin_unwind_init. For regs_ever_live this is done by
6413 reload. */
6414 if (current_function_has_nonlocal_label)
6415 for (i = 0; i < 16; i++)
6416 if (!call_really_used_regs[i])
6417 regs_ever_clobbered[i] = 1;
6418
6419 FOR_EACH_BB (cur_bb)
6420 {
6421 FOR_BB_INSNS (cur_bb, cur_insn)
6422 {
6423 if (INSN_P (cur_insn))
6424 note_stores (PATTERN (cur_insn),
6425 s390_reg_clobbered_rtx,
6426 regs_ever_clobbered);
6427 }
6428 }
6429}
6430
875862bf 6431/* Determine the frame area which actually has to be accessed
6432 in the function epilogue. The values are stored at the
6433 given pointers AREA_BOTTOM (address of the lowest used stack
6434 address) and AREA_TOP (address of the first item which does
6435 not belong to the stack frame). */
5fe74ca1 6436
875862bf 6437static void
6438s390_frame_area (int *area_bottom, int *area_top)
6439{
6440 int b, t;
6441 int i;
5fe74ca1 6442
875862bf 6443 b = INT_MAX;
6444 t = INT_MIN;
67928721 6445
6446 if (cfun_frame_layout.first_restore_gpr != -1)
6447 {
6448 b = (cfun_frame_layout.gprs_offset
6449 + cfun_frame_layout.first_restore_gpr * UNITS_PER_WORD);
6450 t = b + (cfun_frame_layout.last_restore_gpr
6451 - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_WORD;
6452 }
6453
6454 if (TARGET_64BIT && cfun_save_high_fprs_p)
6455 {
6456 b = MIN (b, cfun_frame_layout.f8_offset);
6457 t = MAX (t, (cfun_frame_layout.f8_offset
6458 + cfun_frame_layout.high_fprs * 8));
6459 }
6460
6461 if (!TARGET_64BIT)
6462 for (i = 2; i < 4; i++)
6463 if (cfun_fpr_bit_p (i))
6464 {
6465 b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
6466 t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
6467 }
6468
6469 *area_bottom = b;
6470 *area_top = t;
6471}
6472
4fed3f99 6473/* Fill cfun->machine with info about register usage of current function.
1e639cb0 6474 Return in CLOBBERED_REGS which GPRs are currently considered set. */
8b4a4127 6475
6476static void
1e639cb0 6477s390_register_info (int clobbered_regs[])
8b4a4127 6478{
6479 int i, j;
8b4a4127 6480
67928721 6481 /* fprs 8 - 15 are call-saved in the 64-bit ABI. */
6482 cfun_frame_layout.fpr_bitmap = 0;
6483 cfun_frame_layout.high_fprs = 0;
9a2a66ae 6484 if (TARGET_64BIT)
f81e845f 6485 for (i = 24; i < 32; i++)
3072d30e 6486 if (df_regs_ever_live_p (i) && !global_regs[i])
9a2a66ae 6487 {
67928721 6488 cfun_set_fpr_bit (i - 16);
6489 cfun_frame_layout.high_fprs++;
9a2a66ae 6490 }
8b4a4127 6491
beee1f75 6492 /* Find first and last gpr to be saved. We trust regs_ever_live
6493 data, except that we don't save and restore global registers.
5fe74ca1 6494
beee1f75 6495 Also, all registers with special meaning to the compiler need
6496 extra handling. */
5fe74ca1 6497
1e639cb0 6498 s390_regs_ever_clobbered (clobbered_regs);
6499
beee1f75 6500 for (i = 0; i < 16; i++)
77beec48 6501 clobbered_regs[i] = clobbered_regs[i] && !global_regs[i] && !fixed_regs[i];
1e639cb0 6502
6503 if (frame_pointer_needed)
6504 clobbered_regs[HARD_FRAME_POINTER_REGNUM] = 1;
9a2a66ae 6505
beee1f75 6506 if (flag_pic)
1e639cb0 6507 clobbered_regs[PIC_OFFSET_TABLE_REGNUM]
3072d30e 6508 |= df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM);
4fed3f99 6509
1e639cb0 6510 clobbered_regs[BASE_REGNUM]
77beec48 6511 |= (cfun->machine->base_reg
6512 && REGNO (cfun->machine->base_reg) == BASE_REGNUM);
4fed3f99 6513
1e639cb0 6514 clobbered_regs[RETURN_REGNUM]
77beec48 6515 |= (!current_function_is_leaf
9bee2845 6516 || TARGET_TPF_PROFILING
77beec48 6517 || cfun->machine->split_branches_pending_p
6518 || cfun_frame_layout.save_return_addr_p
6519 || current_function_calls_eh_return
6520 || current_function_stdarg);
4fed3f99 6521
1e639cb0 6522 clobbered_regs[STACK_POINTER_REGNUM]
77beec48 6523 |= (!current_function_is_leaf
6524 || TARGET_TPF_PROFILING
6525 || cfun_save_high_fprs_p
6526 || get_frame_size () > 0
6527 || current_function_calls_alloca
6528 || current_function_stdarg);
1e639cb0 6529
beee1f75 6530 for (i = 6; i < 16; i++)
3072d30e 6531 if (df_regs_ever_live_p (i) || clobbered_regs[i])
beee1f75 6532 break;
8b4a4127 6533 for (j = 15; j > i; j--)
3072d30e 6534 if (df_regs_ever_live_p (j) || clobbered_regs[j])
beee1f75 6535 break;
9a2a66ae 6536
beee1f75 6537 if (i == 16)
6538 {
6539 /* Nothing to save/restore. */
5214e6ae 6540 cfun_frame_layout.first_save_gpr_slot = -1;
6541 cfun_frame_layout.last_save_gpr_slot = -1;
67928721 6542 cfun_frame_layout.first_save_gpr = -1;
6543 cfun_frame_layout.first_restore_gpr = -1;
6544 cfun_frame_layout.last_save_gpr = -1;
6545 cfun_frame_layout.last_restore_gpr = -1;
beee1f75 6546 }
6547 else
6548 {
5214e6ae 6549 /* Save slots for gprs from i to j. */
6550 cfun_frame_layout.first_save_gpr_slot = i;
6551 cfun_frame_layout.last_save_gpr_slot = j;
6552
6553 for (i = cfun_frame_layout.first_save_gpr_slot;
6554 i < cfun_frame_layout.last_save_gpr_slot + 1;
6555 i++)
6556 if (clobbered_regs[i])
6557 break;
6558
6559 for (j = cfun_frame_layout.last_save_gpr_slot; j > i; j--)
6560 if (clobbered_regs[j])
6561 break;
6562
6563 if (i == cfun_frame_layout.last_save_gpr_slot + 1)
6564 {
6565 /* Nothing to save/restore. */
6566 cfun_frame_layout.first_save_gpr = -1;
6567 cfun_frame_layout.first_restore_gpr = -1;
6568 cfun_frame_layout.last_save_gpr = -1;
6569 cfun_frame_layout.last_restore_gpr = -1;
6570 }
6571 else
6572 {
6573 /* Save / Restore from gpr i to j. */
6574 cfun_frame_layout.first_save_gpr = i;
6575 cfun_frame_layout.first_restore_gpr = i;
6576 cfun_frame_layout.last_save_gpr = j;
6577 cfun_frame_layout.last_restore_gpr = j;
6578 }
beee1f75 6579 }
9a2a66ae 6580
7ccc713a 6581 if (current_function_stdarg)
beee1f75 6582 {
67928721 6583 /* Varargs functions need to save gprs 2 to 6. */
6902d973 6584 if (cfun->va_list_gpr_size
6585 && current_function_args_info.gprs < GP_ARG_NUM_REG)
6586 {
6587 int min_gpr = current_function_args_info.gprs;
6588 int max_gpr = min_gpr + cfun->va_list_gpr_size;
6589 if (max_gpr > GP_ARG_NUM_REG)
6590 max_gpr = GP_ARG_NUM_REG;
6591
6592 if (cfun_frame_layout.first_save_gpr == -1
6593 || cfun_frame_layout.first_save_gpr > 2 + min_gpr)
5214e6ae 6594 {
6595 cfun_frame_layout.first_save_gpr = 2 + min_gpr;
6596 cfun_frame_layout.first_save_gpr_slot = 2 + min_gpr;
6597 }
6902d973 6598
6599 if (cfun_frame_layout.last_save_gpr == -1
6600 || cfun_frame_layout.last_save_gpr < 2 + max_gpr - 1)
5214e6ae 6601 {
6602 cfun_frame_layout.last_save_gpr = 2 + max_gpr - 1;
6603 cfun_frame_layout.last_save_gpr_slot = 2 + max_gpr - 1;
6604 }
6902d973 6605 }
beee1f75 6606
67928721 6607 /* Mark f0 and f2 (31 bit), or f0, f2, f4 and f6 (64 bit), to be saved. */
6902d973 6608 if (TARGET_HARD_FLOAT && cfun->va_list_fpr_size
6609 && current_function_args_info.fprs < FP_ARG_NUM_REG)
6610 {
6611 int min_fpr = current_function_args_info.fprs;
6612 int max_fpr = min_fpr + cfun->va_list_fpr_size;
6613 if (max_fpr > FP_ARG_NUM_REG)
6614 max_fpr = FP_ARG_NUM_REG;
6615
6616 /* ??? This is currently required to ensure proper location
6617 of the fpr save slots within the va_list save area. */
6618 if (TARGET_PACKED_STACK)
6619 min_fpr = 0;
6620
6621 for (i = min_fpr; i < max_fpr; i++)
6622 cfun_set_fpr_bit (i);
6623 }
67928721 6624 }
6625
6626 if (!TARGET_64BIT)
6627 for (i = 2; i < 4; i++)
3072d30e 6628 if (df_regs_ever_live_p (i + 16) && !global_regs[i + 16])
67928721 6629 cfun_set_fpr_bit (i);
6630}
6631
4fed3f99 6632/* Fill cfun->machine with info about frame of current function. */
67928721 6633
6634static void
4fed3f99 6635s390_frame_info (void)
67928721 6636{
6637 int i;
6638
6639 cfun_frame_layout.frame_size = get_frame_size ();
67928721 6640 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
3284a242 6641 fatal_error ("total size of local variables exceeds architecture limit");
67928721 6642
646a946e 6643 if (!TARGET_PACKED_STACK)
67928721 6644 {
6645 cfun_frame_layout.backchain_offset = 0;
6646 cfun_frame_layout.f0_offset = 16 * UNITS_PER_WORD;
6647 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
6648 cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
5214e6ae 6649 cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr_slot
67928721 6650 * UNITS_PER_WORD);
6651 }
646a946e 6652 else if (TARGET_BACKCHAIN) /* kernel stack layout */
67928721 6653 {
6654 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
6655 - UNITS_PER_WORD);
6656 cfun_frame_layout.gprs_offset
6657 = (cfun_frame_layout.backchain_offset
5214e6ae 6658 - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr_slot + 1)
67928721 6659 * UNITS_PER_WORD);
6660
6661 if (TARGET_64BIT)
6662 {
6663 cfun_frame_layout.f4_offset
6664 = (cfun_frame_layout.gprs_offset
6665 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6666
6667 cfun_frame_layout.f0_offset
6668 = (cfun_frame_layout.f4_offset
6669 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6670 }
6671 else
6672 {
99e8a714 6673 /* On 31 bit we have to take care of the alignment of the
6674 floating point regs to provide the fastest access. */
67928721 6675 cfun_frame_layout.f0_offset
99e8a714 6676 = ((cfun_frame_layout.gprs_offset
6677 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
67928721 6678 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6679
6680 cfun_frame_layout.f4_offset
6681 = (cfun_frame_layout.f0_offset
6682 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6683 }
6684 }
6685 else /* no backchain */
6686 {
6687 cfun_frame_layout.f4_offset
6688 = (STACK_POINTER_OFFSET
6689 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6690
6691 cfun_frame_layout.f0_offset
6692 = (cfun_frame_layout.f4_offset
6693 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6694
6695 cfun_frame_layout.gprs_offset
6696 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
6697 }
6698
6699 if (current_function_is_leaf
6700 && !TARGET_TPF_PROFILING
6701 && cfun_frame_layout.frame_size == 0
6702 && !cfun_save_high_fprs_p
6703 && !current_function_calls_alloca
6704 && !current_function_stdarg)
6705 return;
6706
646a946e 6707 if (!TARGET_PACKED_STACK)
119114cb 6708 cfun_frame_layout.frame_size += (STACK_POINTER_OFFSET
6709 + current_function_outgoing_args_size
67928721 6710 + cfun_frame_layout.high_fprs * 8);
6711 else
6712 {
e5c64bfc 6713 if (TARGET_BACKCHAIN)
6714 cfun_frame_layout.frame_size += UNITS_PER_WORD;
99e8a714 6715
6716 /* No alignment trouble here because f8-f15 are only saved under
6717 64 bit. */
67928721 6718 cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
6719 cfun_frame_layout.f4_offset),
6720 cfun_frame_layout.gprs_offset)
6721 - cfun_frame_layout.high_fprs * 8);
6722
6723 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;
6724
6725 for (i = 0; i < 8; i++)
6726 if (cfun_fpr_bit_p (i))
6727 cfun_frame_layout.frame_size += 8;
6728
6729 cfun_frame_layout.frame_size += cfun_gprs_save_area_size;
99e8a714 6730
6731 /* If under 31 bit an odd number of gprs has to be saved we have to adjust
6732 the frame size to sustain 8 byte alignment of stack frames. */
67928721 6733 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
6734 STACK_BOUNDARY / BITS_PER_UNIT - 1)
6735 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));
6736
6737 cfun_frame_layout.frame_size += current_function_outgoing_args_size;
beee1f75 6738 }
8b4a4127 6739}
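/* Sketch of the default (!TARGET_PACKED_STACK) layout computed above, as
   byte offsets from the stack pointer on function entry, assuming r6 is
   the first GPR save slot (31 bit / 64 bit):

      0 /   0   back chain
     24 /  48   GPR save slots (first_save_gpr_slot * UNITS_PER_WORD)
     64 / 128   f0, f2 save slots
     80 / 144   f4, f6 save slots
     96 / 160   STACK_POINTER_OFFSET -- end of the register save area

   f8-f15 (64 bit only; f8_offset is negative) as well as locals, spill
   slots and outgoing arguments live below the entry stack pointer, in
   the area allocated by decrementing it by frame_size.  */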
6740
4fed3f99 6741/* Generate frame layout. Fills in register and frame data for the current
6742 function in cfun->machine. This routine can be called multiple times;
6743 it will re-do the complete frame layout every time. */
8b4a4127 6744
4fed3f99 6745static void
6746s390_init_frame_layout (void)
4673c1a0 6747{
4fed3f99 6748 HOST_WIDE_INT frame_size;
6749 int base_used;
1e639cb0 6750 int clobbered_regs[16];
beee1f75 6751
4fed3f99 6752 /* On S/390 machines, we may need to perform branch splitting, which
6753 will require both base and return address register. We have no
6754 choice but to assume we're going to need them until right at the
6755 end of the machine dependent reorg phase. */
6756 if (!TARGET_CPU_ZARCH)
6757 cfun->machine->split_branches_pending_p = true;
6758
6759 do
6760 {
6761 frame_size = cfun_frame_layout.frame_size;
6762
6763 /* Try to predict whether we'll need the base register. */
6764 base_used = cfun->machine->split_branches_pending_p
6765 || current_function_uses_const_pool
3ea2a559 6766 || (!DISP_IN_RANGE (frame_size)
6767 && !CONST_OK_FOR_K (frame_size));
4fed3f99 6768
6769 /* Decide which register to use as literal pool base. In small
6770 leaf functions, try to use an unused call-clobbered register
6771 as base register to avoid save/restore overhead. */
6772 if (!base_used)
6773 cfun->machine->base_reg = NULL_RTX;
3072d30e 6774 else if (current_function_is_leaf && !df_regs_ever_live_p (5))
4fed3f99 6775 cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
6776 else
6777 cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);
67928721 6778
1e639cb0 6779 s390_register_info (clobbered_regs);
4fed3f99 6780 s390_frame_info ();
6781 }
6782 while (frame_size != cfun_frame_layout.frame_size);
4673c1a0 6783}
6784
4fed3f99 6785/* Update frame layout. Recompute actual register save data based on
6786 current info and update regs_ever_live for the special registers.
6787 May be called multiple times, but may never cause *more* registers
6788 to be saved than s390_init_frame_layout allocated room for. */
6789
6790static void
6791s390_update_frame_layout (void)
6792{
1e639cb0 6793 int clobbered_regs[16];
4fed3f99 6794
1e639cb0 6795 s390_register_info (clobbered_regs);
4fed3f99 6796
3072d30e 6797 df_set_regs_ever_live (BASE_REGNUM,
6798 clobbered_regs[BASE_REGNUM] ? true : false);
6799 df_set_regs_ever_live (RETURN_REGNUM,
6800 clobbered_regs[RETURN_REGNUM] ? true : false);
6801 df_set_regs_ever_live (STACK_POINTER_REGNUM,
6802 clobbered_regs[STACK_POINTER_REGNUM] ? true : false);
4fed3f99 6803
6804 if (cfun->machine->base_reg)
3072d30e 6805 df_set_regs_ever_live (REGNO (cfun->machine->base_reg), true);
4fed3f99 6806}
6807
8f1128bb 6808/* Return true if it is legal to put a value with MODE into REGNO. */
6809
6810bool
6811s390_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
6812{
6813 switch (REGNO_REG_CLASS (regno))
6814 {
6815 case FP_REGS:
6816 if (REGNO_PAIR_OK (regno, mode))
6817 {
6818 if (mode == SImode || mode == DImode)
6819 return true;
6820
6821 if (FLOAT_MODE_P (mode) && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
6822 return true;
6823 }
6824 break;
6825 case ADDR_REGS:
6826 if (FRAME_REGNO_P (regno) && mode == Pmode)
6827 return true;
6828
6829 /* fallthrough */
6830 case GENERAL_REGS:
6831 if (REGNO_PAIR_OK (regno, mode))
6832 {
6833 if (TARGET_64BIT
36868490 6834 || (mode != TFmode && mode != TCmode && mode != TDmode))
8f1128bb 6835 return true;
6836 }
6837 break;
6838 case CC_REGS:
6839 if (GET_MODE_CLASS (mode) == MODE_CC)
6840 return true;
6841 break;
6842 case ACCESS_REGS:
6843 if (REGNO_PAIR_OK (regno, mode))
6844 {
6845 if (mode == SImode || mode == Pmode)
6846 return true;
6847 }
6848 break;
6849 default:
6850 return false;
6851 }
6852
6853 return false;
6854}
6855
d1a5573e 6856/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6857
6858bool
6859s390_hard_regno_rename_ok (unsigned int old_reg, unsigned int new_reg)
6860{
6861 /* Once we've decided upon a register to use as base register, it must
6862 no longer be used for any other purpose. */
6863 if (cfun->machine->base_reg)
6864 if (REGNO (cfun->machine->base_reg) == old_reg
6865 || REGNO (cfun->machine->base_reg) == new_reg)
6866 return false;
6867
6868 return true;
6869}
6870
8f1128bb 6871/* Maximum number of registers to represent a value of mode MODE
6872 in a register of class CLASS. */
6873
6874int
6875s390_class_max_nregs (enum reg_class class, enum machine_mode mode)
6876{
6877 switch (class)
6878 {
6879 case FP_REGS:
6880 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6881 return 2 * ((GET_MODE_SIZE (mode) / 2 + 8 - 1) / 8);
6882 else
6883 return (GET_MODE_SIZE (mode) + 8 - 1) / 8;
6884 case ACCESS_REGS:
6885 return (GET_MODE_SIZE (mode) + 4 - 1) / 4;
6886 default:
6887 break;
6888 }
6889 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6890}
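/* E.g. a TFmode value (16 bytes) occupies (16 + 8 - 1) / 8 = 2 FP_REGS,
   and a DCmode value (two 8-byte parts) 2 * ((16/2 + 8 - 1) / 8) = 2.  */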
6891
4fed3f99 6892/* Return true if register FROM can be eliminated via register TO. */
6893
6894bool
6895s390_can_eliminate (int from, int to)
6896{
d1a5573e 6897 /* On zSeries machines, we have not marked the base register as fixed.
6898 Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM.
6899 If a function requires the base register, we say here that this
6900 elimination cannot be performed. This will cause reload to free
6901 up the base register (as if it were fixed). On the other hand,
6902 if the current function does *not* require the base register, we
6903 say here the elimination succeeds, which in turn allows reload
6904 to allocate the base register for any other purpose. */
6905 if (from == BASE_REGNUM && to == BASE_REGNUM)
6906 {
6907 if (TARGET_CPU_ZARCH)
6908 {
6909 s390_init_frame_layout ();
6910 return cfun->machine->base_reg == NULL_RTX;
6911 }
6912
6913 return false;
6914 }
6915
6916 /* Everything else must point into the stack frame. */
4fed3f99 6917 gcc_assert (to == STACK_POINTER_REGNUM
6918 || to == HARD_FRAME_POINTER_REGNUM);
6919
6920 gcc_assert (from == FRAME_POINTER_REGNUM
6921 || from == ARG_POINTER_REGNUM
6922 || from == RETURN_ADDRESS_POINTER_REGNUM);
6923
6924 /* Make sure we actually saved the return address. */
6925 if (from == RETURN_ADDRESS_POINTER_REGNUM)
6926 if (!current_function_calls_eh_return
6927 && !current_function_stdarg
6928 && !cfun_frame_layout.save_return_addr_p)
6929 return false;
6930
6931 return true;
6932}
6933
6934/* Return the offset between registers FROM and TO directly after the prologue. */
7cbfc974 6935
6936HOST_WIDE_INT
4fed3f99 6937s390_initial_elimination_offset (int from, int to)
7cbfc974 6938{
4fed3f99 6939 HOST_WIDE_INT offset;
6940 int index;
7cbfc974 6941
4fed3f99 6942 /* ??? Why are we called for non-eliminable pairs? */
6943 if (!s390_can_eliminate (from, to))
6944 return 0;
6945
6946 switch (from)
6947 {
6948 case FRAME_POINTER_REGNUM:
119114cb 6949 offset = (get_frame_size()
6950 + STACK_POINTER_OFFSET
6951 + current_function_outgoing_args_size);
4fed3f99 6952 break;
67928721 6953
4fed3f99 6954 case ARG_POINTER_REGNUM:
6955 s390_init_frame_layout ();
6956 offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
6957 break;
6958
6959 case RETURN_ADDRESS_POINTER_REGNUM:
6960 s390_init_frame_layout ();
5214e6ae 6961 index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr_slot;
4fed3f99 6962 gcc_assert (index >= 0);
6963 offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
6964 offset += index * UNITS_PER_WORD;
6965 break;
6966
d1a5573e 6967 case BASE_REGNUM:
6968 offset = 0;
6969 break;
6970
4fed3f99 6971 default:
6972 gcc_unreachable ();
6973 }
6974
6975 return offset;
7cbfc974 6976}
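/* For instance, on 64 bit with a frame size of 200 bytes, eliminating
   ARG_POINTER_REGNUM into STACK_POINTER_REGNUM yields an offset of
   200 + STACK_POINTER_OFFSET = 200 + 160 = 360.  */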
6977
8b4a4127 6978/* Emit insn to save fpr REGNUM at offset OFFSET relative
f81e845f 6979 to register BASE. Return generated insn. */
56769981 6980
4673c1a0 6981static rtx
b40da9a7 6982save_fpr (rtx base, int offset, int regnum)
4673c1a0 6983{
8b4a4127 6984 rtx addr;
6985 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
ce1d5a67 6986
6987 if (regnum >= 16 && regnum <= (16 + FP_ARG_NUM_REG))
6988 set_mem_alias_set (addr, get_varargs_alias_set ());
6989 else
6990 set_mem_alias_set (addr, get_frame_alias_set ());
4673c1a0 6991
8b4a4127 6992 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
6993}
4673c1a0 6994
8b4a4127 6995/* Emit insn to restore fpr REGNUM from offset OFFSET relative
f81e845f 6996 to register BASE. Return generated insn. */
4673c1a0 6997
8b4a4127 6998static rtx
b40da9a7 6999restore_fpr (rtx base, int offset, int regnum)
8b4a4127 7000{
7001 rtx addr;
7002 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
ce1d5a67 7003 set_mem_alias_set (addr, get_frame_alias_set ());
4673c1a0 7004
8b4a4127 7005 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
4673c1a0 7006}
7007
9a2a66ae 7008/* Generate insn to save registers FIRST to LAST into
f81e845f 7009 the register save area located at offset OFFSET
9a2a66ae 7010 relative to register BASE. */
4673c1a0 7011
9a2a66ae 7012static rtx
b40da9a7 7013save_gprs (rtx base, int offset, int first, int last)
4673c1a0 7014{
9a2a66ae 7015 rtx addr, insn, note;
7016 int i;
7017
67928721 7018 addr = plus_constant (base, offset);
9a2a66ae 7019 addr = gen_rtx_MEM (Pmode, addr);
ce1d5a67 7020
7021 set_mem_alias_set (addr, get_frame_alias_set ());
9a2a66ae 7022
7023 /* Special-case single register. */
7024 if (first == last)
7025 {
7026 if (TARGET_64BIT)
7027 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
7028 else
7029 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
7030
7031 RTX_FRAME_RELATED_P (insn) = 1;
7032 return insn;
7033 }
7034
7035
7036 insn = gen_store_multiple (addr,
7037 gen_rtx_REG (Pmode, first),
7038 GEN_INT (last - first + 1));
7039
ce1d5a67 7040 if (first <= 6 && current_function_stdarg)
7041 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7042 {
7043 rtx mem = XEXP (XVECEXP (PATTERN (insn), 0, i), 0);
7044
7045 if (first + i <= 6)
7046 set_mem_alias_set (mem, get_varargs_alias_set ());
7047 }
9a2a66ae 7048
7049 /* We need to set the FRAME_RELATED flag on all SETs
7050 inside the store-multiple pattern.
7051
7052 However, we must not emit DWARF records for registers 2..5
f81e845f 7053 if they are stored for use by variable arguments ...
9a2a66ae 7054
3ce7ff97 7055 ??? Unfortunately, it is not enough to simply not set the
9a2a66ae 7056 FRAME_RELATED flags for those SETs, because the first SET
7057 of the PARALLEL is always treated as if it had the flag
7058 set, even if it does not. Therefore we emit a new pattern
7059 without those registers as REG_FRAME_RELATED_EXPR note. */
7060
7061 if (first >= 6)
7062 {
7063 rtx pat = PATTERN (insn);
7064
7065 for (i = 0; i < XVECLEN (pat, 0); i++)
7066 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
7067 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
7068
7069 RTX_FRAME_RELATED_P (insn) = 1;
7070 }
7071 else if (last >= 6)
7072 {
67928721 7073 addr = plus_constant (base, offset + (6 - first) * UNITS_PER_WORD);
f81e845f 7074 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
9a2a66ae 7075 gen_rtx_REG (Pmode, 6),
7076 GEN_INT (last - 6 + 1));
7077 note = PATTERN (note);
7078
7079 REG_NOTES (insn) =
f81e845f 7080 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9a2a66ae 7081 note, REG_NOTES (insn));
7082
7083 for (i = 0; i < XVECLEN (note, 0); i++)
7084 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
7085 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
7086
7087 RTX_FRAME_RELATED_P (insn) = 1;
7088 }
7089
7090 return insn;
8b4a4127 7091}
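/* With BASE being the stack pointer and the default gprs_offset, a call
   like save_gprs (base, offset, 6, 15) results in a single store-multiple
   insn, rendered by the assembler as e.g.
     stmg %r6,%r15,48(%r15)   (64 bit)
     stm  %r6,%r15,24(%r15)   (31 bit)  */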
4673c1a0 7092
9a2a66ae 7093/* Generate insn to restore registers FIRST to LAST from
f81e845f 7094 the register save area located at offset OFFSET
9a2a66ae 7095 relative to register BASE. */
4673c1a0 7096
9a2a66ae 7097static rtx
b40da9a7 7098restore_gprs (rtx base, int offset, int first, int last)
8b4a4127 7099{
9a2a66ae 7100 rtx addr, insn;
7101
67928721 7102 addr = plus_constant (base, offset);
9a2a66ae 7103 addr = gen_rtx_MEM (Pmode, addr);
ce1d5a67 7104 set_mem_alias_set (addr, get_frame_alias_set ());
9a2a66ae 7105
7106 /* Special-case single register. */
7107 if (first == last)
7108 {
7109 if (TARGET_64BIT)
7110 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
7111 else
7112 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
7113
7114 return insn;
7115 }
7116
7117 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
7118 addr,
7119 GEN_INT (last - first + 1));
7120 return insn;
8b4a4127 7121}
4673c1a0 7122
20074f87 7123/* Return insn sequence to load the GOT register. */
12ef3745 7124
7125static GTY(()) rtx got_symbol;
20074f87 7126rtx
7127s390_load_got (void)
12ef3745 7128{
20074f87 7129 rtx insns;
7130
12ef3745 7131 if (!got_symbol)
7132 {
7133 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
7134 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
7135 }
7136
20074f87 7137 start_sequence ();
7138
dafc8d45 7139 if (TARGET_CPU_ZARCH)
12ef3745 7140 {
20074f87 7141 emit_move_insn (pic_offset_table_rtx, got_symbol);
12ef3745 7142 }
7143 else
7144 {
20074f87 7145 rtx offset;
12ef3745 7146
f81e845f 7147 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
12ef3745 7148 UNSPEC_LTREL_OFFSET);
7149 offset = gen_rtx_CONST (Pmode, offset);
7150 offset = force_const_mem (Pmode, offset);
7151
20074f87 7152 emit_move_insn (pic_offset_table_rtx, offset);
12ef3745 7153
f81e845f 7154 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
12ef3745 7155 UNSPEC_LTREL_BASE);
7156 offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
7157
20074f87 7158 emit_move_insn (pic_offset_table_rtx, offset);
12ef3745 7159 }
20074f87 7160
7161 insns = get_insns ();
7162 end_sequence ();
7163 return insns;
12ef3745 7164}
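/* On non-zarch targets, where no LARL is available, the sequence built
   above instead loads the pool-relative offset of _GLOBAL_OFFSET_TABLE_
   from the literal pool (UNSPEC_LTREL_OFFSET) and adds the literal pool
   base (UNSPEC_LTREL_BASE), which machine-dependent reorg resolves later.  */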
7165
8b4a4127 7166/* Expand the prologue into a bunch of separate insns. */
4673c1a0 7167
8b4a4127 7168void
b40da9a7 7169s390_emit_prologue (void)
8b4a4127 7170{
8b4a4127 7171 rtx insn, addr;
7172 rtx temp_reg;
7bbebc45 7173 int i;
67928721 7174 int offset;
7175 int next_fpr = 0;
4673c1a0 7176
4fed3f99 7177 /* Complete frame layout. */
beee1f75 7178
4fed3f99 7179 s390_update_frame_layout ();
8b4a4127 7180
20074f87 7181 /* Annotate all constant pool references to let the scheduler know
7182 they implicitly use the base register. */
7183
7184 push_topmost_sequence ();
7185
7186 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7187 if (INSN_P (insn))
3072d30e 7188 {
7189 annotate_constant_pool_refs (&PATTERN (insn));
7190 df_insn_rescan (insn);
7191 }
20074f87 7192
7193 pop_topmost_sequence ();
7194
f81e845f 7195 /* Choose the best register to use as a temporary within the prologue.
 7196 See below for why TPF must use register 1. */
7197
1e639cb0 7198 if (!has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
7199 && !current_function_is_leaf
7200 && !TARGET_TPF_PROFILING)
8b4a4127 7201 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4673c1a0 7202 else
8b4a4127 7203 temp_reg = gen_rtx_REG (Pmode, 1);
4673c1a0 7204
8b4a4127 7205 /* Save call saved gprs. */
67928721 7206 if (cfun_frame_layout.first_save_gpr != -1)
4ac7fd98 7207 {
7208 insn = save_gprs (stack_pointer_rtx,
5214e6ae 7209 cfun_frame_layout.gprs_offset +
7210 UNITS_PER_WORD * (cfun_frame_layout.first_save_gpr
7211 - cfun_frame_layout.first_save_gpr_slot),
4ac7fd98 7212 cfun_frame_layout.first_save_gpr,
7213 cfun_frame_layout.last_save_gpr);
7214 emit_insn (insn);
7215 }
8b4a4127 7216
c2c1332a 7217 /* Dummy insn to mark literal pool slot. */
f81e845f 7218
4fed3f99 7219 if (cfun->machine->base_reg)
7220 emit_insn (gen_main_pool (cfun->machine->base_reg));
f81e845f 7221
67928721 7222 offset = cfun_frame_layout.f0_offset;
8b4a4127 7223
67928721 7224 /* Save f0 and f2. */
7225 for (i = 0; i < 2; i++)
7226 {
7227 if (cfun_fpr_bit_p (i))
7228 {
7229 save_fpr (stack_pointer_rtx, offset, i + 16);
7230 offset += 8;
7231 }
646a946e 7232 else if (!TARGET_PACKED_STACK)
67928721 7233 offset += 8;
7234 }
4673c1a0 7235
67928721 7236 /* Save f4 and f6. */
7237 offset = cfun_frame_layout.f4_offset;
7238 for (i = 2; i < 4; i++)
7239 {
7240 if (cfun_fpr_bit_p (i))
8b4a4127 7241 {
67928721 7242 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7243 offset += 8;
7244
 7245 /* If f4 and f6 are call-clobbered, they are saved only because of stdargs
 7246 and are therefore not frame-related. */
7247 if (!call_really_used_regs[i + 16])
7248 RTX_FRAME_RELATED_P (insn) = 1;
8b4a4127 7249 }
646a946e 7250 else if (!TARGET_PACKED_STACK)
67928721 7251 offset += 8;
7252 }
7253
646a946e 7254 if (TARGET_PACKED_STACK
67928721 7255 && cfun_save_high_fprs_p
7256 && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
7257 {
7258 offset = (cfun_frame_layout.f8_offset
7259 + (cfun_frame_layout.high_fprs - 1) * 8);
7260
7261 for (i = 15; i > 7 && offset >= 0; i--)
7262 if (cfun_fpr_bit_p (i))
7263 {
7264 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7265
7266 RTX_FRAME_RELATED_P (insn) = 1;
7267 offset -= 8;
7268 }
7269 if (offset >= cfun_frame_layout.f8_offset)
7270 next_fpr = i + 16;
7271 }
7272
646a946e 7273 if (!TARGET_PACKED_STACK)
67928721 7274 next_fpr = cfun_save_high_fprs_p ? 31 : 0;
4673c1a0 7275
8b4a4127 7276 /* Decrement stack pointer. */
4673c1a0 7277
67928721 7278 if (cfun_frame_layout.frame_size > 0)
8b4a4127 7279 {
67928721 7280 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
4673c1a0 7281
cbb300e8 7282 if (s390_stack_size)
7283 {
00d233e6 7284 HOST_WIDE_INT stack_guard;
cbb300e8 7285
00d233e6 7286 if (s390_stack_guard)
7287 stack_guard = s390_stack_guard;
cbb300e8 7288 else
00d233e6 7289 {
 7290 /* If no value for the stack guard is provided, the smallest power of 2
 7291 larger than the current frame size is chosen. */
7292 stack_guard = 1;
7293 while (stack_guard < cfun_frame_layout.frame_size)
7294 stack_guard <<= 1;
7295 }
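 /* Worked example (illustrative, not from the original sources): a frame
    size of 8960 (0x2300) bytes yields a stack guard of 16384 (0x4000),
    the next larger power of 2. */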
cbb300e8 7296
00d233e6 7297 if (cfun_frame_layout.frame_size >= s390_stack_size)
7298 {
7299 warning (0, "frame size of function %qs is "
7300 HOST_WIDE_INT_PRINT_DEC
 7301 " bytes, exceeding the user-provided stack limit of "
7302 HOST_WIDE_INT_PRINT_DEC " bytes. "
7303 "An unconditional trap is added.",
7304 current_function_name(), cfun_frame_layout.frame_size,
7305 s390_stack_size);
7306 emit_insn (gen_trap ());
7307 }
7308 else
7309 {
7310 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
7311 & ~(stack_guard - 1));
7312 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
7313 GEN_INT (stack_check_mask));
7314 if (TARGET_64BIT)
7315 gen_cmpdi (t, const0_rtx);
7316 else
7317 gen_cmpsi (t, const0_rtx);
7318
7319 emit_insn (gen_conditional_trap (gen_rtx_EQ (CCmode,
7320 gen_rtx_REG (CCmode,
7321 CC_REGNUM),
7322 const0_rtx),
7323 const0_rtx));
7324 }
cbb300e8 7325 }
7326
7327 if (s390_warn_framesize > 0
7328 && cfun_frame_layout.frame_size >= s390_warn_framesize)
c3ceba8e 7329 warning (0, "frame size of %qs is " HOST_WIDE_INT_PRINT_DEC " bytes",
cbb300e8 7330 current_function_name (), cfun_frame_layout.frame_size);
7331
7332 if (s390_warn_dynamicstack_p && cfun->calls_alloca)
c3ceba8e 7333 warning (0, "%qs uses dynamic stack allocation", current_function_name ());
cbb300e8 7334
8b4a4127 7335 /* Save incoming stack pointer into temp reg. */
e5c64bfc 7336 if (TARGET_BACKCHAIN || next_fpr)
67928721 7337 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
f81e845f 7338
1fc184ee 7339 /* Subtract frame size from stack pointer. */
8b4a4127 7340
51aa1e9c 7341 if (DISP_IN_RANGE (INTVAL (frame_off)))
7342 {
f81e845f 7343 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
67928721 7344 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
b40da9a7 7345 frame_off));
51aa1e9c 7346 insn = emit_insn (insn);
7347 }
7348 else
7349 {
cb888f33 7350 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
51aa1e9c 7351 frame_off = force_const_mem (Pmode, frame_off);
7352
7353 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
20074f87 7354 annotate_constant_pool_refs (&PATTERN (insn));
51aa1e9c 7355 }
8b4a4127 7356
8b4a4127 7357 RTX_FRAME_RELATED_P (insn) = 1;
f81e845f 7358 REG_NOTES (insn) =
8b4a4127 7359 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7360 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
67928721 7361 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7362 GEN_INT (-cfun_frame_layout.frame_size))),
8b4a4127 7363 REG_NOTES (insn));
7364
7365 /* Set backchain. */
f81e845f 7366
e5c64bfc 7367 if (TARGET_BACKCHAIN)
4673c1a0 7368 {
67928721 7369 if (cfun_frame_layout.backchain_offset)
7370 addr = gen_rtx_MEM (Pmode,
7371 plus_constant (stack_pointer_rtx,
7372 cfun_frame_layout.backchain_offset));
7373 else
7374 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
ce1d5a67 7375 set_mem_alias_set (addr, get_frame_alias_set ());
8b4a4127 7376 insn = emit_insn (gen_move_insn (addr, temp_reg));
4673c1a0 7377 }
90524d70 7378
7379 /* If we support asynchronous exceptions (e.g. for Java),
7380 we need to make sure the backchain pointer is set up
7381 before any possibly trapping memory access. */
7382
e5c64bfc 7383 if (TARGET_BACKCHAIN && flag_non_call_exceptions)
90524d70 7384 {
7385 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
7386 emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
7387 }
8b4a4127 7388 }
4673c1a0 7389
8b4a4127 7390 /* Save fprs 8 - 15 (64 bit ABI). */
f81e845f 7391
67928721 7392 if (cfun_save_high_fprs_p && next_fpr)
8b4a4127 7393 {
67928721 7394 insn = emit_insn (gen_add2_insn (temp_reg,
7395 GEN_INT (cfun_frame_layout.f8_offset)));
7396
7397 offset = 0;
4673c1a0 7398
67928721 7399 for (i = 24; i <= next_fpr; i++)
7400 if (cfun_fpr_bit_p (i - 16))
8b4a4127 7401 {
f81e845f 7402 rtx addr = plus_constant (stack_pointer_rtx,
67928721 7403 cfun_frame_layout.frame_size
7404 + cfun_frame_layout.f8_offset
7405 + offset);
7406
7407 insn = save_fpr (temp_reg, offset, i);
7408 offset += 8;
8b4a4127 7409 RTX_FRAME_RELATED_P (insn) = 1;
f81e845f 7410 REG_NOTES (insn) =
8b4a4127 7411 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
67928721 7412 gen_rtx_SET (VOIDmode,
7413 gen_rtx_MEM (DFmode, addr),
7414 gen_rtx_REG (DFmode, i)),
7415 REG_NOTES (insn));
8b4a4127 7416 }
7417 }
f81e845f 7418
8b4a4127 7419 /* Set frame pointer, if needed. */
f81e845f 7420
5a5e802f 7421 if (frame_pointer_needed)
8b4a4127 7422 {
7423 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
7424 RTX_FRAME_RELATED_P (insn) = 1;
7425 }
4673c1a0 7426
8b4a4127 7427 /* Set up got pointer, if needed. */
f81e845f 7428
3072d30e 7429 if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
20074f87 7430 {
7431 rtx insns = s390_load_got ();
7432
7433 for (insn = insns; insn; insn = NEXT_INSN (insn))
3072d30e 7434 annotate_constant_pool_refs (&PATTERN (insn));
20074f87 7435
7436 emit_insn (insns);
7437 }
f81e845f 7438
de253666 7439 if (TARGET_TPF_PROFILING)
f81e845f 7440 {
7441 /* Generate a BAS instruction to serve as a function
7442 entry intercept to facilitate the use of tracing
346fecd5 7443 algorithms located at the branch target. */
7444 emit_insn (gen_prologue_tpf ());
f81e845f 7445
7446 /* Emit a blockage here so that all code
7447 lies between the profiling mechanisms. */
7448 emit_insn (gen_blockage ());
7449 }
8b4a4127 7450}
4673c1a0 7451
d2833c15 7452/* Expand the epilogue into a bunch of separate insns. */
4673c1a0 7453
8b4a4127 7454void
7346ca58 7455s390_emit_epilogue (bool sibcall)
8b4a4127 7456{
8b4a4127 7457 rtx frame_pointer, return_reg;
abd8f04d 7458 int area_bottom, area_top, offset = 0;
67928721 7459 int next_offset;
8b4a4127 7460 rtvec p;
78c2b526 7461 int i;
4673c1a0 7462
de253666 7463 if (TARGET_TPF_PROFILING)
f81e845f 7464 {
7465
 7466 /* Generate a BAS instruction to serve as a function
 7467 exit intercept to facilitate the use of tracing
346fecd5 7468 algorithms located at the branch target. */
f81e845f 7469
f81e845f 7470 /* Emit a blockage here so that all code
7471 lies between the profiling mechanisms. */
7472 emit_insn (gen_blockage ());
7473
346fecd5 7474 emit_insn (gen_epilogue_tpf ());
f81e845f 7475 }
7476
8b4a4127 7477 /* Check whether to use frame or stack pointer for restore. */
4673c1a0 7478
67928721 7479 frame_pointer = (frame_pointer_needed
7480 ? hard_frame_pointer_rtx : stack_pointer_rtx);
4673c1a0 7481
67928721 7482 s390_frame_area (&area_bottom, &area_top);
4673c1a0 7483
f81e845f 7484 /* Check whether we can access the register save area.
8b4a4127 7485 If not, increment the frame pointer as required. */
4673c1a0 7486
8b4a4127 7487 if (area_top <= area_bottom)
7488 {
7489 /* Nothing to restore. */
7490 }
67928721 7491 else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
7492 && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
8b4a4127 7493 {
7494 /* Area is in range. */
67928721 7495 offset = cfun_frame_layout.frame_size;
8b4a4127 7496 }
7497 else
7498 {
7499 rtx insn, frame_off;
4673c1a0 7500
f81e845f 7501 offset = area_bottom < 0 ? -area_bottom : 0;
67928721 7502 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);
4673c1a0 7503
51aa1e9c 7504 if (DISP_IN_RANGE (INTVAL (frame_off)))
7505 {
f81e845f 7506 insn = gen_rtx_SET (VOIDmode, frame_pointer,
51aa1e9c 7507 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
7508 insn = emit_insn (insn);
7509 }
7510 else
7511 {
cb888f33 7512 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
51aa1e9c 7513 frame_off = force_const_mem (Pmode, frame_off);
4673c1a0 7514
51aa1e9c 7515 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
20074f87 7516 annotate_constant_pool_refs (&PATTERN (insn));
51aa1e9c 7517 }
8b4a4127 7518 }
4673c1a0 7519
8b4a4127 7520 /* Restore call saved fprs. */
7521
7522 if (TARGET_64BIT)
4673c1a0 7523 {
67928721 7524 if (cfun_save_high_fprs_p)
7525 {
7526 next_offset = cfun_frame_layout.f8_offset;
7527 for (i = 24; i < 32; i++)
7528 {
7529 if (cfun_fpr_bit_p (i - 16))
7530 {
7531 restore_fpr (frame_pointer,
7532 offset + next_offset, i);
7533 next_offset += 8;
7534 }
7535 }
7536 }
7537
4673c1a0 7538 }
7539 else
7540 {
67928721 7541 next_offset = cfun_frame_layout.f4_offset;
78c2b526 7542 for (i = 18; i < 20; i++)
67928721 7543 {
7544 if (cfun_fpr_bit_p (i - 16))
7545 {
7546 restore_fpr (frame_pointer,
7547 offset + next_offset, i);
7548 next_offset += 8;
7549 }
646a946e 7550 else if (!TARGET_PACKED_STACK)
67928721 7551 next_offset += 8;
7552 }
7553
8b4a4127 7554 }
4673c1a0 7555
8b4a4127 7556 /* Return register. */
7557
f81e845f 7558 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
8b4a4127 7559
7560 /* Restore call saved gprs. */
7561
67928721 7562 if (cfun_frame_layout.first_restore_gpr != -1)
8b4a4127 7563 {
9a2a66ae 7564 rtx insn, addr;
43935856 7565 int i;
7566
f81e845f 7567 /* Check for global registers and save them
43935856 7568 to the stack locations from which they are restored. */
7569
67928721 7570 for (i = cfun_frame_layout.first_restore_gpr;
7571 i <= cfun_frame_layout.last_restore_gpr;
43935856 7572 i++)
7573 {
f81e845f 7574 /* These registers are special and need to be
ab60b1c9 7575 restored in any case. */
f81e845f 7576 if (i == STACK_POINTER_REGNUM
ab60b1c9 7577 || i == RETURN_REGNUM
a0f191f4 7578 || i == BASE_REGNUM
abd8f04d 7579 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
ab60b1c9 7580 continue;
7581
43935856 7582 if (global_regs[i])
7583 {
f81e845f 7584 addr = plus_constant (frame_pointer,
67928721 7585 offset + cfun_frame_layout.gprs_offset
5214e6ae 7586 + (i - cfun_frame_layout.first_save_gpr_slot)
67928721 7587 * UNITS_PER_WORD);
43935856 7588 addr = gen_rtx_MEM (Pmode, addr);
ce1d5a67 7589 set_mem_alias_set (addr, get_frame_alias_set ());
43935856 7590 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
f81e845f 7591 }
43935856 7592 }
8b4a4127 7593
7346ca58 7594 if (! sibcall)
4673c1a0 7595 {
7346ca58 7596 /* Fetch the return address from the stack before the load multiple;
 7597 this helps scheduling. */
f588eb9f 7598
67928721 7599 if (cfun_frame_layout.save_return_addr_p
7600 || (cfun_frame_layout.first_restore_gpr < BASE_REGNUM
7601 && cfun_frame_layout.last_restore_gpr > RETURN_REGNUM))
7346ca58 7602 {
7603 int return_regnum = find_unused_clobbered_reg();
7604 if (!return_regnum)
7605 return_regnum = 4;
7606 return_reg = gen_rtx_REG (Pmode, return_regnum);
f588eb9f 7607
7346ca58 7608 addr = plus_constant (frame_pointer,
67928721 7609 offset + cfun_frame_layout.gprs_offset
7610 + (RETURN_REGNUM
5214e6ae 7611 - cfun_frame_layout.first_save_gpr_slot)
67928721 7612 * UNITS_PER_WORD);
7346ca58 7613 addr = gen_rtx_MEM (Pmode, addr);
ce1d5a67 7614 set_mem_alias_set (addr, get_frame_alias_set ());
7346ca58 7615 emit_move_insn (return_reg, addr);
7616 }
4673c1a0 7617 }
8b4a4127 7618
67928721 7619 insn = restore_gprs (frame_pointer,
7620 offset + cfun_frame_layout.gprs_offset
7621 + (cfun_frame_layout.first_restore_gpr
5214e6ae 7622 - cfun_frame_layout.first_save_gpr_slot)
67928721 7623 * UNITS_PER_WORD,
7624 cfun_frame_layout.first_restore_gpr,
7625 cfun_frame_layout.last_restore_gpr);
9a2a66ae 7626 emit_insn (insn);
8b4a4127 7627 }
4673c1a0 7628
7346ca58 7629 if (! sibcall)
7630 {
f81e845f 7631
7346ca58 7632 /* Return to caller. */
f588eb9f 7633
7346ca58 7634 p = rtvec_alloc (2);
f588eb9f 7635
7346ca58 7636 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
7637 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
7638 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
7639 }
4673c1a0 7640}
7641
4673c1a0 7642
f81e845f 7643/* Return the size in bytes of a function argument of
56769981 7644 type TYPE and/or mode MODE. At least one of TYPE or
7645 MODE must be specified. */
4673c1a0 7646
7647static int
b40da9a7 7648s390_function_arg_size (enum machine_mode mode, tree type)
4673c1a0 7649{
7650 if (type)
7651 return int_size_in_bytes (type);
7652
0c034860 7653 /* No type info available for some library calls ... */
4673c1a0 7654 if (mode != BLKmode)
7655 return GET_MODE_SIZE (mode);
7656
 7657 /* If we have neither type nor mode, abort. */
32eda510 7658 gcc_unreachable ();
4673c1a0 7659}
7660
59652f3f 7661/* Return true if a function argument of type TYPE and mode MODE
7662 is to be passed in a floating-point register, if available. */
7663
7664static bool
b40da9a7 7665s390_function_arg_float (enum machine_mode mode, tree type)
59652f3f 7666{
201e502c 7667 int size = s390_function_arg_size (mode, type);
7668 if (size > 8)
7669 return false;
7670
59652f3f 7671 /* Soft-float changes the ABI: no floating-point registers are used. */
7672 if (TARGET_SOFT_FLOAT)
7673 return false;
7674
7675 /* No type info available for some library calls ... */
7676 if (!type)
36868490 7677 return mode == SFmode || mode == DFmode || mode == SDmode || mode == DDmode;
59652f3f 7678
7679 /* The ABI says that record types with a single member are treated
7680 just like that member would be. */
7681 while (TREE_CODE (type) == RECORD_TYPE)
7682 {
7683 tree field, single = NULL_TREE;
7684
7685 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7686 {
7687 if (TREE_CODE (field) != FIELD_DECL)
7688 continue;
7689
7690 if (single == NULL_TREE)
7691 single = TREE_TYPE (field);
7692 else
7693 return false;
7694 }
7695
7696 if (single == NULL_TREE)
7697 return false;
7698 else
7699 type = single;
7700 }
7701
7702 return TREE_CODE (type) == REAL_TYPE;
7703}
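/* Illustrative example (not part of the original sources): under the
   single-member rule above, a wrapper such as

     struct wrapped_double { double d; };

   is passed exactly like a plain 'double', i.e. in a floating-point
   register when one is available. */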
7704
201e502c 7705/* Return true if a function argument of type TYPE and mode MODE
7706 is to be passed in an integer register, or a pair of integer
7707 registers, if available. */
7708
7709static bool
7710s390_function_arg_integer (enum machine_mode mode, tree type)
7711{
7712 int size = s390_function_arg_size (mode, type);
7713 if (size > 8)
7714 return false;
7715
7716 /* No type info available for some library calls ... */
7717 if (!type)
7718 return GET_MODE_CLASS (mode) == MODE_INT
36868490 7719 || (TARGET_SOFT_FLOAT && SCALAR_FLOAT_MODE_P (mode));
201e502c 7720
7721 /* We accept small integral (and similar) types. */
7722 if (INTEGRAL_TYPE_P (type)
f588eb9f 7723 || POINTER_TYPE_P (type)
201e502c 7724 || TREE_CODE (type) == OFFSET_TYPE
7725 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
7726 return true;
7727
7728 /* We also accept structs of size 1, 2, 4, 8 that are not
f588eb9f 7729 passed in floating-point registers. */
201e502c 7730 if (AGGREGATE_TYPE_P (type)
7731 && exact_log2 (size) >= 0
7732 && !s390_function_arg_float (mode, type))
7733 return true;
7734
7735 return false;
7736}
7737
56769981 7738 /* Return true if a function argument of type TYPE and mode MODE
7739 is to be passed by reference. The ABI specifies that only
7740 structures of size 1, 2, 4, or 8 bytes are passed by value,
7741 all other structures (and complex numbers) are passed by
7742 reference. */
7743
b981d932 7744static bool
7745s390_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
7746 enum machine_mode mode, tree type,
7747 bool named ATTRIBUTE_UNUSED)
4673c1a0 7748{
7749 int size = s390_function_arg_size (mode, type);
201e502c 7750 if (size > 8)
7751 return true;
4673c1a0 7752
7753 if (type)
7754 {
201e502c 7755 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
4673c1a0 7756 return true;
7757
201e502c 7758 if (TREE_CODE (type) == COMPLEX_TYPE
7759 || TREE_CODE (type) == VECTOR_TYPE)
4673c1a0 7760 return true;
7761 }
f81e845f 7762
4673c1a0 7763 return false;
4673c1a0 7764}
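/* Illustrative examples (assumptions about typical layouts, not part
   of the original sources):

     struct s4 { int a; };           size 4  -> passed by value
     struct s6 { short a, b, c; };   size 6  -> passed by reference
     struct s8 { long a; };          size 8  -> passed by value (64 bit)
     _Complex double z;                      -> passed by reference  */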
7765
7766/* Update the data in CUM to advance over an argument of mode MODE and
7767 data type TYPE. (TYPE is null for libcalls where that information
56769981 7768 may not be available.). The boolean NAMED specifies whether the
7769 argument is a named argument (as opposed to an unnamed argument
7770 matching an ellipsis). */
4673c1a0 7771
7772void
b40da9a7 7773s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
7774 tree type, int named ATTRIBUTE_UNUSED)
4673c1a0 7775{
b981d932 7776 if (s390_function_arg_float (mode, type))
4673c1a0 7777 {
59652f3f 7778 cum->fprs += 1;
4673c1a0 7779 }
201e502c 7780 else if (s390_function_arg_integer (mode, type))
4673c1a0 7781 {
7782 int size = s390_function_arg_size (mode, type);
7783 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
7784 }
201e502c 7785 else
32eda510 7786 gcc_unreachable ();
4673c1a0 7787}
7788
56769981 7789/* Define where to put the arguments to a function.
7790 Value is zero to push the argument on the stack,
7791 or a hard register in which to store the argument.
7792
7793 MODE is the argument's machine mode.
7794 TYPE is the data type of the argument (as a tree).
7795 This is null for libcalls where that information may
7796 not be available.
7797 CUM is a variable of type CUMULATIVE_ARGS which gives info about
7798 the preceding args and about the function being called.
7799 NAMED is nonzero if this argument is a named parameter
f81e845f 7800 (otherwise it is an extra parameter matching an ellipsis).
56769981 7801
7802 On S/390, we use general purpose registers 2 through 6 to
7803 pass integer, pointer, and certain structure arguments, and
7804 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
7805 to pass floating point arguments. All remaining arguments
7806 are pushed to the stack. */
4673c1a0 7807
7808rtx
b40da9a7 7809s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
7810 int named ATTRIBUTE_UNUSED)
4673c1a0 7811{
59652f3f 7812 if (s390_function_arg_float (mode, type))
4673c1a0 7813 {
6902d973 7814 if (cum->fprs + 1 > FP_ARG_NUM_REG)
4673c1a0 7815 return 0;
7816 else
1a83b3ff 7817 return gen_rtx_REG (mode, cum->fprs + 16);
4673c1a0 7818 }
201e502c 7819 else if (s390_function_arg_integer (mode, type))
4673c1a0 7820 {
7821 int size = s390_function_arg_size (mode, type);
7822 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
7823
6902d973 7824 if (cum->gprs + n_gprs > GP_ARG_NUM_REG)
4673c1a0 7825 return 0;
7826 else
1a83b3ff 7827 return gen_rtx_REG (mode, cum->gprs + 2);
4673c1a0 7828 }
201e502c 7829
7830 /* After the real arguments, expand_call calls us once again
7831 with a void_type_node type. Whatever we return here is
7832 passed as operand 2 to the call expanders.
7833
7834 We don't need this feature ... */
7835 else if (type == void_type_node)
7836 return const0_rtx;
7837
32eda510 7838 gcc_unreachable ();
201e502c 7839}
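/* Illustrative example (not from the original sources), assuming the
   64-bit ABI:

     void f (int a, double b, long c, void *d);

   a -> %r2, b -> %f0, c -> %r3, d -> %r4; integer arguments beyond
   %r6 and float arguments beyond %f6 go to the stack. */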
7840
7841/* Return true if return values of type TYPE should be returned
7842 in a memory buffer whose address is passed by the caller as
7843 hidden first argument. */
7844
7845static bool
7846s390_return_in_memory (tree type, tree fundecl ATTRIBUTE_UNUSED)
7847{
7848 /* We accept small integral (and similar) types. */
7849 if (INTEGRAL_TYPE_P (type)
f588eb9f 7850 || POINTER_TYPE_P (type)
201e502c 7851 || TREE_CODE (type) == OFFSET_TYPE
7852 || TREE_CODE (type) == REAL_TYPE)
7853 return int_size_in_bytes (type) > 8;
7854
7855 /* Aggregates and similar constructs are always returned
7856 in memory. */
7857 if (AGGREGATE_TYPE_P (type)
7858 || TREE_CODE (type) == COMPLEX_TYPE
7859 || TREE_CODE (type) == VECTOR_TYPE)
7860 return true;
7861
7862 /* ??? We get called on all sorts of random stuff from
7863 aggregate_value_p. We can't abort, but it's not clear
7864 what's safe to return. Pretend it's a struct I guess. */
7865 return true;
7866}
7867
7868/* Define where to return a (scalar) value of type TYPE.
7869 If TYPE is null, define where to return a (scalar)
7870 value of mode MODE from a libcall. */
7871
7872rtx
7873s390_function_value (tree type, enum machine_mode mode)
7874{
7875 if (type)
7876 {
78a8ed03 7877 int unsignedp = TYPE_UNSIGNED (type);
201e502c 7878 mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
7879 }
7880
36868490 7881 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT || SCALAR_FLOAT_MODE_P (mode));
32eda510 7882 gcc_assert (GET_MODE_SIZE (mode) <= 8);
201e502c 7883
36868490 7884 if (TARGET_HARD_FLOAT && SCALAR_FLOAT_MODE_P (mode))
201e502c 7885 return gen_rtx_REG (mode, 16);
7886 else
7887 return gen_rtx_REG (mode, 2);
4673c1a0 7888}
7889
7890
56769981 7891/* Create and return the va_list datatype.
7892
7893 On S/390, va_list is an array type equivalent to
7894
7895 typedef struct __va_list_tag
7896 {
7897 long __gpr;
7898 long __fpr;
7899 void *__overflow_arg_area;
7900 void *__reg_save_area;
56769981 7901 } va_list[1];
7902
7903 where __gpr and __fpr hold the number of general purpose
7904 or floating point arguments used up to now, respectively,
f81e845f 7905 __overflow_arg_area points to the stack location of the
56769981 7906 next argument passed on the stack, and __reg_save_area
7907 always points to the start of the register area in the
7908 call frame of the current function. The function prologue
7909 saves all registers used for argument passing into this
7910 area if the function uses variable arguments. */
4673c1a0 7911
2e15d750 7912static tree
7913s390_build_builtin_va_list (void)
4673c1a0 7914{
7915 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
7916
5ebb663d 7917 record = lang_hooks.types.make_type (RECORD_TYPE);
4673c1a0 7918
7919 type_decl =
7920 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
7921
f81e845f 7922 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
4673c1a0 7923 long_integer_type_node);
f81e845f 7924 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
4673c1a0 7925 long_integer_type_node);
7926 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
7927 ptr_type_node);
7928 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
7929 ptr_type_node);
7930
6902d973 7931 va_list_gpr_counter_field = f_gpr;
7932 va_list_fpr_counter_field = f_fpr;
7933
4673c1a0 7934 DECL_FIELD_CONTEXT (f_gpr) = record;
7935 DECL_FIELD_CONTEXT (f_fpr) = record;
7936 DECL_FIELD_CONTEXT (f_ovf) = record;
7937 DECL_FIELD_CONTEXT (f_sav) = record;
7938
7939 TREE_CHAIN (record) = type_decl;
7940 TYPE_NAME (record) = type_decl;
7941 TYPE_FIELDS (record) = f_gpr;
7942 TREE_CHAIN (f_gpr) = f_fpr;
7943 TREE_CHAIN (f_fpr) = f_ovf;
7944 TREE_CHAIN (f_ovf) = f_sav;
7945
7946 layout_type (record);
7947
7948 /* The correct type is an array type of one element. */
7949 return build_array_type (record, build_index_type (size_zero_node));
7950}
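/* Illustrative layout (an assumption about the 64-bit case, not part
   of the original sources): each field occupies 8 bytes, so
   sizeof (__builtin_va_list) == 32, with __gpr at offset 0, __fpr at
   offset 8, __overflow_arg_area at 16 and __reg_save_area at 24. */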
7951
56769981 7952/* Implement va_start by filling the va_list structure VALIST.
7ccc713a 7953 STDARG_P is always true, and ignored.
7954 NEXTARG points to the first anonymous stack argument.
56769981 7955
8ef587dc 7956 The following global variables are used to initialize
56769981 7957 the va_list structure:
7958
7959 current_function_args_info:
7960 holds number of gprs and fprs used for named arguments.
7961 current_function_arg_offset_rtx:
7962 holds the offset of the first anonymous stack argument
7963 (relative to the virtual arg pointer). */
4673c1a0 7964
7965void
b40da9a7 7966s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
4673c1a0 7967{
7968 HOST_WIDE_INT n_gpr, n_fpr;
7969 int off;
7970 tree f_gpr, f_fpr, f_ovf, f_sav;
7971 tree gpr, fpr, ovf, sav, t;
7972
7973 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7974 f_fpr = TREE_CHAIN (f_gpr);
7975 f_ovf = TREE_CHAIN (f_fpr);
7976 f_sav = TREE_CHAIN (f_ovf);
7977
bfe827ee 7978 valist = build_va_arg_indirect_ref (valist);
ed03eadb 7979 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
7980 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
7981 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
7982 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
4673c1a0 7983
7984 /* Count number of gp and fp argument registers used. */
7985
7986 n_gpr = current_function_args_info.gprs;
7987 n_fpr = current_function_args_info.fprs;
7988
6902d973 7989 if (cfun->va_list_gpr_size)
7990 {
35cc02b5 7991 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
ed03eadb 7992 build_int_cst (NULL_TREE, n_gpr));
6902d973 7993 TREE_SIDE_EFFECTS (t) = 1;
7994 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7995 }
4673c1a0 7996
6902d973 7997 if (cfun->va_list_fpr_size)
7998 {
35cc02b5 7999 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
ed03eadb 8000 build_int_cst (NULL_TREE, n_fpr));
6902d973 8001 TREE_SIDE_EFFECTS (t) = 1;
8002 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8003 }
4673c1a0 8004
8005 /* Find the overflow area. */
6902d973 8006 if (n_gpr + cfun->va_list_gpr_size > GP_ARG_NUM_REG
8007 || n_fpr + cfun->va_list_fpr_size > FP_ARG_NUM_REG)
8008 {
8009 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4673c1a0 8010
6902d973 8011 off = INTVAL (current_function_arg_offset_rtx);
8012 off = off < 0 ? 0 : off;
8013 if (TARGET_DEBUG_ARG)
8014 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
8015 (int)n_gpr, (int)n_fpr, off);
4673c1a0 8016
0de36bdb 8017 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t, size_int (off));
4673c1a0 8018
35cc02b5 8019 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
6902d973 8020 TREE_SIDE_EFFECTS (t) = 1;
8021 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8022 }
4673c1a0 8023
8024 /* Find the register save area. */
6902d973 8025 if ((cfun->va_list_gpr_size && n_gpr < GP_ARG_NUM_REG)
8026 || (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG))
8027 {
8028 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
0de36bdb 8029 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
8030 size_int (-RETURN_REGNUM * UNITS_PER_WORD));
5724df29 8031
35cc02b5 8032 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
6902d973 8033 TREE_SIDE_EFFECTS (t) = 1;
8034 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8035 }
4673c1a0 8036}
8037
f81e845f 8038/* Implement va_arg by updating the va_list structure
56769981 8039 VALIST as required to retrieve an argument of type
f81e845f 8040 TYPE, and returning that argument.
8041
56769981 8042 Generates code equivalent to:
f81e845f 8043
4673c1a0 8044 if (integral value) {
8045 if (size <= 4 && args.gpr < 5 ||
f81e845f 8046 size > 4 && args.gpr < 4 )
4673c1a0 8047 ret = args.reg_save_area[args.gpr+8]
8048 else
8049 ret = *args.overflow_arg_area++;
8050 } else if (float value) {
 8051 if (args.fpr < 2)
8052 ret = args.reg_save_area[args.fpr+64]
8053 else
8054 ret = *args.overflow_arg_area++;
8055 } else if (aggregate value) {
8056 if (args.gpr < 5)
8057 ret = *args.reg_save_area[args.gpr]
8058 else
8059 ret = **args.overflow_arg_area++;
8060 } */
8061
875862bf 8062static tree
d93e0d9f 8063s390_gimplify_va_arg (tree valist, tree type, tree *pre_p,
8064 tree *post_p ATTRIBUTE_UNUSED)
4673c1a0 8065{
8066 tree f_gpr, f_fpr, f_ovf, f_sav;
8067 tree gpr, fpr, ovf, sav, reg, t, u;
8068 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
d93e0d9f 8069 tree lab_false, lab_over, addr;
4673c1a0 8070
8071 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
8072 f_fpr = TREE_CHAIN (f_gpr);
8073 f_ovf = TREE_CHAIN (f_fpr);
8074 f_sav = TREE_CHAIN (f_ovf);
8075
bfe827ee 8076 valist = build_va_arg_indirect_ref (valist);
ed03eadb 8077 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
8078 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
8079 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
8080 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
4673c1a0 8081
8082 size = int_size_in_bytes (type);
8083
b981d932 8084 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
4673c1a0 8085 {
8086 if (TARGET_DEBUG_ARG)
8087 {
8088 fprintf (stderr, "va_arg: aggregate type");
8089 debug_tree (type);
8090 }
8091
8092 /* Aggregates are passed by reference. */
8093 indirect_p = 1;
8094 reg = gpr;
8095 n_reg = 1;
99e8a714 8096
646a946e 8097 /* Kernel stack layout on 31 bit: it is assumed here that no padding
99e8a714 8098 will be added by s390_frame_info, because for va_args an even number
 8099 of gprs always has to be saved (r15-r2 = 14 regs). */
5724df29 8100 sav_ofs = 2 * UNITS_PER_WORD;
4673c1a0 8101 sav_scale = UNITS_PER_WORD;
8102 size = UNITS_PER_WORD;
6902d973 8103 max_reg = GP_ARG_NUM_REG - n_reg;
4673c1a0 8104 }
59652f3f 8105 else if (s390_function_arg_float (TYPE_MODE (type), type))
4673c1a0 8106 {
8107 if (TARGET_DEBUG_ARG)
8108 {
8109 fprintf (stderr, "va_arg: float type");
8110 debug_tree (type);
8111 }
8112
8113 /* FP args go in FP registers, if present. */
8114 indirect_p = 0;
8115 reg = fpr;
8116 n_reg = 1;
5724df29 8117 sav_ofs = 16 * UNITS_PER_WORD;
4673c1a0 8118 sav_scale = 8;
6902d973 8119 max_reg = FP_ARG_NUM_REG - n_reg;
4673c1a0 8120 }
8121 else
8122 {
8123 if (TARGET_DEBUG_ARG)
8124 {
8125 fprintf (stderr, "va_arg: other type");
8126 debug_tree (type);
8127 }
8128
8129 /* Otherwise into GP registers. */
8130 indirect_p = 0;
8131 reg = gpr;
8132 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
99e8a714 8133
646a946e 8134 /* Kernel stack layout on 31 bit: it is assumed here that no padding
 8135 will be added by s390_frame_info, because for va_args an even number
 8136 of gprs always has to be saved (r15-r2 = 14 regs). */
5724df29 8137 sav_ofs = 2 * UNITS_PER_WORD;
f81e845f 8138
c87fa2c9 8139 if (size < UNITS_PER_WORD)
8140 sav_ofs += UNITS_PER_WORD - size;
4673c1a0 8141
8142 sav_scale = UNITS_PER_WORD;
6902d973 8143 max_reg = GP_ARG_NUM_REG - n_reg;
4673c1a0 8144 }
8145
8146 /* Pull the value out of the saved registers ... */
8147
d93e0d9f 8148 lab_false = create_artificial_label ();
8149 lab_over = create_artificial_label ();
8150 addr = create_tmp_var (ptr_type_node, "addr");
ce1d5a67 8151 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
4673c1a0 8152
3c2ee2c9 8153 t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
d93e0d9f 8154 t = build2 (GT_EXPR, boolean_type_node, reg, t);
8155 u = build1 (GOTO_EXPR, void_type_node, lab_false);
8156 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
8157 gimplify_and_add (t, pre_p);
4673c1a0 8158
0de36bdb 8159 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav,
8160 size_int (sav_ofs));
3c2ee2c9 8161 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
8162 fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
0de36bdb 8163 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, fold_convert (sizetype, u));
4673c1a0 8164
35cc02b5 8165 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
d93e0d9f 8166 gimplify_and_add (t, pre_p);
4673c1a0 8167
d93e0d9f 8168 t = build1 (GOTO_EXPR, void_type_node, lab_over);
8169 gimplify_and_add (t, pre_p);
4673c1a0 8170
d93e0d9f 8171 t = build1 (LABEL_EXPR, void_type_node, lab_false);
8172 append_to_statement_list (t, pre_p);
4673c1a0 8173
4673c1a0 8174
8175 /* ... Otherwise out of the overflow area. */
8176
875862bf 8177 t = ovf;
8178 if (size < UNITS_PER_WORD)
0de36bdb 8179 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
8180 size_int (UNITS_PER_WORD - size));
875862bf 8181
8182 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
8183
35cc02b5 8184 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
875862bf 8185 gimplify_and_add (u, pre_p);
8186
0de36bdb 8187 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
8188 size_int (size));
35cc02b5 8189 t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, ovf, t);
875862bf 8190 gimplify_and_add (t, pre_p);
8191
8192 t = build1 (LABEL_EXPR, void_type_node, lab_over);
8193 append_to_statement_list (t, pre_p);
8194
8195
8196 /* Increment register save count. */
8197
8198 u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
8199 fold_convert (TREE_TYPE (reg), size_int (n_reg)));
8200 gimplify_and_add (u, pre_p);
8201
8202 if (indirect_p)
8203 {
8204 t = build_pointer_type (build_pointer_type (type));
8205 addr = fold_convert (t, addr);
8206 addr = build_va_arg_indirect_ref (addr);
8207 }
8208 else
8209 {
8210 t = build_pointer_type (type);
8211 addr = fold_convert (t, addr);
8212 }
8213
8214 return build_va_arg_indirect_ref (addr);
8215}
8216
8217
8218/* Builtins. */
8219
8220enum s390_builtin
8221{
8222 S390_BUILTIN_THREAD_POINTER,
8223 S390_BUILTIN_SET_THREAD_POINTER,
8224
8225 S390_BUILTIN_max
8226};
8227
8228static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
8229 CODE_FOR_get_tp_64,
8230 CODE_FOR_set_tp_64
8231};
8232
8233static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
8234 CODE_FOR_get_tp_31,
8235 CODE_FOR_set_tp_31
8236};
8237
8238static void
8239s390_init_builtins (void)
8240{
8241 tree ftype;
8242
8243 ftype = build_function_type (ptr_type_node, void_list_node);
54be5d7e 8244 add_builtin_function ("__builtin_thread_pointer", ftype,
8245 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
8246 NULL, NULL_TREE);
875862bf 8247
8248 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
54be5d7e 8249 add_builtin_function ("__builtin_set_thread_pointer", ftype,
8250 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
8251 NULL, NULL_TREE);
875862bf 8252}
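/* Usage sketch (illustrative only):

     void *tp = __builtin_thread_pointer ();
     __builtin_set_thread_pointer (tp);

   Both calls expand through s390_expand_builtin below via the
   get_tp/set_tp insn codes selected above. */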
8253
8254/* Expand an expression EXP that calls a built-in function,
8255 with result going to TARGET if that's convenient
8256 (and in mode MODE if that's convenient).
8257 SUBTARGET may be used as the target for computing one of EXP's operands.
8258 IGNORE is nonzero if the value is to be ignored. */
8259
8260static rtx
8261s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
8262 enum machine_mode mode ATTRIBUTE_UNUSED,
8263 int ignore ATTRIBUTE_UNUSED)
8264{
8265#define MAX_ARGS 2
8266
8267 unsigned int const *code_for_builtin =
8268 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
8269
c2f47e15 8270 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
875862bf 8271 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
875862bf 8272 enum insn_code icode;
8273 rtx op[MAX_ARGS], pat;
8274 int arity;
8275 bool nonvoid;
c2f47e15 8276 tree arg;
8277 call_expr_arg_iterator iter;
875862bf 8278
8279 if (fcode >= S390_BUILTIN_max)
8280 internal_error ("bad builtin fcode");
8281 icode = code_for_builtin[fcode];
8282 if (icode == 0)
8283 internal_error ("bad builtin fcode");
8284
8285 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
8286
c2f47e15 8287 arity = 0;
8288 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
875862bf 8289 {
8290 const struct insn_operand_data *insn_op;
8291
875862bf 8292 if (arg == error_mark_node)
8293 return NULL_RTX;
8294 if (arity > MAX_ARGS)
8295 return NULL_RTX;
8296
8297 insn_op = &insn_data[icode].operand[arity + nonvoid];
8298
8299 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
8300
8301 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
8302 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
c2f47e15 8303 arity++;
875862bf 8304 }
8305
8306 if (nonvoid)
8307 {
8308 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8309 if (!target
8310 || GET_MODE (target) != tmode
8311 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
8312 target = gen_reg_rtx (tmode);
8313 }
8314
8315 switch (arity)
8316 {
8317 case 0:
8318 pat = GEN_FCN (icode) (target);
8319 break;
8320 case 1:
8321 if (nonvoid)
8322 pat = GEN_FCN (icode) (target, op[0]);
8323 else
8324 pat = GEN_FCN (icode) (op[0]);
8325 break;
8326 case 2:
8327 pat = GEN_FCN (icode) (target, op[0], op[1]);
8328 break;
8329 default:
32eda510 8330 gcc_unreachable ();
875862bf 8331 }
8332 if (!pat)
8333 return NULL_RTX;
8334 emit_insn (pat);
8335
8336 if (nonvoid)
8337 return target;
8338 else
8339 return const0_rtx;
8340}
8341
8342
8343/* Output assembly code for the trampoline template to
8344 stdio stream FILE.
8345
8346 On S/390, we use gpr 1 internally in the trampoline code;
8347 gpr 0 is used to hold the static chain. */
8348
8349void
8350s390_trampoline_template (FILE *file)
8351{
8352 rtx op[2];
8353 op[0] = gen_rtx_REG (Pmode, 0);
8354 op[1] = gen_rtx_REG (Pmode, 1);
8355
8356 if (TARGET_64BIT)
8357 {
8358 output_asm_insn ("basr\t%1,0", op);
8359 output_asm_insn ("lmg\t%0,%1,14(%1)", op);
8360 output_asm_insn ("br\t%1", op);
8361 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
8362 }
8363 else
8364 {
8365 output_asm_insn ("basr\t%1,0", op);
8366 output_asm_insn ("lm\t%0,%1,6(%1)", op);
8367 output_asm_insn ("br\t%1", op);
8368 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
8369 }
8370}
8371
8372/* Emit RTL insns to initialize the variable parts of a trampoline.
8373 FNADDR is an RTX for the address of the function's pure code.
8374 CXT is an RTX for the static chain value for the function. */
8375
8376void
8377s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
8378{
8379 emit_move_insn (gen_rtx_MEM (Pmode,
8380 memory_address (Pmode,
8381 plus_constant (addr, (TARGET_64BIT ? 16 : 8)))), cxt);
8382 emit_move_insn (gen_rtx_MEM (Pmode,
8383 memory_address (Pmode,
8384 plus_constant (addr, (TARGET_64BIT ? 24 : 12)))), fnaddr);
8385}
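/* How the pieces fit together on 64 bit (an illustrative walk-through,
   not from the original sources):

     offset  0: basr %r1,0           ; %r1 = address of offset 2
     offset  2: lmg  %r0,%r1,14(%r1) ; loads offsets 16 and 24
     offset  8: br   %r1
     offset 16: <static chain>       ; stored by the code above
     offset 24: <function address>   ; stored by the code above

   The lmg fetches the static chain into %r0 and the target address
   into %r1, and the br enters the function. */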
8386
875862bf 8387/* Output assembler code to FILE to increment profiler label # LABELNO
8388 for profiling a function entry. */
8389
8390void
8391s390_function_profiler (FILE *file, int labelno)
8392{
8393 rtx op[7];
8394
8395 char label[128];
8396 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
8397
 8398 fprintf (file, "# function profiler\n");
8399
8400 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
8401 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8402 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
8403
8404 op[2] = gen_rtx_REG (Pmode, 1);
8405 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
8406 SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
8407
8408 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
8409 if (flag_pic)
8410 {
8411 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
8412 op[4] = gen_rtx_CONST (Pmode, op[4]);
8413 }
8414
8415 if (TARGET_64BIT)
8416 {
8417 output_asm_insn ("stg\t%0,%1", op);
8418 output_asm_insn ("larl\t%2,%3", op);
8419 output_asm_insn ("brasl\t%0,%4", op);
8420 output_asm_insn ("lg\t%0,%1", op);
8421 }
8422 else if (!flag_pic)
8423 {
8424 op[6] = gen_label_rtx ();
8425
8426 output_asm_insn ("st\t%0,%1", op);
8427 output_asm_insn ("bras\t%2,%l6", op);
8428 output_asm_insn (".long\t%4", op);
8429 output_asm_insn (".long\t%3", op);
8430 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
8431 output_asm_insn ("l\t%0,0(%2)", op);
8432 output_asm_insn ("l\t%2,4(%2)", op);
8433 output_asm_insn ("basr\t%0,%0", op);
8434 output_asm_insn ("l\t%0,%1", op);
8435 }
8436 else
8437 {
8438 op[5] = gen_label_rtx ();
8439 op[6] = gen_label_rtx ();
8440
8441 output_asm_insn ("st\t%0,%1", op);
8442 output_asm_insn ("bras\t%2,%l6", op);
8443 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
8444 output_asm_insn (".long\t%4-%l5", op);
8445 output_asm_insn (".long\t%3-%l5", op);
8446 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
8447 output_asm_insn ("lr\t%0,%2", op);
8448 output_asm_insn ("a\t%0,0(%2)", op);
8449 output_asm_insn ("a\t%2,4(%2)", op);
8450 output_asm_insn ("basr\t%0,%0", op);
8451 output_asm_insn ("l\t%0,%1", op);
8452 }
8453}
8454
8455/* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
8456 into its SYMBOL_REF_FLAGS. */
8457
8458static void
8459s390_encode_section_info (tree decl, rtx rtl, int first)
8460{
8461 default_encode_section_info (decl, rtl, first);
8462
8463 /* If a variable has a forced alignment to < 2 bytes, mark it with
8464 SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand. */
8465 if (TREE_CODE (decl) == VAR_DECL
8466 && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
8467 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
8468}
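/* Illustrative case (not from the original sources): a declaration
   such as

     static char flag __attribute__ ((aligned (1)));

   may end up at an odd address; since LARL can only form even
   addresses (it encodes a halfword-relative offset), such symbols
   are flagged here and addressed differently. */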
8469
8470/* Output thunk to FILE that implements a C++ virtual function call (with
8471 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
8472 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
8473 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
8474 relative to the resulting this pointer. */
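/* In C terms the thunk performs roughly the following (an illustrative
   sketch, not part of the original sources):

     this += delta;
     if (vcall_offset)
       this += *(long *) (*(void **) this + vcall_offset);

   and then jumps to FUNCTION. */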
8475
8476static void
8477s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
8478 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
8479 tree function)
8480{
8481 rtx op[10];
8482 int nonlocal = 0;
8483
8484 /* Operand 0 is the target function. */
8485 op[0] = XEXP (DECL_RTL (function), 0);
8486 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
8487 {
8488 nonlocal = 1;
8489 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
8490 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
8491 op[0] = gen_rtx_CONST (Pmode, op[0]);
8492 }
8493
8494 /* Operand 1 is the 'this' pointer. */
8495 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
8496 op[1] = gen_rtx_REG (Pmode, 3);
8497 else
8498 op[1] = gen_rtx_REG (Pmode, 2);
8499
8500 /* Operand 2 is the delta. */
8501 op[2] = GEN_INT (delta);
8502
8503 /* Operand 3 is the vcall_offset. */
8504 op[3] = GEN_INT (vcall_offset);
8505
8506 /* Operand 4 is the temporary register. */
8507 op[4] = gen_rtx_REG (Pmode, 1);
8508
8509 /* Operands 5 to 8 can be used as labels. */
8510 op[5] = NULL_RTX;
8511 op[6] = NULL_RTX;
8512 op[7] = NULL_RTX;
8513 op[8] = NULL_RTX;
8514
8515 /* Operand 9 can be used for temporary register. */
8516 op[9] = NULL_RTX;
8517
8518 /* Generate code. */
8519 if (TARGET_64BIT)
8520 {
8521 /* Setup literal pool pointer if required. */
8522 if ((!DISP_IN_RANGE (delta)
163277cf 8523 && !CONST_OK_FOR_K (delta)
8524 && !CONST_OK_FOR_Os (delta))
875862bf 8525 || (!DISP_IN_RANGE (vcall_offset)
163277cf 8526 && !CONST_OK_FOR_K (vcall_offset)
8527 && !CONST_OK_FOR_Os (vcall_offset)))
875862bf 8528 {
8529 op[5] = gen_label_rtx ();
8530 output_asm_insn ("larl\t%4,%5", op);
8531 }
8532
8533 /* Add DELTA to this pointer. */
8534 if (delta)
8535 {
cb888f33 8536 if (CONST_OK_FOR_J (delta))
875862bf 8537 output_asm_insn ("la\t%1,%2(%1)", op);
8538 else if (DISP_IN_RANGE (delta))
8539 output_asm_insn ("lay\t%1,%2(%1)", op);
cb888f33 8540 else if (CONST_OK_FOR_K (delta))
875862bf 8541 output_asm_insn ("aghi\t%1,%2", op);
163277cf 8542 else if (CONST_OK_FOR_Os (delta))
8543 output_asm_insn ("agfi\t%1,%2", op);
875862bf 8544 else
8545 {
8546 op[6] = gen_label_rtx ();
8547 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
8548 }
8549 }
8550
8551 /* Perform vcall adjustment. */
8552 if (vcall_offset)
8553 {
8554 if (DISP_IN_RANGE (vcall_offset))
8555 {
8556 output_asm_insn ("lg\t%4,0(%1)", op);
8557 output_asm_insn ("ag\t%1,%3(%4)", op);
8558 }
cb888f33 8559 else if (CONST_OK_FOR_K (vcall_offset))
875862bf 8560 {
8561 output_asm_insn ("lghi\t%4,%3", op);
8562 output_asm_insn ("ag\t%4,0(%1)", op);
8563 output_asm_insn ("ag\t%1,0(%4)", op);
8564 }
163277cf 8565 else if (CONST_OK_FOR_Os (vcall_offset))
8566 {
8567 output_asm_insn ("lgfi\t%4,%3", op);
8568 output_asm_insn ("ag\t%4,0(%1)", op);
8569 output_asm_insn ("ag\t%1,0(%4)", op);
8570 }
875862bf 8571 else
8572 {
8573 op[7] = gen_label_rtx ();
8574 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
8575 output_asm_insn ("ag\t%4,0(%1)", op);
8576 output_asm_insn ("ag\t%1,0(%4)", op);
8577 }
8578 }
8579
8580 /* Jump to target. */
8581 output_asm_insn ("jg\t%0", op);
8582
8583 /* Output literal pool if required. */
8584 if (op[5])
8585 {
8586 output_asm_insn (".align\t4", op);
8587 targetm.asm_out.internal_label (file, "L",
8588 CODE_LABEL_NUMBER (op[5]));
8589 }
8590 if (op[6])
8591 {
8592 targetm.asm_out.internal_label (file, "L",
8593 CODE_LABEL_NUMBER (op[6]));
8594 output_asm_insn (".long\t%2", op);
8595 }
8596 if (op[7])
8597 {
8598 targetm.asm_out.internal_label (file, "L",
8599 CODE_LABEL_NUMBER (op[7]));
8600 output_asm_insn (".long\t%3", op);
8601 }
8602 }
8603 else
8604 {
8605 /* Setup base pointer if required. */
8606 if (!vcall_offset
8607 || (!DISP_IN_RANGE (delta)
163277cf 8608 && !CONST_OK_FOR_K (delta)
8609 && !CONST_OK_FOR_Os (delta))
875862bf 8610 || (!DISP_IN_RANGE (delta)
163277cf 8611 && !CONST_OK_FOR_K (vcall_offset)
8612 && !CONST_OK_FOR_Os (vcall_offset)))
875862bf 8613 {
8614 op[5] = gen_label_rtx ();
8615 output_asm_insn ("basr\t%4,0", op);
8616 targetm.asm_out.internal_label (file, "L",
8617 CODE_LABEL_NUMBER (op[5]));
8618 }
8619
8620 /* Add DELTA to this pointer. */
8621 if (delta)
8622 {
cb888f33 8623 if (CONST_OK_FOR_J (delta))
875862bf 8624 output_asm_insn ("la\t%1,%2(%1)", op);
8625 else if (DISP_IN_RANGE (delta))
8626 output_asm_insn ("lay\t%1,%2(%1)", op);
cb888f33 8627 else if (CONST_OK_FOR_K (delta))
875862bf 8628 output_asm_insn ("ahi\t%1,%2", op);
163277cf 8629 else if (CONST_OK_FOR_Os (delta))
8630 output_asm_insn ("afi\t%1,%2", op);
875862bf 8631 else
8632 {
8633 op[6] = gen_label_rtx ();
8634 output_asm_insn ("a\t%1,%6-%5(%4)", op);
8635 }
8636 }
8637
8638 /* Perform vcall adjustment. */
8639 if (vcall_offset)
8640 {
cb888f33 8641 if (CONST_OK_FOR_J (vcall_offset))
875862bf 8642 {
0451e449 8643 output_asm_insn ("l\t%4,0(%1)", op);
875862bf 8644 output_asm_insn ("a\t%1,%3(%4)", op);
8645 }
8646 else if (DISP_IN_RANGE (vcall_offset))
8647 {
0451e449 8648 output_asm_insn ("l\t%4,0(%1)", op);
875862bf 8649 output_asm_insn ("ay\t%1,%3(%4)", op);
8650 }
cb888f33 8651 else if (CONST_OK_FOR_K (vcall_offset))
875862bf 8652 {
8653 output_asm_insn ("lhi\t%4,%3", op);
8654 output_asm_insn ("a\t%4,0(%1)", op);
8655 output_asm_insn ("a\t%1,0(%4)", op);
8656 }
163277cf 8657 else if (CONST_OK_FOR_Os (vcall_offset))
8658 {
8659 output_asm_insn ("iilf\t%4,%3", op);
8660 output_asm_insn ("a\t%4,0(%1)", op);
8661 output_asm_insn ("a\t%1,0(%4)", op);
8662 }
875862bf 8663 else
8664 {
8665 op[7] = gen_label_rtx ();
8666 output_asm_insn ("l\t%4,%7-%5(%4)", op);
8667 output_asm_insn ("a\t%4,0(%1)", op);
8668 output_asm_insn ("a\t%1,0(%4)", op);
8669 }
4673c1a0 8670
875862bf 8671 /* We had to clobber the base pointer register.
8672 Re-setup the base pointer (with a different base). */
8673 op[5] = gen_label_rtx ();
8674 output_asm_insn ("basr\t%4,0", op);
8675 targetm.asm_out.internal_label (file, "L",
8676 CODE_LABEL_NUMBER (op[5]));
8677 }
4673c1a0 8678
875862bf 8679 /* Jump to target. */
8680 op[8] = gen_label_rtx ();
4673c1a0 8681
875862bf 8682 if (!flag_pic)
8683 output_asm_insn ("l\t%4,%8-%5(%4)", op);
8684 else if (!nonlocal)
8685 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8686 /* We cannot call through .plt, since .plt requires %r12 loaded. */
8687 else if (flag_pic == 1)
8688 {
8689 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8690 output_asm_insn ("l\t%4,%0(%4)", op);
8691 }
8692 else if (flag_pic == 2)
8693 {
8694 op[9] = gen_rtx_REG (Pmode, 0);
8695 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
8696 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8697 output_asm_insn ("ar\t%4,%9", op);
8698 output_asm_insn ("l\t%4,0(%4)", op);
8699 }
4673c1a0 8700
875862bf 8701 output_asm_insn ("br\t%4", op);
4673c1a0 8702
875862bf 8703 /* Output literal pool. */
8704 output_asm_insn (".align\t4", op);
4673c1a0 8705
875862bf 8706 if (nonlocal && flag_pic == 2)
8707 output_asm_insn (".long\t%0", op);
8708 if (nonlocal)
8709 {
8710 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
8711 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
8712 }
d93e0d9f 8713
875862bf 8714 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
8715 if (!flag_pic)
8716 output_asm_insn (".long\t%0", op);
8717 else
8718 output_asm_insn (".long\t%0-%5", op);
4673c1a0 8719
875862bf 8720 if (op[6])
8721 {
8722 targetm.asm_out.internal_label (file, "L",
8723 CODE_LABEL_NUMBER (op[6]));
8724 output_asm_insn (".long\t%2", op);
8725 }
8726 if (op[7])
8727 {
8728 targetm.asm_out.internal_label (file, "L",
8729 CODE_LABEL_NUMBER (op[7]));
8730 output_asm_insn (".long\t%3", op);
8731 }
4673c1a0 8732 }
4673c1a0 8733}
8734
875862bf 8735static bool
8736s390_valid_pointer_mode (enum machine_mode mode)
8737{
8738 return (mode == SImode || (TARGET_64BIT && mode == DImode));
8739}
56769981 8740
347301d6 8741 /* Checks whether the given CALL_EXPR would use a call-saved
875862bf 8742 register. This is used to decide whether sibling call
8743 optimization could be performed on the respective function
8744 call. */
be00aaa8 8745
875862bf 8746static bool
347301d6 8747s390_call_saved_register_used (tree call_expr)
be00aaa8 8748{
875862bf 8749 CUMULATIVE_ARGS cum;
8750 tree parameter;
8751 enum machine_mode mode;
8752 tree type;
8753 rtx parm_rtx;
347301d6 8754 int reg, i;
be00aaa8 8755
875862bf 8756 INIT_CUMULATIVE_ARGS (cum, NULL, NULL, 0, 0);
be00aaa8 8757
347301d6 8758 for (i = 0; i < call_expr_nargs (call_expr); i++)
875862bf 8759 {
347301d6 8760 parameter = CALL_EXPR_ARG (call_expr, i);
32eda510 8761 gcc_assert (parameter);
be00aaa8 8762
875862bf 8763 /* For an undeclared variable passed as parameter we will get
8764 an ERROR_MARK node here. */
8765 if (TREE_CODE (parameter) == ERROR_MARK)
8766 return true;
be00aaa8 8767
32eda510 8768 type = TREE_TYPE (parameter);
8769 gcc_assert (type);
be00aaa8 8770
32eda510 8771 mode = TYPE_MODE (type);
8772 gcc_assert (mode);
be00aaa8 8773
875862bf 8774 if (pass_by_reference (&cum, mode, type, true))
8775 {
8776 mode = Pmode;
8777 type = build_pointer_type (type);
8778 }
be00aaa8 8779
875862bf 8780 parm_rtx = s390_function_arg (&cum, mode, type, 0);
be00aaa8 8781
875862bf 8782 s390_function_arg_advance (&cum, mode, type, 0);
be00aaa8 8783
875862bf 8784 if (parm_rtx && REG_P (parm_rtx))
8785 {
8786 for (reg = 0;
8787 reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
8788 reg++)
8789 if (! call_used_regs[reg + REGNO (parm_rtx)])
8790 return true;
8791 }
8792 }
8793 return false;
8794}
be00aaa8 8795
875862bf 8796/* Return true if the given call expression can be
8797 turned into a sibling call.
8798 DECL holds the declaration of the function to be called whereas
8799 EXP is the call expression itself. */
be00aaa8 8800
875862bf 8801static bool
8802s390_function_ok_for_sibcall (tree decl, tree exp)
8803{
8804 /* The TPF epilogue uses register 1. */
8805 if (TARGET_TPF_PROFILING)
8806 return false;
be00aaa8 8807
875862bf 8808 /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
8809 which would have to be restored before the sibcall. */
a47b0dc3 8810 if (!TARGET_64BIT && flag_pic && decl && !targetm.binds_local_p (decl))
875862bf 8811 return false;
be00aaa8 8812
875862bf 8813 /* Register 6 on s390 is available as an argument register but is
 8814 call-saved. This makes functions needing this register for arguments
 8815 not suitable for sibcalls. */
347301d6 8816 return !s390_call_saved_register_used (exp);
875862bf 8817}
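/* Illustrative consequence (not from the original sources): for

     void callee (long a, long b, long c, long d, long e);

   the fifth argument is passed in %r6, so a tail call to callee is
   rejected by the check above. */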
be00aaa8 8818
875862bf 8819/* Return the fixed registers used for condition codes. */
be00aaa8 8820
875862bf 8821static bool
8822s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
8823{
8824 *p1 = CC_REGNUM;
8825 *p2 = INVALID_REGNUM;
8826
8827 return true;
8828}
be00aaa8 8829
875862bf 8830/* This function is used by the call expanders of the machine description.
8831 It emits the call insn itself together with the necessary operations
8832 to adjust the target address and returns the emitted insn.
8833 ADDR_LOCATION is the target address rtx
8834 TLS_CALL the location of the thread-local symbol
8835 RESULT_REG the register where the result of the call should be stored
8836 RETADDR_REG the register where the return address should be stored
8837 If this parameter is NULL_RTX the call is considered
8838 to be a sibling call. */
be00aaa8 8839
875862bf 8840rtx
8841s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
8842 rtx retaddr_reg)
4673c1a0 8843{
875862bf 8844 bool plt_call = false;
8845 rtx insn;
8846 rtx call;
8847 rtx clobber;
8848 rtvec vec;
4a1c604e 8849
875862bf 8850 /* Direct function calls need special treatment. */
8851 if (GET_CODE (addr_location) == SYMBOL_REF)
4673c1a0 8852 {
875862bf 8853 /* When calling a global routine in PIC mode, we must
8854 replace the symbol itself with the PLT stub. */
8855 if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
8856 {
8857 addr_location = gen_rtx_UNSPEC (Pmode,
8858 gen_rtvec (1, addr_location),
8859 UNSPEC_PLT);
8860 addr_location = gen_rtx_CONST (Pmode, addr_location);
8861 plt_call = true;
8862 }
8863
8864 /* Unless we can use the bras(l) insn, force the
8865 routine address into a register. */
8866 if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
8867 {
8868 if (flag_pic)
8869 addr_location = legitimize_pic_address (addr_location, 0);
8870 else
8871 addr_location = force_reg (Pmode, addr_location);
8872 }
4673c1a0 8873 }
875862bf 8874
8875 /* If it is already an indirect call or the code above moved the
8876 SYMBOL_REF to somewhere else make sure the address can be found in
8877 register 1. */
8878 if (retaddr_reg == NULL_RTX
8879 && GET_CODE (addr_location) != SYMBOL_REF
8880 && !plt_call)
4673c1a0 8881 {
875862bf 8882 emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
8883 addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
4673c1a0 8884 }
4673c1a0 8885
875862bf 8886 addr_location = gen_rtx_MEM (QImode, addr_location);
8887 call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);
8b4a4127 8888
875862bf 8889 if (result_reg != NULL_RTX)
8890 call = gen_rtx_SET (VOIDmode, result_reg, call);
8b4a4127 8891
875862bf 8892 if (retaddr_reg != NULL_RTX)
8893 {
8894 clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);
f81e845f 8895
875862bf 8896 if (tls_call != NULL_RTX)
8897 vec = gen_rtvec (3, call, clobber,
8898 gen_rtx_USE (VOIDmode, tls_call));
8899 else
8900 vec = gen_rtvec (2, call, clobber);
8b4a4127 8901
875862bf 8902 call = gen_rtx_PARALLEL (VOIDmode, vec);
8903 }
8b4a4127 8904
875862bf 8905 insn = emit_call_insn (call);
8b4a4127 8906
875862bf 8907 /* 31-bit PLT stubs and tls calls use the GOT register implicitly. */
8908 if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
8909 {
8910 /* s390_function_ok_for_sibcall should
8911 have denied sibcalls in this case. */
32eda510 8912 gcc_assert (retaddr_reg != NULL_RTX);
8b4a4127 8913
875862bf 8914 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
8915 }
8916 return insn;
8917}
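
/* A minimal usage sketch for s390_emit_call, compiled out.  It assumes
   operand shapes like those the md call expanders pass in; the function
   and variable names here are illustrative, not the actual s390.md
   patterns.  */
#if 0
static void
example_expand_calls (rtx target_mem, rtx result_reg)
{
  /* Ordinary call: the return address goes into RETURN_REGNUM.  */
  s390_emit_call (XEXP (target_mem, 0), NULL_RTX, result_reg,
		  gen_rtx_REG (Pmode, RETURN_REGNUM));

  /* Sibling call: RETADDR_REG is NULL_RTX, per the contract above.  */
  s390_emit_call (XEXP (target_mem, 0), NULL_RTX, result_reg, NULL_RTX);
}
#endif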
8b4a4127 8918
875862bf 8919/* Implement CONDITIONAL_REGISTER_USAGE. */
8b4a4127 8920
875862bf 8921void
8922s390_conditional_register_usage (void)
8923{
8924 int i;
8b4a4127 8925
8b4a4127 8926 if (flag_pic)
8927 {
875862bf 8928 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
8929 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
8b4a4127 8930 }
875862bf 8931 if (TARGET_CPU_ZARCH)
8b4a4127 8932 {
d1a5573e 8933 fixed_regs[BASE_REGNUM] = 0;
8934 call_used_regs[BASE_REGNUM] = 0;
875862bf 8935 fixed_regs[RETURN_REGNUM] = 0;
8936 call_used_regs[RETURN_REGNUM] = 0;
8b4a4127 8937 }
875862bf 8938 if (TARGET_64BIT)
8b4a4127 8939 {
875862bf 8940 for (i = 24; i < 32; i++)
8941 call_used_regs[i] = call_really_used_regs[i] = 0;
8b4a4127 8942 }
8943 else
8944 {
875862bf 8945 for (i = 18; i < 20; i++)
8946 call_used_regs[i] = call_really_used_regs[i] = 0;
8947 }
8b4a4127 8948
875862bf 8949 if (TARGET_SOFT_FLOAT)
8950 {
8951 for (i = 16; i < 32; i++)
8952 call_used_regs[i] = fixed_regs[i] = 1;
8b4a4127 8953 }
8954}
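
/* A hedged note on the hard register numbers used above: in this port,
   regnos 0-15 are the GPRs r0-r15 and regnos 16-31 are the FPRs (see
   REGISTER_NAMES for the exact mapping). Under the 64-bit ABI the loop
   over 24-31 marks f8-f15 as call-saved; under the 31-bit ABI the loop
   over 18-19 marks f4 and f6 as call-saved. */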
8955
875862bf 8956/* Function corresponding to the eh_return expander. */
7811991d 8957
875862bf 8958static GTY(()) rtx s390_tpf_eh_return_symbol;
8959void
8960s390_emit_tpf_eh_return (rtx target)
7811991d 8961{
875862bf 8962 rtx insn, reg;
525d1294 8963
875862bf 8964 if (!s390_tpf_eh_return_symbol)
8965 s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");
8966
8967 reg = gen_rtx_REG (Pmode, 2);
8968
8969 emit_move_insn (reg, target);
8970 insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
8971 gen_rtx_REG (Pmode, RETURN_REGNUM));
8972 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);
8973
8974 emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
be00aaa8 8975}
8976
875862bf 8977/* Rework the prologue/epilogue to avoid saving/restoring
8978 registers unnecessarily. */
c20f8a1d 8979
6988553d 8980static void
875862bf 8981s390_optimize_prologue (void)
c6933ba6 8982{
875862bf 8983 rtx insn, new_insn, next_insn;
c20f8a1d 8984
875862bf 8985 /* Do a final recompute of the frame-related data. */
c20f8a1d 8986
875862bf 8987 s390_update_frame_layout ();
c20f8a1d 8988
875862bf 8989 /* If all special registers are in fact used, there's nothing we
 8990 can do, so there's no point in walking the insn list. */
c20f8a1d 8991
875862bf 8992 if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
8993 && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
8994 && (TARGET_CPU_ZARCH
8995 || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
8996 && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
8997 return;
c20f8a1d 8998
875862bf 8999 /* Search for prologue/epilogue insns and replace them. */
c20f8a1d 9000
875862bf 9001 for (insn = get_insns (); insn; insn = next_insn)
9002 {
9003 int first, last, off;
9004 rtx set, base, offset;
c20f8a1d 9005
875862bf 9006 next_insn = NEXT_INSN (insn);
d7bec695 9007
875862bf 9008 if (GET_CODE (insn) != INSN)
9009 continue;
c20f8a1d 9010
875862bf 9011 if (GET_CODE (PATTERN (insn)) == PARALLEL
9012 && store_multiple_operation (PATTERN (insn), VOIDmode))
c20f8a1d 9013 {
875862bf 9014 set = XVECEXP (PATTERN (insn), 0, 0);
9015 first = REGNO (SET_SRC (set));
9016 last = first + XVECLEN (PATTERN (insn), 0) - 1;
9017 offset = const0_rtx;
9018 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
9019 off = INTVAL (offset);
c20f8a1d 9020
875862bf 9021 if (GET_CODE (base) != REG || off < 0)
9022 continue;
43944aa4 9023 if (cfun_frame_layout.first_save_gpr != -1
9024 && (cfun_frame_layout.first_save_gpr < first
9025 || cfun_frame_layout.last_save_gpr > last))
9026 continue;
875862bf 9027 if (REGNO (base) != STACK_POINTER_REGNUM
9028 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9029 continue;
9030 if (first > BASE_REGNUM || last < BASE_REGNUM)
9031 continue;
9032
9033 if (cfun_frame_layout.first_save_gpr != -1)
c20f8a1d 9034 {
875862bf 9035 new_insn = save_gprs (base,
9036 off + (cfun_frame_layout.first_save_gpr
9037 - first) * UNITS_PER_WORD,
9038 cfun_frame_layout.first_save_gpr,
9039 cfun_frame_layout.last_save_gpr);
9040 new_insn = emit_insn_before (new_insn, insn);
9041 INSN_ADDRESSES_NEW (new_insn, -1);
c20f8a1d 9042 }
c20f8a1d 9043
875862bf 9044 remove_insn (insn);
9045 continue;
c20f8a1d 9046 }
9047
43944aa4 9048 if (cfun_frame_layout.first_save_gpr == -1
9049 && GET_CODE (PATTERN (insn)) == SET
875862bf 9050 && GET_CODE (SET_SRC (PATTERN (insn))) == REG
9051 && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
9052 || (!TARGET_CPU_ZARCH
9053 && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
9054 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
c20f8a1d 9055 {
875862bf 9056 set = PATTERN (insn);
9057 first = REGNO (SET_SRC (set));
9058 offset = const0_rtx;
9059 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
9060 off = INTVAL (offset);
c20f8a1d 9061
875862bf 9062 if (GET_CODE (base) != REG || off < 0)
9063 continue;
9064 if (REGNO (base) != STACK_POINTER_REGNUM
9065 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9066 continue;
c20f8a1d 9067
875862bf 9068 remove_insn (insn);
9069 continue;
c20f8a1d 9070 }
9071
875862bf 9072 if (GET_CODE (PATTERN (insn)) == PARALLEL
9073 && load_multiple_operation (PATTERN (insn), VOIDmode))
d7bec695 9074 {
875862bf 9075 set = XVECEXP (PATTERN (insn), 0, 0);
9076 first = REGNO (SET_DEST (set));
9077 last = first + XVECLEN (PATTERN (insn), 0) - 1;
9078 offset = const0_rtx;
9079 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
9080 off = INTVAL (offset);
d7bec695 9081
875862bf 9082 if (GET_CODE (base) != REG || off < 0)
9083 continue;
43944aa4 9084 if (cfun_frame_layout.first_restore_gpr != -1
9085 && (cfun_frame_layout.first_restore_gpr < first
9086 || cfun_frame_layout.last_restore_gpr > last))
9087 continue;
875862bf 9088 if (REGNO (base) != STACK_POINTER_REGNUM
9089 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9090 continue;
9091 if (first > BASE_REGNUM || last < BASE_REGNUM)
9092 continue;
c20f8a1d 9093
875862bf 9094 if (cfun_frame_layout.first_restore_gpr != -1)
9095 {
9096 new_insn = restore_gprs (base,
9097 off + (cfun_frame_layout.first_restore_gpr
9098 - first) * UNITS_PER_WORD,
9099 cfun_frame_layout.first_restore_gpr,
9100 cfun_frame_layout.last_restore_gpr);
9101 new_insn = emit_insn_before (new_insn, insn);
9102 INSN_ADDRESSES_NEW (new_insn, -1);
9103 }
d7bec695 9104
875862bf 9105 remove_insn (insn);
9106 continue;
d7bec695 9107 }
9108
43944aa4 9109 if (cfun_frame_layout.first_restore_gpr == -1
9110 && GET_CODE (PATTERN (insn)) == SET
875862bf 9111 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
9112 && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
9113 || (!TARGET_CPU_ZARCH
9114 && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
9115 && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
c20f8a1d 9116 {
875862bf 9117 set = PATTERN (insn);
9118 first = REGNO (SET_DEST (set));
9119 offset = const0_rtx;
9120 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
9121 off = INTVAL (offset);
f81e845f 9122
875862bf 9123 if (GET_CODE (base) != REG || off < 0)
9124 continue;
9125 if (REGNO (base) != STACK_POINTER_REGNUM
9126 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9127 continue;
5a5e802f 9128
875862bf 9129 remove_insn (insn);
9130 continue;
9131 }
9132 }
5a5e802f 9133}
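
/* A hedged before/after sketch of the transformation above, assuming
   the 64-bit ABI's register save area (r6 saved at 48(%r15), 8-byte
   slots): if the generic prologue saved r6-r15 via

       stmg %r6,%r15,48(%r15)

   but the final frame layout only needs r14 and r15 saved, the insn is
   replaced by

       stmg %r14,%r15,112(%r15)

   where 112 = off + (first_save_gpr - first) * UNITS_PER_WORD
             = 48 + (14 - 6) * 8.  */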
9134
875862bf 9135/* Perform machine-dependent processing. */
7346ca58 9136
875862bf 9137static void
9138s390_reorg (void)
7346ca58 9139{
875862bf 9140 bool pool_overflow = false;
7346ca58 9141
875862bf 9142 /* Make sure all splits have been performed; splits after
9143 machine_dependent_reorg might confuse insn length counts. */
9144 split_all_insns_noflow ();
f588eb9f 9145
86b779d2 9146 /* From here on, decomposed literal pool addresses must be accepted. */
9147 cfun->machine->decomposed_literal_pool_addresses_ok_p = true;
7346ca58 9148
875862bf 9149 /* Install the main literal pool and the associated base
9150 register load insns.
f588eb9f 9151
875862bf 9152 In addition, there are two problematic situations we need
9153 to correct:
7346ca58 9154
875862bf 9155 - the literal pool might be > 4096 bytes in size, so that
9156 some of its elements cannot be directly accessed
7346ca58 9157
875862bf 9158 - a branch target might be > 64K away from the branch, so that
9159 it is not possible to use a PC-relative instruction.
7346ca58 9160
875862bf 9161 To fix those, we split the single literal pool into multiple
 9162 pool chunks, reloading the pool base register at various
 9163 points throughout the function to ensure it always points to
 9164 the pool chunk the following code expects, and/or we replace
 9165 PC-relative branches with absolute branches.
7346ca58 9166
875862bf 9167 However, the two problems are interdependent: splitting the
9168 literal pool can move a branch further away from its target,
9169 causing the 64K limit to overflow, and on the other hand,
9170 replacing a PC-relative branch by an absolute branch means
9171 we need to put the branch target address into the literal
9172 pool, possibly causing it to overflow.
44a61e21 9173
875862bf 9174 So, we loop trying to fix up both problems until we manage
 9175 to satisfy both conditions at the same time. Note that the
 9176 loop is guaranteed to terminate, as every pass of the loop
 9177 strictly decreases the total number of PC-relative branches
 9178 in the function. (This is not entirely true, as
 9179 chunkify_start might introduce branch-over-pool insns;
 9180 those never need to be split, however.) */
44a61e21 9181
875862bf 9182 for (;;)
9183 {
9184 struct constant_pool *pool = NULL;
80aaaa56 9185
875862bf 9186 /* Collect the literal pool. */
9187 if (!pool_overflow)
9188 {
9189 pool = s390_mainpool_start ();
9190 if (!pool)
9191 pool_overflow = true;
9192 }
80aaaa56 9193
875862bf 9194 /* If the literal pool overflowed, start to chunkify it. */
9195 if (pool_overflow)
9196 pool = s390_chunkify_start ();
80aaaa56 9197
875862bf 9198 /* Split out-of-range branches. If this has created new
 9199 literal pool entries, cancel the current chunk list and
 9200 recompute it. zSeries machines have large branch
 9201 instructions, so on them we never need to split a branch. */
9202 if (!TARGET_CPU_ZARCH && s390_split_branches ())
9203 {
9204 if (pool_overflow)
9205 s390_chunkify_cancel (pool);
9206 else
9207 s390_mainpool_cancel (pool);
80aaaa56 9208
875862bf 9209 continue;
9210 }
9211
 9212 /* If we made it this far, both conditions are satisfied.
 9213 Finish up the literal pool related changes. */
9214 if (pool_overflow)
9215 s390_chunkify_finish (pool);
9216 else
9217 s390_mainpool_finish (pool);
9218
9219 /* We're done splitting branches. */
9220 cfun->machine->split_branches_pending_p = false;
9221 break;
80aaaa56 9222 }
80aaaa56 9223
babfdedf 9224 /* Generate out-of-pool execute target insns. */
9225 if (TARGET_CPU_ZARCH)
9226 {
9227 rtx insn, label, target;
9228
9229 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9230 {
9231 label = s390_execute_label (insn);
9232 if (!label)
9233 continue;
9234
9235 gcc_assert (label != const0_rtx);
9236
9237 target = emit_label (XEXP (label, 0));
9238 INSN_ADDRESSES_NEW (target, -1);
9239
9240 target = emit_insn (s390_execute_target (insn));
9241 INSN_ADDRESSES_NEW (target, -1);
9242 }
9243 }
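
  /* A hedged sketch of what the loop above arranges for an EXECUTE-based
     operation such as a variable-length block move (register names and
     labels are illustrative):

         larl  %r1,.Lex_target        # address of the out-of-pool target
         ex    %r4,0(%r1)             # execute MVC with length from %r4
         ...
     .Lex_target:
         mvc   0(1,%r2),0(%r3)        # execute target insn emitted above  */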
9244
9245 /* Try to optimize prologue and epilogue further. */
875862bf 9246 s390_optimize_prologue ();
9247}
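
/* For reference, a hedged sketch of the chunked literal pool layout the
   pass above produces in the non-zarch case (labels and the constant
   are illustrative): the pool base register is reloaded so that the
   following code addresses its own chunk:

       bras  %r13,.LTN1              # reload pool base, branch over chunk
   .LT1:
   .LC5: .long 0x12345678            # pool chunk contents
   .LTN1:
       l     %r1,.LC5-.LT1(%r13)     # constant accessed via pool base  */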
7346ca58 9248
7346ca58 9249
875862bf 9250/* Initialize GCC target structure. */
f588eb9f 9251
875862bf 9252#undef TARGET_ASM_ALIGNED_HI_OP
9253#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
9254#undef TARGET_ASM_ALIGNED_DI_OP
9255#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
9256#undef TARGET_ASM_INTEGER
9257#define TARGET_ASM_INTEGER s390_assemble_integer
7346ca58 9258
875862bf 9259#undef TARGET_ASM_OPEN_PAREN
9260#define TARGET_ASM_OPEN_PAREN ""
f588eb9f 9261
875862bf 9262#undef TARGET_ASM_CLOSE_PAREN
9263#define TARGET_ASM_CLOSE_PAREN ""
7346ca58 9264
875862bf 9265#undef TARGET_DEFAULT_TARGET_FLAGS
9266#define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_FUSED_MADD)
9267#undef TARGET_HANDLE_OPTION
9268#define TARGET_HANDLE_OPTION s390_handle_option
f588eb9f 9269
875862bf 9270#undef TARGET_ENCODE_SECTION_INFO
9271#define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
7346ca58 9272
875862bf 9273#ifdef HAVE_AS_TLS
9274#undef TARGET_HAVE_TLS
9275#define TARGET_HAVE_TLS true
9276#endif
9277#undef TARGET_CANNOT_FORCE_CONST_MEM
9278#define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
7346ca58 9279
875862bf 9280#undef TARGET_DELEGITIMIZE_ADDRESS
9281#define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
7346ca58 9282
875862bf 9283#undef TARGET_RETURN_IN_MEMORY
9284#define TARGET_RETURN_IN_MEMORY s390_return_in_memory
f588eb9f 9285
875862bf 9286#undef TARGET_INIT_BUILTINS
9287#define TARGET_INIT_BUILTINS s390_init_builtins
9288#undef TARGET_EXPAND_BUILTIN
9289#define TARGET_EXPAND_BUILTIN s390_expand_builtin
f588eb9f 9290
875862bf 9291#undef TARGET_ASM_OUTPUT_MI_THUNK
9292#define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
9293#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
9294#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
7346ca58 9295
875862bf 9296#undef TARGET_SCHED_ADJUST_PRIORITY
9297#define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
9298#undef TARGET_SCHED_ISSUE_RATE
9299#define TARGET_SCHED_ISSUE_RATE s390_issue_rate
9300#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
9301#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
f588eb9f 9302
875862bf 9303#undef TARGET_CANNOT_COPY_INSN_P
9304#define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
9305#undef TARGET_RTX_COSTS
9306#define TARGET_RTX_COSTS s390_rtx_costs
9307#undef TARGET_ADDRESS_COST
9308#define TARGET_ADDRESS_COST s390_address_cost
f588eb9f 9309
875862bf 9310#undef TARGET_MACHINE_DEPENDENT_REORG
9311#define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
71597dac 9312
875862bf 9313#undef TARGET_VALID_POINTER_MODE
9314#define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
f588eb9f 9315
875862bf 9316#undef TARGET_BUILD_BUILTIN_VA_LIST
9317#define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
9318#undef TARGET_GIMPLIFY_VA_ARG_EXPR
9319#define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg
b33c41a1 9320
875862bf 9321#undef TARGET_PROMOTE_FUNCTION_ARGS
9322#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
9323#undef TARGET_PROMOTE_FUNCTION_RETURN
9324#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
9325#undef TARGET_PASS_BY_REFERENCE
9326#define TARGET_PASS_BY_REFERENCE s390_pass_by_reference
b33c41a1 9327
875862bf 9328#undef TARGET_FUNCTION_OK_FOR_SIBCALL
9329#define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
b33c41a1 9330
875862bf 9331#undef TARGET_FIXED_CONDITION_CODE_REGS
9332#define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs
b33c41a1 9333
875862bf 9334#undef TARGET_CC_MODES_COMPATIBLE
9335#define TARGET_CC_MODES_COMPATIBLE s390_cc_modes_compatible
b33c41a1 9336
1606e68a 9337#undef TARGET_INVALID_WITHIN_DOLOOP
9338#define TARGET_INVALID_WITHIN_DOLOOP hook_constcharptr_rtx_null
e75dabf7 9339
40af64cc 9340#ifdef HAVE_AS_TLS
9341#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
9342#define TARGET_ASM_OUTPUT_DWARF_DTPREL s390_output_dwarf_dtprel
9343#endif
9344
4257b08a 9345#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
eddcdde1 9346#undef TARGET_MANGLE_TYPE
9347#define TARGET_MANGLE_TYPE s390_mangle_type
4257b08a 9348#endif
9349
36868490 9350#undef TARGET_SCALAR_MODE_SUPPORTED_P
9351#define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p
9352
328d5423 9353#undef TARGET_SECONDARY_RELOAD
9354#define TARGET_SECONDARY_RELOAD s390_secondary_reload
9355
0ef89dfd 9356#undef TARGET_LIBGCC_CMP_RETURN_MODE
9357#define TARGET_LIBGCC_CMP_RETURN_MODE s390_libgcc_cmp_return_mode
9358
9359#undef TARGET_LIBGCC_SHIFT_COUNT_MODE
9360#define TARGET_LIBGCC_SHIFT_COUNT_MODE s390_libgcc_shift_count_mode
9361
875862bf 9362struct gcc_target targetm = TARGET_INITIALIZER;
f588eb9f 9363
5a5e802f 9364#include "gt-s390.h"