/* Subroutines used for code generation on IBM S/390 and zSeries
   Copyright (C) 1999-2016 Free Software Foundation, Inc.
   Contributed by Hartmut Penner (hpenner@de.ibm.com) and
                  Ulrich Weigand (uweigand@de.ibm.com) and
                  Andreas Krebbel (Andreas.Krebbel@de.ibm.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "target-globals.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "cfgloop.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "print-tree.h"
#include "stor-layout.h"
#include "varasm.h"
#include "calls.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "reload.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "lcm.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "debug.h"
#include "langhooks.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "params.h"
#include "opts.h"
#include "tree-pass.h"
#include "context.h"
#include "builtins.h"
#include "rtl-iter.h"
#include "intl.h"
#include "tm-constrs.h"

/* This file should be included last.  */
#include "target-def.h"

/* Remember the last target of s390_set_current_function.  */
static GTY(()) tree s390_previous_fndecl;

/* Define the specific costs for a given cpu.  */

struct processor_costs
{
  /* multiplication */
  const int m;        /* cost of an M instruction.  */
  const int mghi;     /* cost of an MGHI instruction.  */
  const int mh;       /* cost of an MH instruction.  */
  const int mhi;      /* cost of an MHI instruction.  */
  const int ml;       /* cost of an ML instruction.  */
  const int mr;       /* cost of an MR instruction.  */
  const int ms;       /* cost of an MS instruction.  */
  const int msg;      /* cost of an MSG instruction.  */
  const int msgf;     /* cost of an MSGF instruction.  */
  const int msgfr;    /* cost of an MSGFR instruction.  */
  const int msgr;     /* cost of an MSGR instruction.  */
  const int msr;      /* cost of an MSR instruction.  */
  const int mult_df;  /* cost of multiplication in DFmode.  */
  const int mxbr;     /* cost of an MXBR instruction (TFmode multiply).  */
  /* square root */
  const int sqxbr;    /* cost of square root in TFmode.  */
  const int sqdbr;    /* cost of square root in DFmode.  */
  const int sqebr;    /* cost of square root in SFmode.  */
  /* multiply and add */
  const int madbr;    /* cost of multiply and add in DFmode.  */
  const int maebr;    /* cost of multiply and add in SFmode.  */
  /* division */
  const int dxbr;     /* cost of a DXBR instruction.  */
  const int ddbr;     /* cost of a DDBR instruction.  */
  const int debr;     /* cost of a DEBR instruction.  */
  const int dlgr;     /* cost of a DLGR instruction.  */
  const int dlr;      /* cost of a DLR instruction.  */
  const int dr;       /* cost of a DR instruction.  */
  const int dsgfr;    /* cost of a DSGFR instruction.  */
  const int dsgr;     /* cost of a DSGR instruction.  */
};

#define s390_cost ((const struct processor_costs *)(s390_cost_pointer))

static const
struct processor_costs z900_cost =
{
  COSTS_N_INSNS (5),     /* M     */
  COSTS_N_INSNS (10),    /* MGHI  */
  COSTS_N_INSNS (5),     /* MH    */
  COSTS_N_INSNS (4),     /* MHI   */
  COSTS_N_INSNS (5),     /* ML    */
  COSTS_N_INSNS (5),     /* MR    */
  COSTS_N_INSNS (4),     /* MS    */
  COSTS_N_INSNS (15),    /* MSG   */
  COSTS_N_INSNS (7),     /* MSGF  */
  COSTS_N_INSNS (7),     /* MSGFR */
  COSTS_N_INSNS (10),    /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (7),     /* multiplication in DFmode */
  COSTS_N_INSNS (13),    /* MXBR  */
  COSTS_N_INSNS (136),   /* SQXBR */
  COSTS_N_INSNS (44),    /* SQDBR */
  COSTS_N_INSNS (35),    /* SQEBR */
  COSTS_N_INSNS (18),    /* MADBR */
  COSTS_N_INSNS (13),    /* MAEBR */
  COSTS_N_INSNS (134),   /* DXBR  */
  COSTS_N_INSNS (30),    /* DDBR  */
  COSTS_N_INSNS (27),    /* DEBR  */
  COSTS_N_INSNS (220),   /* DLGR  */
  COSTS_N_INSNS (34),    /* DLR   */
  COSTS_N_INSNS (34),    /* DR    */
  COSTS_N_INSNS (32),    /* DSGFR */
  COSTS_N_INSNS (32),    /* DSGR  */
};

static const
struct processor_costs z990_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR  */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR  */
  COSTS_N_INSNS (40),    /* DDBR  */
  COSTS_N_INSNS (26),    /* DEBR  */
  COSTS_N_INSNS (176),   /* DLGR  */
  COSTS_N_INSNS (31),    /* DLR   */
  COSTS_N_INSNS (31),    /* DR    */
  COSTS_N_INSNS (31),    /* DSGFR */
  COSTS_N_INSNS (31),    /* DSGR  */
};

static const
struct processor_costs z9_109_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR  */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR  */
  COSTS_N_INSNS (40),    /* DDBR  */
  COSTS_N_INSNS (26),    /* DEBR  */
  COSTS_N_INSNS (30),    /* DLGR  */
  COSTS_N_INSNS (23),    /* DLR   */
  COSTS_N_INSNS (23),    /* DR    */
  COSTS_N_INSNS (24),    /* DSGFR */
  COSTS_N_INSNS (24),    /* DSGR  */
};

static const
struct processor_costs z10_cost =
{
  COSTS_N_INSNS (10),    /* M     */
  COSTS_N_INSNS (10),    /* MGHI  */
  COSTS_N_INSNS (10),    /* MH    */
  COSTS_N_INSNS (10),    /* MHI   */
  COSTS_N_INSNS (10),    /* ML    */
  COSTS_N_INSNS (10),    /* MR    */
  COSTS_N_INSNS (10),    /* MS    */
  COSTS_N_INSNS (10),    /* MSG   */
  COSTS_N_INSNS (10),    /* MSGF  */
  COSTS_N_INSNS (10),    /* MSGFR */
  COSTS_N_INSNS (10),    /* MSGR  */
  COSTS_N_INSNS (10),    /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (50),    /* MXBR  */
  COSTS_N_INSNS (120),   /* SQXBR */
  COSTS_N_INSNS (52),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (111),   /* DXBR  */
  COSTS_N_INSNS (39),    /* DDBR  */
  COSTS_N_INSNS (32),    /* DEBR  */
  COSTS_N_INSNS (160),   /* DLGR  */
  COSTS_N_INSNS (71),    /* DLR   */
  COSTS_N_INSNS (71),    /* DR    */
  COSTS_N_INSNS (71),    /* DSGFR */
  COSTS_N_INSNS (71),    /* DSGR  */
};

static const
struct processor_costs z196_cost =
{
  COSTS_N_INSNS (7),     /* M     */
  COSTS_N_INSNS (5),     /* MGHI  */
  COSTS_N_INSNS (5),     /* MH    */
  COSTS_N_INSNS (5),     /* MHI   */
  COSTS_N_INSNS (7),     /* ML    */
  COSTS_N_INSNS (7),     /* MR    */
  COSTS_N_INSNS (6),     /* MS    */
  COSTS_N_INSNS (8),     /* MSG   */
  COSTS_N_INSNS (6),     /* MSGF  */
  COSTS_N_INSNS (6),     /* MSGFR */
  COSTS_N_INSNS (8),     /* MSGR  */
  COSTS_N_INSNS (6),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (40),    /* MXBR B+40 */
  COSTS_N_INSNS (100),   /* SQXBR B+100 */
  COSTS_N_INSNS (42),    /* SQDBR B+42 */
  COSTS_N_INSNS (28),    /* SQEBR B+28 */
  COSTS_N_INSNS (1),     /* MADBR B */
  COSTS_N_INSNS (1),     /* MAEBR B */
  COSTS_N_INSNS (101),   /* DXBR B+101 */
  COSTS_N_INSNS (29),    /* DDBR */
  COSTS_N_INSNS (22),    /* DEBR */
  COSTS_N_INSNS (160),   /* DLGR cracked */
  COSTS_N_INSNS (160),   /* DLR cracked */
  COSTS_N_INSNS (160),   /* DR expanded */
  COSTS_N_INSNS (160),   /* DSGFR cracked */
  COSTS_N_INSNS (160),   /* DSGR cracked */
};

static const
struct processor_costs zEC12_cost =
{
  COSTS_N_INSNS (7),     /* M     */
  COSTS_N_INSNS (5),     /* MGHI  */
  COSTS_N_INSNS (5),     /* MH    */
  COSTS_N_INSNS (5),     /* MHI   */
  COSTS_N_INSNS (7),     /* ML    */
  COSTS_N_INSNS (7),     /* MR    */
  COSTS_N_INSNS (6),     /* MS    */
  COSTS_N_INSNS (8),     /* MSG   */
  COSTS_N_INSNS (6),     /* MSGF  */
  COSTS_N_INSNS (6),     /* MSGFR */
  COSTS_N_INSNS (8),     /* MSGR  */
  COSTS_N_INSNS (6),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (40),    /* MXBR B+40 */
  COSTS_N_INSNS (100),   /* SQXBR B+100 */
  COSTS_N_INSNS (42),    /* SQDBR B+42 */
  COSTS_N_INSNS (28),    /* SQEBR B+28 */
  COSTS_N_INSNS (1),     /* MADBR B */
  COSTS_N_INSNS (1),     /* MAEBR B */
  COSTS_N_INSNS (131),   /* DXBR B+131 */
  COSTS_N_INSNS (29),    /* DDBR */
  COSTS_N_INSNS (22),    /* DEBR */
  COSTS_N_INSNS (160),   /* DLGR cracked */
  COSTS_N_INSNS (160),   /* DLR cracked */
  COSTS_N_INSNS (160),   /* DR expanded */
  COSTS_N_INSNS (160),   /* DSGFR cracked */
  COSTS_N_INSNS (160),   /* DSGR cracked */
};

static struct
{
  const char *const name;
  const enum processor_type processor;
  const struct processor_costs *cost;
}
const processor_table[] =
{
  { "g5",     PROCESSOR_9672_G5,     &z900_cost },
  { "g6",     PROCESSOR_9672_G6,     &z900_cost },
  { "z900",   PROCESSOR_2064_Z900,   &z900_cost },
  { "z990",   PROCESSOR_2084_Z990,   &z990_cost },
  { "z9-109", PROCESSOR_2094_Z9_109, &z9_109_cost },
  { "z9-ec",  PROCESSOR_2094_Z9_EC,  &z9_109_cost },
  { "z10",    PROCESSOR_2097_Z10,    &z10_cost },
  { "z196",   PROCESSOR_2817_Z196,   &z196_cost },
  { "zEC12",  PROCESSOR_2827_ZEC12,  &zEC12_cost },
  { "z13",    PROCESSOR_2964_Z13,    &zEC12_cost },
  { "native", PROCESSOR_NATIVE,      NULL }
};

extern int reload_completed;

/* Kept up to date using the SCHED_VARIABLE_ISSUE hook.  */
static rtx_insn *last_scheduled_insn;
#define MAX_SCHED_UNITS 3
static int last_scheduled_unit_distance[MAX_SCHED_UNITS];

/* The maximum score added for an instruction whose unit hasn't been
   in use for MAX_SCHED_MIX_DISTANCE steps.  Increase this value to
   give instruction mix scheduling more priority over instruction
   grouping.  */
#define MAX_SCHED_MIX_SCORE 8

/* The maximum distance up to which individual scores will be
   calculated.  Everything beyond this gives MAX_SCHED_MIX_SCORE.
   Increase this with the OOO window size of the machine.  */
#define MAX_SCHED_MIX_DISTANCE 100

/* Structure used to hold the components of an S/390 memory
   address.  A legitimate address on S/390 is of the general
   form
     base + index + displacement
   where any of the components is optional.

   base and index are registers of the class ADDR_REGS,
   displacement is an unsigned 12-bit immediate constant.  */

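/* For example, the assembler operand 40(%r3,%r2) is described here
   by disp = 40, indx = %r3 and base = %r2, all three fitting the
   constraints above.  */
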
struct s390_address
{
  rtx base;
  rtx indx;
  rtx disp;
  bool pointer;
  bool literal_pool;
};

/* The following structure is embedded in the machine
   specific part of struct function.  */

struct GTY (()) s390_frame_layout
{
  /* Offset within stack frame.  */
  HOST_WIDE_INT gprs_offset;
  HOST_WIDE_INT f0_offset;
  HOST_WIDE_INT f4_offset;
  HOST_WIDE_INT f8_offset;
  HOST_WIDE_INT backchain_offset;

  /* Numbers of the first and last gpr for which slots in the
     register save area are reserved.  */
  int first_save_gpr_slot;
  int last_save_gpr_slot;

  /* Location (FP register number) where GPRs (r0-r15) should
     be saved to.
      0 - does not need to be saved at all
     -1 - stack slot  */
#define SAVE_SLOT_NONE   0
#define SAVE_SLOT_STACK -1
  signed char gpr_save_slots[16];

  /* Number of first and last gpr to be saved, restored.  */
  int first_save_gpr;
  int first_restore_gpr;
  int last_save_gpr;
  int last_restore_gpr;

  /* Bits standing for floating point registers.  Set, if the
     respective register has to be saved.  Starting with reg 16 (f0)
     at the rightmost bit.
     Bit 15 14 13 12 11 10  9  8  7  6  5  4  3  2  1  0
     fpr 15 13 11  9 14 12 10  8  7  5  3  1  6  4  2  0
     reg 31 30 29 28 27 26 25 24 23 22 21 20 19 18 17 16  */
  unsigned int fpr_bitmap;

  /* Number of floating point registers f8-f15 which must be saved.  */
  int high_fprs;

  /* Set if return address needs to be saved.
     This flag is set by s390_return_addr_rtx if it could not use
     the initial value of r14 and therefore depends on r14 saved
     to the stack.  */
  bool save_return_addr_p;

  /* Size of stack frame.  */
  HOST_WIDE_INT frame_size;
};

/* Define the structure for the machine field in struct function.  */

struct GTY(()) machine_function
{
  struct s390_frame_layout frame_layout;

  /* Literal pool base register.  */
  rtx base_reg;

  /* True if we may need to perform branch splitting.  */
  bool split_branches_pending_p;

  /* True if the current function has a landing pad (set via
     s390_set_has_landing_pad_p).  */
  bool has_landing_pad_p;

  /* True if the current function may contain a tbegin clobbering
     FPRs.  */
  bool tbegin_p;

  /* For -fsplit-stack support: A stack local which holds a pointer to
     the stack arguments for a function with a variable number of
     arguments.  This is set at the start of the function and is used
     to initialize the overflow_arg_area field of the va_list
     structure.  */
  rtx split_stack_varargs_pointer;
};

/* Few accessor macros for struct cfun->machine->s390_frame_layout.  */

#define cfun_frame_layout (cfun->machine->frame_layout)
#define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
#define cfun_save_arg_fprs_p (!!(TARGET_64BIT                          \
                                 ? cfun_frame_layout.fpr_bitmap & 0x0f \
                                 : cfun_frame_layout.fpr_bitmap & 0x03))
#define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr_slot - \
  cfun_frame_layout.first_save_gpr_slot + 1) * UNITS_PER_LONG)
#define cfun_set_fpr_save(REGNO) (cfun->machine->frame_layout.fpr_bitmap |= \
                                  (1 << (REGNO - FPR0_REGNUM)))
#define cfun_fpr_save_p(REGNO) (!!(cfun->machine->frame_layout.fpr_bitmap & \
                                   (1 << (REGNO - FPR0_REGNUM))))
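/* For example, cfun_set_fpr_save (24) marks f8 (hard reg 24, hence
   bit 8 in fpr_bitmap according to the table above) as requiring a
   save slot.  */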
#define cfun_gpr_save_slot(REGNO) \
  cfun->machine->frame_layout.gpr_save_slots[REGNO]

/* Number of GPRs and FPRs used for argument passing.  */
#define GP_ARG_NUM_REG 5
#define FP_ARG_NUM_REG (TARGET_64BIT? 4 : 2)
#define VEC_ARG_NUM_REG 8

/* A couple of shortcuts.  */
#define CONST_OK_FOR_J(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'J', "J")
#define CONST_OK_FOR_K(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'K', "K")
#define CONST_OK_FOR_Os(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Os")
#define CONST_OK_FOR_Op(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Op")
#define CONST_OK_FOR_On(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "On")

#define REGNO_PAIR_OK(REGNO, MODE) \
  (HARD_REGNO_NREGS ((REGNO), (MODE)) == 1 || !((REGNO) & 1))

/* That's the read ahead of the dynamic branch prediction unit in
   bytes on a z10 (or higher) CPU.  */
#define PREDICT_DISTANCE (TARGET_Z10 ? 384 : 2048)

/* Indicate which ABI has been used for passing vector args.
   0 - no vector type arguments have been passed where the ABI is relevant
   1 - the old ABI has been used
   2 - a vector type argument has been passed either in a vector register
       or on the stack by value  */
static int s390_vector_abi = 0;

/* Set the vector ABI marker if TYPE is subject to the vector ABI
   switch.  The vector ABI affects only vector data types.  There are
   two aspects of the vector ABI relevant here:

   1. vectors >= 16 bytes have an alignment of 8 bytes with the new
      ABI and natural alignment with the old.

   2. vectors <= 16 bytes are passed in VRs or by value on the stack
      with the new ABI but by reference on the stack with the old.

   If ARG_P is true TYPE is used for a function argument or return
   value.  The ABI marker then is set for all vector data types.  If
   ARG_P is false only type 1 vectors are being checked.  */

static void
s390_check_type_for_vector_abi (const_tree type, bool arg_p, bool in_struct_p)
{
  static hash_set<const_tree> visited_types_hash;

  if (s390_vector_abi)
    return;

  if (type == NULL_TREE || TREE_CODE (type) == ERROR_MARK)
    return;

  if (visited_types_hash.contains (type))
    return;

  visited_types_hash.add (type);

  if (VECTOR_TYPE_P (type))
    {
      int type_size = int_size_in_bytes (type);

      /* Outside arguments only the alignment is changing and this
         only happens for vector types >= 16 bytes.  */
      if (!arg_p && type_size < 16)
        return;

      /* In arguments vector types > 16 are passed as before (GCC
         never enforced the bigger alignment for arguments which was
         required by the old vector ABI).  However, it might still be
         ABI relevant due to the changed alignment if it is a struct
         member.  */
      if (arg_p && type_size > 16 && !in_struct_p)
        return;

      s390_vector_abi = TARGET_VX_ABI ? 2 : 1;
    }
  else if (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
    {
      /* ARRAY_TYPE: Since with neither of the ABIs we have more than
         natural alignment there will never be ABI dependent padding
         in an array type.  That's why we do not set in_struct_p to
         true here.  */
      s390_check_type_for_vector_abi (TREE_TYPE (type), arg_p, in_struct_p);
    }
  else if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      tree arg_chain;

      /* Check the return type.  */
      s390_check_type_for_vector_abi (TREE_TYPE (type), true, false);

      for (arg_chain = TYPE_ARG_TYPES (type);
           arg_chain;
           arg_chain = TREE_CHAIN (arg_chain))
        s390_check_type_for_vector_abi (TREE_VALUE (arg_chain), true, false);
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          s390_check_type_for_vector_abi (TREE_TYPE (field), arg_p, true);
        }
    }
}
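
/* For illustration (an assumed example, not from the original file):
   a 32-byte vector type such as

     typedef double v4df __attribute__ ((vector_size (32)));

   falls under aspect 1 above: it is aligned to 8 bytes under the new
   vector ABI but naturally (32-byte) aligned under the old one, so
   encountering such a value where the ABI is relevant sets the
   marker.  */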


/* System z builtins.  */

#include "s390-builtins.h"

const unsigned int bflags_builtin[S390_BUILTIN_MAX + 1] =
  {
#undef B_DEF
#undef OB_DEF
#undef OB_DEF_VAR
#define B_DEF(NAME, PATTERN, ATTRS, BFLAGS, ...) BFLAGS,
#define OB_DEF(...)
#define OB_DEF_VAR(...)
#include "s390-builtins.def"
    0
  };

const unsigned int opflags_builtin[S390_BUILTIN_MAX + 1] =
  {
#undef B_DEF
#undef OB_DEF
#undef OB_DEF_VAR
#define B_DEF(NAME, PATTERN, ATTRS, BFLAGS, OPFLAGS, ...) OPFLAGS,
#define OB_DEF(...)
#define OB_DEF_VAR(...)
#include "s390-builtins.def"
    0
  };

const unsigned int bflags_overloaded_builtin[S390_OVERLOADED_BUILTIN_MAX + 1] =
  {
#undef B_DEF
#undef OB_DEF
#undef OB_DEF_VAR
#define B_DEF(...)
#define OB_DEF(NAME, FIRST_VAR_NAME, LAST_VAR_NAME, BFLAGS, ...) BFLAGS,
#define OB_DEF_VAR(...)
#include "s390-builtins.def"
    0
  };

const unsigned int
opflags_overloaded_builtin_var[S390_OVERLOADED_BUILTIN_VAR_MAX + 1] =
  {
#undef B_DEF
#undef OB_DEF
#undef OB_DEF_VAR
#define B_DEF(...)
#define OB_DEF(...)
#define OB_DEF_VAR(NAME, PATTERN, FLAGS, FNTYPE) FLAGS,
#include "s390-builtins.def"
    0
  };

tree s390_builtin_types[BT_MAX];
tree s390_builtin_fn_types[BT_FN_MAX];
tree s390_builtin_decls[S390_BUILTIN_MAX +
                        S390_OVERLOADED_BUILTIN_MAX +
                        S390_OVERLOADED_BUILTIN_VAR_MAX];

static enum insn_code const code_for_builtin[S390_BUILTIN_MAX + 1] = {
#undef B_DEF
#undef OB_DEF
#undef OB_DEF_VAR
#define B_DEF(NAME, PATTERN, ...) CODE_FOR_##PATTERN,
#define OB_DEF(...)
#define OB_DEF_VAR(...)

#include "s390-builtins.def"
  CODE_FOR_nothing
};
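
/* The tables above rely on an X-macro scheme: s390-builtins.def is
   included repeatedly with different definitions of B_DEF, OB_DEF and
   OB_DEF_VAR, each inclusion extracting one column of the builtin
   descriptions.  As a hypothetical sketch (the exact fields live in
   s390-builtins.def), an entry of the form

     B_DEF (tbegin, tbegin, returns_twice_attr, B_HTM, 0, BT_FN_INT_VOIDPTR)

   would contribute B_HTM to bflags_builtin, 0 to opflags_builtin and
   CODE_FOR_tbegin to code_for_builtin.  */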

static void
s390_init_builtins (void)
{
  /* These definitions are being used in s390-builtins.def.  */
  tree returns_twice_attr = tree_cons (get_identifier ("returns_twice"),
                                       NULL, NULL);
  tree noreturn_attr = tree_cons (get_identifier ("noreturn"), NULL, NULL);
  tree c_uint64_type_node;

  /* The uint64_type_node from tree.c is not compatible to the C99
     uint64_t data type.  What we want is c_uint64_type_node from
     c-common.c.  But since backend code is not supposed to interface
     with the frontend we recreate it here.  */
  if (TARGET_64BIT)
    c_uint64_type_node = long_unsigned_type_node;
  else
    c_uint64_type_node = long_long_unsigned_type_node;

#undef DEF_TYPE
#define DEF_TYPE(INDEX, BFLAGS, NODE, CONST_P)          \
  if (s390_builtin_types[INDEX] == NULL)                \
    s390_builtin_types[INDEX] = (!CONST_P) ?            \
      (NODE) : build_type_variant ((NODE), 1, 0);

#undef DEF_POINTER_TYPE
#define DEF_POINTER_TYPE(INDEX, BFLAGS, INDEX_BASE)     \
  if (s390_builtin_types[INDEX] == NULL)                \
    s390_builtin_types[INDEX] =                         \
      build_pointer_type (s390_builtin_types[INDEX_BASE]);

#undef DEF_DISTINCT_TYPE
#define DEF_DISTINCT_TYPE(INDEX, BFLAGS, INDEX_BASE)    \
  if (s390_builtin_types[INDEX] == NULL)                \
    s390_builtin_types[INDEX] =                         \
      build_distinct_type_copy (s390_builtin_types[INDEX_BASE]);

#undef DEF_VECTOR_TYPE
#define DEF_VECTOR_TYPE(INDEX, BFLAGS, INDEX_BASE, ELEMENTS)    \
  if (s390_builtin_types[INDEX] == NULL)                        \
    s390_builtin_types[INDEX] =                                 \
      build_vector_type (s390_builtin_types[INDEX_BASE], ELEMENTS);

#undef DEF_OPAQUE_VECTOR_TYPE
#define DEF_OPAQUE_VECTOR_TYPE(INDEX, BFLAGS, INDEX_BASE, ELEMENTS)     \
  if (s390_builtin_types[INDEX] == NULL)                                \
    s390_builtin_types[INDEX] =                                         \
      build_opaque_vector_type (s390_builtin_types[INDEX_BASE], ELEMENTS);

#undef DEF_FN_TYPE
#define DEF_FN_TYPE(INDEX, BFLAGS, args...)             \
  if (s390_builtin_fn_types[INDEX] == NULL)             \
    s390_builtin_fn_types[INDEX] =                      \
      build_function_type_list (args, NULL_TREE);
#undef DEF_OV_TYPE
#define DEF_OV_TYPE(...)
#include "s390-builtin-types.def"

#undef B_DEF
#define B_DEF(NAME, PATTERN, ATTRS, BFLAGS, OPFLAGS, FNTYPE)    \
  if (s390_builtin_decls[S390_BUILTIN_##NAME] == NULL)          \
    s390_builtin_decls[S390_BUILTIN_##NAME] =                   \
      add_builtin_function ("__builtin_" #NAME,                 \
                            s390_builtin_fn_types[FNTYPE],      \
                            S390_BUILTIN_##NAME,                \
                            BUILT_IN_MD,                        \
                            NULL,                               \
                            ATTRS);
#undef OB_DEF
#define OB_DEF(NAME, FIRST_VAR_NAME, LAST_VAR_NAME, BFLAGS, FNTYPE)     \
  if (s390_builtin_decls[S390_OVERLOADED_BUILTIN_##NAME + S390_BUILTIN_MAX] \
      == NULL)                                                          \
    s390_builtin_decls[S390_OVERLOADED_BUILTIN_##NAME + S390_BUILTIN_MAX] = \
      add_builtin_function ("__builtin_" #NAME,                         \
                            s390_builtin_fn_types[FNTYPE],              \
                            S390_OVERLOADED_BUILTIN_##NAME + S390_BUILTIN_MAX, \
                            BUILT_IN_MD,                                \
                            NULL,                                       \
                            0);
#undef OB_DEF_VAR
#define OB_DEF_VAR(...)
#include "s390-builtins.def"

}

/* Return true if ARG is appropriate as argument number ARGNUM of
   builtin DECL.  The operand flags from s390-builtins.def have to be
   passed as OP_FLAGS.  */
bool
s390_const_operand_ok (tree arg, int argnum, int op_flags, tree decl)
{
  if (O_UIMM_P (op_flags))
    {
      int bitwidths[] = { 1, 2, 3, 4, 5, 8, 12, 16, 32 };
      int bitwidth = bitwidths[op_flags - O_U1];

      if (!tree_fits_uhwi_p (arg)
          || tree_to_uhwi (arg) > ((unsigned HOST_WIDE_INT)1 << bitwidth) - 1)
        {
          error ("constant argument %d for builtin %qF is out of range (0.."
                 HOST_WIDE_INT_PRINT_UNSIGNED ")",
                 argnum, decl,
                 ((unsigned HOST_WIDE_INT)1 << bitwidth) - 1);
          return false;
        }
    }

  if (O_SIMM_P (op_flags))
    {
      int bitwidths[] = { 2, 3, 4, 5, 8, 12, 16, 32 };
      int bitwidth = bitwidths[op_flags - O_S2];

      if (!tree_fits_shwi_p (arg)
          || tree_to_shwi (arg) < -((HOST_WIDE_INT)1 << (bitwidth - 1))
          || tree_to_shwi (arg) > (((HOST_WIDE_INT)1 << (bitwidth - 1)) - 1))
        {
          error ("constant argument %d for builtin %qF is out of range ("
                 HOST_WIDE_INT_PRINT_DEC ".."
                 HOST_WIDE_INT_PRINT_DEC ")",
                 argnum, decl,
                 -((HOST_WIDE_INT)1 << (bitwidth - 1)),
                 ((HOST_WIDE_INT)1 << (bitwidth - 1)) - 1);
          return false;
        }
    }
  return true;
}
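
/* For example, assuming the O_U* flags are consecutive (as the table
   above suggests), an operand tagged O_U4 gets bitwidth 4 and thus
   accepts only the range 0..15; anything else is rejected with the
   error above.  */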

/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
                     machine_mode mode ATTRIBUTE_UNUSED,
                     int ignore ATTRIBUTE_UNUSED)
{
#define MAX_ARGS 6

  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  rtx op[MAX_ARGS], pat;
  int arity;
  bool nonvoid;
  tree arg;
  call_expr_arg_iterator iter;
  unsigned int all_op_flags = opflags_for_builtin (fcode);
  machine_mode last_vec_mode = VOIDmode;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr,
               "s390_expand_builtin, code = %4d, %s, bflags = 0x%x\n",
               (int)fcode, IDENTIFIER_POINTER (DECL_NAME (fndecl)),
               bflags_for_builtin (fcode));
    }

  if (S390_USE_TARGET_ATTRIBUTE)
    {
      unsigned int bflags;

      bflags = bflags_for_builtin (fcode);
      if ((bflags & B_HTM) && !TARGET_HTM)
        {
          error ("Builtin %qF is not supported without -mhtm "
                 "(default with -march=zEC12 and higher).", fndecl);
          return const0_rtx;
        }
      if ((bflags & B_VX) && !TARGET_VX)
        {
          error ("Builtin %qF is not supported without -mvx "
                 "(default with -march=z13 and higher).", fndecl);
          return const0_rtx;
        }
    }
  if (fcode >= S390_OVERLOADED_BUILTIN_VAR_OFFSET
      && fcode < S390_ALL_BUILTIN_MAX)
    {
      gcc_unreachable ();
    }
  else if (fcode < S390_OVERLOADED_BUILTIN_OFFSET)
    {
      icode = code_for_builtin[fcode];
      /* Set a flag in the machine specific cfun part in order to support
         saving/restoring of FPRs.  */
      if (fcode == S390_BUILTIN_tbegin || fcode == S390_BUILTIN_tbegin_retry)
        cfun->machine->tbegin_p = true;
    }
  else if (fcode < S390_OVERLOADED_BUILTIN_VAR_OFFSET)
    {
      error ("Unresolved overloaded builtin");
      return const0_rtx;
    }
  else
    internal_error ("bad builtin fcode");

  if (icode == 0)
    internal_error ("bad builtin icode");

  nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;

  if (nonvoid)
    {
      machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
          || GET_MODE (target) != tmode
          || !(*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);

      /* There are builtins (e.g. vec_promote) with no vector
         arguments but an element selector.  So we have to also look
         at the vector return type when emitting the modulo
         operation.  */
      if (VECTOR_MODE_P (insn_data[icode].operand[0].mode))
        last_vec_mode = insn_data[icode].operand[0].mode;
    }

  arity = 0;
  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
    {
      rtx tmp_rtx;
      const struct insn_operand_data *insn_op;
      unsigned int op_flags = all_op_flags & ((1 << O_SHIFT) - 1);

      all_op_flags = all_op_flags >> O_SHIFT;

      if (arg == error_mark_node)
        return NULL_RTX;
      if (arity >= MAX_ARGS)
        return NULL_RTX;

      if (O_IMM_P (op_flags)
          && TREE_CODE (arg) != INTEGER_CST)
        {
          error ("constant value required for builtin %qF argument %d",
                 fndecl, arity + 1);
          return const0_rtx;
        }

      if (!s390_const_operand_ok (arg, arity + 1, op_flags, fndecl))
        return const0_rtx;

      insn_op = &insn_data[icode].operand[arity + nonvoid];
      op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL);

      /* expand_expr truncates constants to the target mode only if it
         is "convenient".  However, our checks below rely on this
         being done.  */
      if (CONST_INT_P (op[arity])
          && SCALAR_INT_MODE_P (insn_op->mode)
          && GET_MODE (op[arity]) != insn_op->mode)
        op[arity] = GEN_INT (trunc_int_for_mode (INTVAL (op[arity]),
                                                 insn_op->mode));

      /* Wrap the expanded RTX for pointer types into a MEM expr with
         the proper mode.  This allows us to use e.g. (match_operand
         "memory_operand"..) in the insn patterns instead of (mem
         (match_operand "address_operand)).  This is helpful for
         patterns not just accepting MEMs.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg))
          && insn_op->predicate != address_operand)
        op[arity] = gen_rtx_MEM (insn_op->mode, op[arity]);

      /* Expand the modulo operation required on element selectors.  */
      if (op_flags == O_ELEM)
        {
          gcc_assert (last_vec_mode != VOIDmode);
          op[arity] = simplify_expand_binop (SImode, code_to_optab (AND),
                                             op[arity],
                                             GEN_INT (GET_MODE_NUNITS (last_vec_mode) - 1),
                                             NULL_RTX, 1, OPTAB_DIRECT);
        }
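
      /* E.g. for a V4SI vector the selector is ANDed with 3 here, so
         an out-of-range element number wraps around instead of
         addressing a nonexistent element.  */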

      /* Record the vector mode used for an element selector.  This assumes:
         1. There is no builtin with two different vector modes and an element selector
         2. The element selector comes after the vector type it is referring to.
         This is currently true for all the builtins but FIXME: we
         should better check for that.  */
      if (VECTOR_MODE_P (insn_op->mode))
        last_vec_mode = insn_op->mode;

      if (insn_op->predicate (op[arity], insn_op->mode))
        {
          arity++;
          continue;
        }

      if (MEM_P (op[arity])
          && insn_op->predicate == memory_operand
          && (GET_MODE (XEXP (op[arity], 0)) == Pmode
              || GET_MODE (XEXP (op[arity], 0)) == VOIDmode))
        {
          op[arity] = replace_equiv_address (op[arity],
                                             copy_to_mode_reg (Pmode,
                                               XEXP (op[arity], 0)));
        }
      /* Some of the builtins require different modes/types than the
         pattern in order to implement a specific API.  Instead of
         adding many expanders which do the mode change we do it here.
         E.g. s390_vec_add_u128 required to have vector unsigned char
         arguments is mapped to addti3.  */
      else if (insn_op->mode != VOIDmode
               && GET_MODE (op[arity]) != VOIDmode
               && GET_MODE (op[arity]) != insn_op->mode
               && ((tmp_rtx = simplify_gen_subreg (insn_op->mode, op[arity],
                                                   GET_MODE (op[arity]), 0))
                   != NULL_RTX))
        {
          op[arity] = tmp_rtx;
        }
      else if (GET_MODE (op[arity]) == insn_op->mode
               || GET_MODE (op[arity]) == VOIDmode
               || (insn_op->predicate == address_operand
                   && GET_MODE (op[arity]) == Pmode))
        {
          /* An address_operand usually has VOIDmode in the expander
             so we cannot use this.  */
          machine_mode target_mode =
            (insn_op->predicate == address_operand
             ? Pmode : insn_op->mode);
          op[arity] = copy_to_mode_reg (target_mode, op[arity]);
        }

      if (!insn_op->predicate (op[arity], insn_op->mode))
        {
          error ("Invalid argument %d for builtin %qF", arity + 1, fndecl);
          return const0_rtx;
        }
      arity++;
    }

  switch (arity)
    {
    case 0:
      pat = GEN_FCN (icode) (target);
      break;
    case 1:
      if (nonvoid)
        pat = GEN_FCN (icode) (target, op[0]);
      else
        pat = GEN_FCN (icode) (op[0]);
      break;
    case 2:
      if (nonvoid)
        pat = GEN_FCN (icode) (target, op[0], op[1]);
      else
        pat = GEN_FCN (icode) (op[0], op[1]);
      break;
    case 3:
      if (nonvoid)
        pat = GEN_FCN (icode) (target, op[0], op[1], op[2]);
      else
        pat = GEN_FCN (icode) (op[0], op[1], op[2]);
      break;
    case 4:
      if (nonvoid)
        pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3]);
      else
        pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
      break;
    case 5:
      if (nonvoid)
        pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4]);
      else
        pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
      break;
    case 6:
      if (nonvoid)
        pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4], op[5]);
      else
        pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4], op[5]);
      break;
    default:
      gcc_unreachable ();
    }
  if (!pat)
    return NULL_RTX;
  emit_insn (pat);

  if (nonvoid)
    return target;
  else
    return const0_rtx;
}


static const int s390_hotpatch_hw_max = 1000000;
static int s390_hotpatch_hw_before_label = 0;
static int s390_hotpatch_hw_after_label = 0;

/* Check whether the hotpatch attribute is applied to a function and, if it has
   an argument, the argument is valid.  */

static tree
s390_handle_hotpatch_attribute (tree *node, tree name, tree args,
                                int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
{
  tree expr;
  tree expr2;
  int err;

  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
               name);
      *no_add_attrs = true;
    }
  if (args != NULL && TREE_CHAIN (args) != NULL)
    {
      expr = TREE_VALUE (args);
      expr2 = TREE_VALUE (TREE_CHAIN (args));
    }
  if (args == NULL || TREE_CHAIN (args) == NULL)
    err = 1;
  else if (TREE_CODE (expr) != INTEGER_CST
           || !INTEGRAL_TYPE_P (TREE_TYPE (expr))
           || wi::gtu_p (expr, s390_hotpatch_hw_max))
    err = 1;
  else if (TREE_CODE (expr2) != INTEGER_CST
           || !INTEGRAL_TYPE_P (TREE_TYPE (expr2))
           || wi::gtu_p (expr2, s390_hotpatch_hw_max))
    err = 1;
  else
    err = 0;
  if (err)
    {
      error ("requested %qE attribute is not a comma separated pair of"
             " non-negative integer constants or too large (max. %d)", name,
             s390_hotpatch_hw_max);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
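
/* Usage sketch (illustrative): a declaration such as

     void foo (void) __attribute__ ((hotpatch (1, 2)));

   requests 1 halfword of padding before and 2 halfwords after foo's
   label, giving a live-patching tool room to redirect the function.  */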

/* Expand the s390_vector_bool type attribute.  */

static tree
s390_handle_vectorbool_attribute (tree *node, tree name ATTRIBUTE_UNUSED,
                                  tree args ATTRIBUTE_UNUSED,
                                  int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
{
  tree type = *node, result = NULL_TREE;
  machine_mode mode;

  while (POINTER_TYPE_P (type)
         || TREE_CODE (type) == FUNCTION_TYPE
         || TREE_CODE (type) == METHOD_TYPE
         || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  mode = TYPE_MODE (type);
  switch (mode)
    {
    case DImode: case V2DImode: result = s390_builtin_types[BT_BV2DI]; break;
    case SImode: case V4SImode: result = s390_builtin_types[BT_BV4SI]; break;
    case HImode: case V8HImode: result = s390_builtin_types[BT_BV8HI]; break;
    case QImode: case V16QImode: result = s390_builtin_types[BT_BV16QI];
    default: break;
    }

  *no_add_attrs = true;  /* No need to hang on to the attribute.  */

  if (result)
    *node = lang_hooks.types.reconstruct_complex_type (*node, result);

  return NULL_TREE;
}

static const struct attribute_spec s390_attribute_table[] = {
  { "hotpatch", 2, 2, true, false, false, s390_handle_hotpatch_attribute, false },
  { "s390_vector_bool", 0, 0, false, true, false, s390_handle_vectorbool_attribute, true },
  /* End element.  */
  { NULL, 0, 0, false, false, false, NULL, false }
};

/* Return the alignment for LABEL.  We default to the -falign-labels
   value except for the literal pool base label.  */
int
s390_label_align (rtx_insn *label)
{
  rtx_insn *prev_insn = prev_active_insn (label);
  rtx set, src;

  if (prev_insn == NULL_RTX)
    goto old;

  set = single_set (prev_insn);

  if (set == NULL_RTX)
    goto old;

  src = SET_SRC (set);

  /* Don't align literal pool base labels.  */
  if (GET_CODE (src) == UNSPEC
      && XINT (src, 1) == UNSPEC_MAIN_BASE)
    return 0;

 old:
  return align_labels_log;
}

static machine_mode
s390_libgcc_cmp_return_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

static machine_mode
s390_libgcc_shift_count_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

static machine_mode
s390_unwind_word_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

/* Return true if the back end supports mode MODE.  */
static bool
s390_scalar_mode_supported_p (machine_mode mode)
{
  /* In contrast to the default implementation reject TImode constants on 31bit
     TARGET_ZARCH for ABI compliance.  */
  if (!TARGET_64BIT && TARGET_ZARCH && mode == TImode)
    return false;

  if (DECIMAL_FLOAT_MODE_P (mode))
    return default_decimal_float_supported_p ();

  return default_scalar_mode_supported_p (mode);
}

1196static bool
1197s390_vector_mode_supported_p (machine_mode mode)
1198{
1199 machine_mode inner;
1200
1201 if (!VECTOR_MODE_P (mode)
1202 || !TARGET_VX
1203 || GET_MODE_SIZE (mode) > 16)
1204 return false;
1205
1206 inner = GET_MODE_INNER (mode);
1207
1208 switch (inner)
1209 {
1210 case QImode:
1211 case HImode:
1212 case SImode:
1213 case DImode:
1214 case TImode:
1215 case SFmode:
1216 case DFmode:
1217 case TFmode:
1218 return true;
1219 default:
1220 return false;
1221 }
1222}
1223
1e639cb0 1224/* Set the has_landing_pad_p flag in struct machine_function to VALUE. */
1225
1226void
1227s390_set_has_landing_pad_p (bool value)
1228{
1229 cfun->machine->has_landing_pad_p = value;
1230}
6902d973 1231
/* If two condition code modes are compatible, return a condition code
   mode which is compatible with both.  Otherwise, return
   VOIDmode.  */

static machine_mode
s390_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
  if (m1 == m2)
    return m1;

  switch (m1)
    {
    case CCZmode:
      if (m2 == CCUmode || m2 == CCTmode || m2 == CCZ1mode
          || m2 == CCSmode || m2 == CCSRmode || m2 == CCURmode)
        return m2;
      return VOIDmode;

    case CCSmode:
    case CCUmode:
    case CCTmode:
    case CCSRmode:
    case CCURmode:
    case CCZ1mode:
      if (m2 == CCZmode)
        return m1;

      return VOIDmode;

    default:
      return VOIDmode;
    }
  return VOIDmode;
}

/* Return true if SET either doesn't set the CC register, or else
   the source and destination have matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.  */

static bool
s390_match_ccmode_set (rtx set, machine_mode req_mode)
{
  machine_mode set_mode;

  gcc_assert (GET_CODE (set) == SET);

  if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
    return 1;

  set_mode = GET_MODE (SET_DEST (set));
  switch (set_mode)
    {
    case CCSmode:
    case CCSRmode:
    case CCUmode:
    case CCURmode:
    case CCLmode:
    case CCL1mode:
    case CCL2mode:
    case CCL3mode:
    case CCT1mode:
    case CCT2mode:
    case CCT3mode:
    case CCVEQmode:
    case CCVHmode:
    case CCVHUmode:
    case CCVFHmode:
    case CCVFHEmode:
      if (req_mode != set_mode)
        return 0;
      break;

    case CCZmode:
      if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
          && req_mode != CCSRmode && req_mode != CCURmode)
        return 0;
      break;

    case CCAPmode:
    case CCANmode:
      if (req_mode != CCAmode)
        return 0;
      break;

    default:
      gcc_unreachable ();
    }

  return (GET_MODE (SET_SRC (set)) == set_mode);
}

/* Return true if every SET in INSN that sets the CC register
   has source and destination with matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.
   If REQ_MODE is VOIDmode, always return false.  */

bool
s390_match_ccmode (rtx_insn *insn, machine_mode req_mode)
{
  int i;

  /* s390_tm_ccmode returns VOIDmode to indicate failure.  */
  if (req_mode == VOIDmode)
    return false;

  if (GET_CODE (PATTERN (insn)) == SET)
    return s390_match_ccmode_set (PATTERN (insn), req_mode);

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      {
        rtx set = XVECEXP (PATTERN (insn), 0, i);
        if (GET_CODE (set) == SET)
          if (!s390_match_ccmode_set (set, req_mode))
            return false;
      }

  return true;
}

/* If a test-under-mask instruction can be used to implement
   (compare (and ... OP1) OP2), return the CC mode required
   to do that.  Otherwise, return VOIDmode.
   MIXED is true if the instruction can distinguish between
   CC1 and CC2 for mixed selected bits (TMxx), it is false
   if the instruction cannot (TM).  */

machine_mode
s390_tm_ccmode (rtx op1, rtx op2, bool mixed)
{
  int bit0, bit1;

  /* ??? Fixme: should work on CONST_WIDE_INT as well.  */
  if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
    return VOIDmode;

  /* Selected bits all zero: CC0.
     e.g.: int a; if ((a & (16 + 128)) == 0) */
  if (INTVAL (op2) == 0)
    return CCTmode;

  /* Selected bits all one: CC3.
     e.g.: int a; if ((a & (16 + 128)) == 16 + 128) */
  if (INTVAL (op2) == INTVAL (op1))
    return CCT3mode;

  /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2.  e.g.:
     int a;
     if ((a & (16 + 128)) == 16) -> CCT1
     if ((a & (16 + 128)) == 128) -> CCT2  */
  if (mixed)
    {
      bit1 = exact_log2 (INTVAL (op2));
      bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
      if (bit0 != -1 && bit1 != -1)
        return bit0 > bit1 ? CCT1mode : CCT2mode;
    }

  return VOIDmode;
}
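
/* Worked example for the mixed case above: with op1 = 16 + 128 = 144
   and op2 = 16 we get bit1 = exact_log2 (16) = 4 and
   bit0 = exact_log2 (144 ^ 16) = exact_log2 (128) = 7, so
   bit0 > bit1 yields CCT1mode, matching the comment.  */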

/* Given a comparison code OP (EQ, NE, etc.) and the operands
   OP0 and OP1 of a COMPARE, return the mode to be used for the
   comparison.  */

machine_mode
s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
{
  if (TARGET_VX
      && register_operand (op0, DFmode)
      && register_operand (op1, DFmode))
    {
      /* LT, LE, UNGT, UNGE require swapping OP0 and OP1.  Either
         s390_emit_compare or s390_canonicalize_comparison will take
         care of it.  */
      switch (code)
        {
        case EQ:
        case NE:
          return CCVEQmode;
        case GT:
        case UNLE:
          return CCVFHmode;
        case GE:
        case UNLT:
          return CCVFHEmode;
        default:
          ;
        }
    }

  switch (code)
    {
    case EQ:
    case NE:
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
        return CCAPmode;
      if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
           || GET_CODE (op1) == NEG)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCLmode;

      if (GET_CODE (op0) == AND)
        {
          /* Check whether we can potentially do it via TM.  */
          machine_mode ccmode;
          ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
          if (ccmode != VOIDmode)
            {
              /* Relax CCTmode to CCZmode to allow fall-back to AND
                 if that turns out to be beneficial.  */
              return ccmode == CCTmode ? CCZmode : ccmode;
            }
        }

      if (register_operand (op0, HImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
        return CCT3mode;
      if (register_operand (op0, QImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
        return CCT3mode;

      return CCZmode;

    case LE:
    case LT:
    case GE:
    case GT:
      /* The only overflow condition of NEG and ABS happens when
         -INT_MAX is used as parameter, which stays negative.  So
         we have an overflow from a positive value to a negative.
         Using CCAP mode the resulting cc can be used for comparisons.  */
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;

      /* If constants are involved in an add instruction it is possible to use
         the resulting cc for comparisons with zero.  Knowing the sign of the
         constant the overflow behavior gets predictable.  e.g.:
           int a, b; if ((b = a + c) > 0)
         with c as a constant value: c < 0 -> CCAN and c >= 0 -> CCAP  */
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && (CONST_OK_FOR_K (INTVAL (XEXP (op0, 1)))
              || (CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'O', "Os")
                  /* Avoid INT32_MIN on 32 bit.  */
                  && (!TARGET_ZARCH || INTVAL (XEXP (op0, 1)) != -0x7fffffff - 1))))
        {
          if (INTVAL (XEXP (op0, 1)) < 0)
            return CCANmode;
          else
            return CCAPmode;
        }
      /* Fall through.  */
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case UNLE:
    case UNLT:
    case UNGE:
    case UNGT:
    case LTGT:
      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCSRmode;
      return CCSmode;

    case LTU:
    case GEU:
      if (GET_CODE (op0) == PLUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL1mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    case LEU:
    case GTU:
      if (GET_CODE (op0) == MINUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL2mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    default:
      gcc_unreachable ();
    }
}

/* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
   that we can implement more efficiently.  */

static void
s390_canonicalize_comparison (int *code, rtx *op0, rtx *op1,
                              bool op0_preserve_value)
{
  if (op0_preserve_value)
    return;

  /* Convert ZERO_EXTRACT back to AND to enable TM patterns.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == ZERO_EXTRACT
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && GET_CODE (XEXP (*op0, 2)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
      HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
      HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));

      if (len > 0 && len < modesize
          && pos >= 0 && pos + len <= modesize
          && modesize <= HOST_BITS_PER_WIDE_INT)
        {
          unsigned HOST_WIDE_INT block;
          block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
          block <<= modesize - pos - len;

          *op0 = gen_rtx_AND (GET_MODE (inner), inner,
                              gen_int_mode (block, GET_MODE (inner)));
        }
    }

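  /* Worked example: for an SImode INNER with len = 2 and pos = 6 the
     mask is block = 3 << (32 - 6 - 2) = 0x03000000, i.e. the two
     extracted bits counted from the most significant end, as the TM
     patterns expect.  */
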
  /* Narrow AND of memory against immediate to enable TM.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == AND
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      rtx mask = XEXP (*op0, 1);

      /* Ignore paradoxical SUBREGs if all extra bits are masked out.  */
      if (GET_CODE (inner) == SUBREG
          && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
          && (GET_MODE_SIZE (GET_MODE (inner))
              >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
          && ((INTVAL (mask)
               & GET_MODE_MASK (GET_MODE (inner))
               & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
              == 0))
        inner = SUBREG_REG (inner);

      /* Do not change volatile MEMs.  */
      if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
        {
          int part = s390_single_part (XEXP (*op0, 1),
                                       GET_MODE (inner), QImode, 0);
          if (part >= 0)
            {
              mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
              inner = adjust_address_nv (inner, QImode, part);
              *op0 = gen_rtx_AND (QImode, inner, mask);
            }
        }
    }

  /* Narrow comparisons against 0xffff to HImode if possible.  */
  if ((*code == EQ || *code == NE)
      && GET_CODE (*op1) == CONST_INT
      && INTVAL (*op1) == 0xffff
      && SCALAR_INT_MODE_P (GET_MODE (*op0))
      && (nonzero_bits (*op0, GET_MODE (*op0))
          & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
    {
      *op0 = gen_lowpart (HImode, *op0);
      *op1 = constm1_rtx;
    }

  /* Remove redundant UNSPEC_STRCMPCC_TO_INT conversions if possible.  */
  if (GET_CODE (*op0) == UNSPEC
      && XINT (*op0, 1) == UNSPEC_STRCMPCC_TO_INT
      && XVECLEN (*op0, 0) == 1
      && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
      && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
      && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
      && *op1 == const0_rtx)
    {
      enum rtx_code new_code = UNKNOWN;
      switch (*code)
        {
        case EQ: new_code = EQ;  break;
        case NE: new_code = NE;  break;
        case LT: new_code = GTU; break;
        case GT: new_code = LTU; break;
        case LE: new_code = GEU; break;
        case GE: new_code = LEU; break;
        default: break;
        }

      if (new_code != UNKNOWN)
        {
          *op0 = XVECEXP (*op0, 0, 0);
          *code = new_code;
        }
    }

  /* Remove redundant UNSPEC_CC_TO_INT conversions if possible.  */
  if (GET_CODE (*op0) == UNSPEC
      && XINT (*op0, 1) == UNSPEC_CC_TO_INT
      && XVECLEN (*op0, 0) == 1
      && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
      && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
      && CONST_INT_P (*op1))
    {
      enum rtx_code new_code = UNKNOWN;
      switch (GET_MODE (XVECEXP (*op0, 0, 0)))
        {
        case CCZmode:
        case CCRAWmode:
          switch (*code)
            {
            case EQ: new_code = EQ; break;
            case NE: new_code = NE; break;
            default: break;
            }
          break;
        default: break;
        }

      if (new_code != UNKNOWN)
        {
          /* For CCRAWmode put the required cc mask into the second
             operand.  */
          if (GET_MODE (XVECEXP (*op0, 0, 0)) == CCRAWmode
              && INTVAL (*op1) >= 0 && INTVAL (*op1) <= 3)
            *op1 = gen_rtx_CONST_INT (VOIDmode, 1 << (3 - INTVAL (*op1)));
          *op0 = XVECEXP (*op0, 0, 0);
          *code = new_code;
        }
    }

  /* Simplify cascaded EQ, NE with const0_rtx.  */
  if ((*code == NE || *code == EQ)
      && (GET_CODE (*op0) == EQ || GET_CODE (*op0) == NE)
      && GET_MODE (*op0) == SImode
      && GET_MODE (XEXP (*op0, 0)) == CCZ1mode
      && REG_P (XEXP (*op0, 0))
      && XEXP (*op0, 1) == const0_rtx
      && *op1 == const0_rtx)
    {
      if ((*code == EQ && GET_CODE (*op0) == NE)
          || (*code == NE && GET_CODE (*op0) == EQ))
        *code = EQ;
      else
        *code = NE;
      *op0 = XEXP (*op0, 0);
    }

  /* Prefer register over memory as first operand.  */
  if (MEM_P (*op0) && REG_P (*op1))
    {
      rtx tem = *op0; *op0 = *op1; *op1 = tem;
      *code = (int)swap_condition ((enum rtx_code)*code);
    }

  /* Using the scalar variants of vector instructions for 64 bit FP
     comparisons might require swapping the operands.  */
  if (TARGET_VX
      && register_operand (*op0, DFmode)
      && register_operand (*op1, DFmode)
      && (*code == LT || *code == LE || *code == UNGT || *code == UNGE))
    {
      rtx tmp;

      switch (*code)
        {
        case LT:   *code = GT;   break;
        case LE:   *code = GE;   break;
        case UNGT: *code = UNLE; break;
        case UNGE: *code = UNLT; break;
        default: ;
        }
      tmp = *op0; *op0 = *op1; *op1 = tmp;
    }
}

/* Helper function for s390_emit_compare.  If possible emit a 64 bit
   FP compare using the single element variant of vector instructions.
   Replace CODE with the comparison code to be used in the CC reg
   compare and return the condition code register RTX in CC.  */

static bool
s390_expand_vec_compare_scalar (enum rtx_code *code, rtx cmp1, rtx cmp2,
                                rtx *cc)
{
  machine_mode cmp_mode;
  bool swap_p = false;

  switch (*code)
    {
    case EQ:   cmp_mode = CCVEQmode;  break;
    case NE:   cmp_mode = CCVEQmode;  break;
    case GT:   cmp_mode = CCVFHmode;  break;
    case GE:   cmp_mode = CCVFHEmode; break;
    case UNLE: cmp_mode = CCVFHmode;  break;
    case UNLT: cmp_mode = CCVFHEmode; break;
    case LT:   cmp_mode = CCVFHmode;  *code = GT;   swap_p = true; break;
    case LE:   cmp_mode = CCVFHEmode; *code = GE;   swap_p = true; break;
    case UNGE: cmp_mode = CCVFHmode;  *code = UNLE; swap_p = true; break;
    case UNGT: cmp_mode = CCVFHEmode; *code = UNLT; swap_p = true; break;
    default: return false;
    }

  if (swap_p)
    {
      rtx tmp = cmp2;
      cmp2 = cmp1;
      cmp1 = tmp;
    }
  *cc = gen_rtx_REG (cmp_mode, CC_REGNUM);
  emit_insn (gen_rtx_PARALLEL (VOIDmode,
               gen_rtvec (2,
                          gen_rtx_SET (*cc,
                                       gen_rtx_COMPARE (cmp_mode, cmp1,
                                                        cmp2)),
                          gen_rtx_CLOBBER (VOIDmode,
                                           gen_rtx_SCRATCH (V2DImode)))));
  return true;
}


1768 OP0 CODE OP1. Return the correct condition RTL to be placed in
1769 the IF_THEN_ELSE of the conditional branch testing the result. */
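/* A worked example (illustrative): s390_emit_compare (GT, a, b) emits
   (set (reg CC_REGNUM) (compare a b)) in the CC mode chosen by
   s390_select_ccmode and returns (gt (reg CC_REGNUM) (const_int 0)).  */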
1770
1771rtx
1772s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
1773{
3754d046 1774 machine_mode mode = s390_select_ccmode (code, op0, op1);
8e58aded 1775 rtx cc;
0d656e8b 1776
26233f43 1777 if (TARGET_VX
1778 && register_operand (op0, DFmode)
1779 && register_operand (op1, DFmode)
1780 && s390_expand_vec_compare_scalar (&code, op0, op1, &cc))
1781 {
1782 /* Work has been done by s390_expand_vec_compare_scalar already. */
1783 }
1784 else if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
8e58aded 1785 {
26233f43 1786 /* Do not output a redundant compare instruction if a
1787 compare_and_swap pattern already computed the result and the
1788 machine modes are compatible. */
8e58aded 1789 gcc_assert (s390_cc_modes_compatible (GET_MODE (op0), mode)
1790 == GET_MODE (op0));
1791 cc = op0;
1792 }
891e3096 1793 else
1794 {
8e58aded 1795 cc = gen_rtx_REG (mode, CC_REGNUM);
d1f9b275 1796 emit_insn (gen_rtx_SET (cc, gen_rtx_COMPARE (mode, op0, op1)));
891e3096 1797 }
8e58aded 1798
ffead1ca 1799 return gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
0d656e8b 1800}
1801
8deb3959 1802/* Emit a SImode compare and swap instruction setting MEM to NEW_RTX if the
db1f11e3 1803 current contents of MEM match CMP; the previous contents end up in OLD.
1804 Return the correct condition RTL to be placed in the IF_THEN_ELSE of the
1805 conditional branch testing the result. */
1806
1807static rtx
8c753480 1808s390_emit_compare_and_swap (enum rtx_code code, rtx old, rtx mem,
1809 rtx cmp, rtx new_rtx)
db1f11e3 1810{
8c753480 1811 emit_insn (gen_atomic_compare_and_swapsi_internal (old, mem, cmp, new_rtx));
1812 return s390_emit_compare (code, gen_rtx_REG (CCZ1mode, CC_REGNUM),
1813 const0_rtx);
db1f11e3 1814}
1815
5ada7a14 1816/* Emit a jump instruction to TARGET and return it. If COND is
1817 NULL_RTX, emit an unconditional jump, else a conditional jump under
1818 condition COND. */
0d656e8b 1819
93e0956b 1820rtx_insn *
0d656e8b 1821s390_emit_jump (rtx target, rtx cond)
1822{
1823 rtx insn;
1824
1825 target = gen_rtx_LABEL_REF (VOIDmode, target);
1826 if (cond)
1827 target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);
1828
d1f9b275 1829 insn = gen_rtx_SET (pc_rtx, target);
5ada7a14 1830 return emit_jump_insn (insn);
0d656e8b 1831}
1832
f81e845f 1833/* Return branch condition mask to implement a branch
80b53886 1834 specified by CODE. Return -1 for invalid comparisons. */
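/* A worked example (illustrative): the mask has CC0 as its most
   significant bit, so e.g. LE in CCSmode maps to CC0 | CC1
   = 0b1100 = 12, i.e. "branch if condition code 0 or 1".  */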
2eb8fe23 1835
8cc5de33 1836int
b40da9a7 1837s390_branch_condition_mask (rtx code)
f81e845f 1838{
2eb8fe23 1839 const int CC0 = 1 << 3;
1840 const int CC1 = 1 << 2;
1841 const int CC2 = 1 << 1;
1842 const int CC3 = 1 << 0;
1843
32eda510 1844 gcc_assert (GET_CODE (XEXP (code, 0)) == REG);
1845 gcc_assert (REGNO (XEXP (code, 0)) == CC_REGNUM);
5ada7a14 1846 gcc_assert (XEXP (code, 1) == const0_rtx
1847 || (GET_MODE (XEXP (code, 0)) == CCRAWmode
1848 && CONST_INT_P (XEXP (code, 1))));
1849
2eb8fe23 1850
1851 switch (GET_MODE (XEXP (code, 0)))
1852 {
1853 case CCZmode:
9c93d843 1854 case CCZ1mode:
2eb8fe23 1855 switch (GET_CODE (code))
1856 {
1857 case EQ: return CC0;
1858 case NE: return CC1 | CC2 | CC3;
80b53886 1859 default: return -1;
2eb8fe23 1860 }
1861 break;
1862
c6821d1c 1863 case CCT1mode:
1864 switch (GET_CODE (code))
1865 {
1866 case EQ: return CC1;
1867 case NE: return CC0 | CC2 | CC3;
80b53886 1868 default: return -1;
c6821d1c 1869 }
1870 break;
1871
1872 case CCT2mode:
1873 switch (GET_CODE (code))
1874 {
1875 case EQ: return CC2;
1876 case NE: return CC0 | CC1 | CC3;
80b53886 1877 default: return -1;
c6821d1c 1878 }
1879 break;
1880
1881 case CCT3mode:
1882 switch (GET_CODE (code))
1883 {
1884 case EQ: return CC3;
1885 case NE: return CC0 | CC1 | CC2;
80b53886 1886 default: return -1;
c6821d1c 1887 }
1888 break;
1889
2eb8fe23 1890 case CCLmode:
1891 switch (GET_CODE (code))
1892 {
1893 case EQ: return CC0 | CC2;
1894 case NE: return CC1 | CC3;
80b53886 1895 default: return -1;
c6821d1c 1896 }
1897 break;
1898
1899 case CCL1mode:
1900 switch (GET_CODE (code))
1901 {
1902 case LTU: return CC2 | CC3; /* carry */
1903 case GEU: return CC0 | CC1; /* no carry */
80b53886 1904 default: return -1;
c6821d1c 1905 }
1906 break;
1907
1908 case CCL2mode:
1909 switch (GET_CODE (code))
1910 {
1911 case GTU: return CC0 | CC1; /* borrow */
1912 case LEU: return CC2 | CC3; /* no borrow */
80b53886 1913 default: return -1;
2eb8fe23 1914 }
1915 break;
1916
3b699fc7 1917 case CCL3mode:
1918 switch (GET_CODE (code))
1919 {
1920 case EQ: return CC0 | CC2;
1921 case NE: return CC1 | CC3;
1922 case LTU: return CC1;
1923 case GTU: return CC3;
1924 case LEU: return CC1 | CC2;
1925 case GEU: return CC2 | CC3;
80b53886 1926 default: return -1;
3b699fc7 1927 }
1928
2eb8fe23 1929 case CCUmode:
1930 switch (GET_CODE (code))
1931 {
1932 case EQ: return CC0;
1933 case NE: return CC1 | CC2 | CC3;
1934 case LTU: return CC1;
1935 case GTU: return CC2;
1936 case LEU: return CC0 | CC1;
1937 case GEU: return CC0 | CC2;
80b53886 1938 default: return -1;
2eb8fe23 1939 }
1940 break;
1941
c6821d1c 1942 case CCURmode:
1943 switch (GET_CODE (code))
1944 {
1945 case EQ: return CC0;
1946 case NE: return CC2 | CC1 | CC3;
1947 case LTU: return CC2;
1948 case GTU: return CC1;
1949 case LEU: return CC0 | CC2;
1950 case GEU: return CC0 | CC1;
80b53886 1951 default: return -1;
c6821d1c 1952 }
1953 break;
1954
3c482144 1955 case CCAPmode:
1956 switch (GET_CODE (code))
1957 {
1958 case EQ: return CC0;
1959 case NE: return CC1 | CC2 | CC3;
1960 case LT: return CC1 | CC3;
1961 case GT: return CC2;
1962 case LE: return CC0 | CC1 | CC3;
1963 case GE: return CC0 | CC2;
80b53886 1964 default: return -1;
3c482144 1965 }
1966 break;
1967
1968 case CCANmode:
1969 switch (GET_CODE (code))
1970 {
1971 case EQ: return CC0;
1972 case NE: return CC1 | CC2 | CC3;
1973 case LT: return CC1;
1974 case GT: return CC2 | CC3;
1975 case LE: return CC0 | CC1;
1976 case GE: return CC0 | CC2 | CC3;
80b53886 1977 default: return -1;
3c482144 1978 }
1979 break;
1980
2eb8fe23 1981 case CCSmode:
1982 switch (GET_CODE (code))
1983 {
1984 case EQ: return CC0;
1985 case NE: return CC1 | CC2 | CC3;
1986 case LT: return CC1;
1987 case GT: return CC2;
1988 case LE: return CC0 | CC1;
1989 case GE: return CC0 | CC2;
1990 case UNORDERED: return CC3;
1991 case ORDERED: return CC0 | CC1 | CC2;
1992 case UNEQ: return CC0 | CC3;
1993 case UNLT: return CC1 | CC3;
1994 case UNGT: return CC2 | CC3;
1995 case UNLE: return CC0 | CC1 | CC3;
1996 case UNGE: return CC0 | CC2 | CC3;
1997 case LTGT: return CC1 | CC2;
80b53886 1998 default: return -1;
2eb8fe23 1999 }
c6821d1c 2000 break;
2001
2002 case CCSRmode:
2003 switch (GET_CODE (code))
2004 {
2005 case EQ: return CC0;
2006 case NE: return CC2 | CC1 | CC3;
2007 case LT: return CC2;
2008 case GT: return CC1;
2009 case LE: return CC0 | CC2;
2010 case GE: return CC0 | CC1;
2011 case UNORDERED: return CC3;
2012 case ORDERED: return CC0 | CC2 | CC1;
2013 case UNEQ: return CC0 | CC3;
2014 case UNLT: return CC2 | CC3;
2015 case UNGT: return CC1 | CC3;
2016 case UNLE: return CC0 | CC2 | CC3;
2017 case UNGE: return CC0 | CC1 | CC3;
2018 case LTGT: return CC2 | CC1;
80b53886 2019 default: return -1;
c6821d1c 2020 }
2021 break;
2eb8fe23 2022
26233f43 2023 /* Vector comparison modes. */
2024
2025 case CCVEQmode:
2026 switch (GET_CODE (code))
2027 {
2028 case EQ: return CC0;
2029 case NE: return CC3;
2030 default: return -1;
2031 }
07f32359 2032
2033 case CCVEQANYmode:
2034 switch (GET_CODE (code))
2035 {
2036 case EQ: return CC0 | CC1;
2037 case NE: return CC3 | CC1;
2038 default: return -1;
2039 }
2040
2041 /* Integer vector compare modes. */
2042
2043 case CCVHmode:
2044 switch (GET_CODE (code))
2045 {
2046 case GT: return CC0;
2047 case LE: return CC3;
2048 default: return -1;
2049 }
2050
2051 case CCVHANYmode:
2052 switch (GET_CODE (code))
2053 {
2054 case GT: return CC0 | CC1;
2055 case LE: return CC3 | CC1;
2056 default: return -1;
2057 }
2058
2059 case CCVHUmode:
2060 switch (GET_CODE (code))
2061 {
2062 case GTU: return CC0;
2063 case LEU: return CC3;
2064 default: return -1;
2065 }
2066
2067 case CCVHUANYmode:
2068 switch (GET_CODE (code))
2069 {
2070 case GTU: return CC0 | CC1;
2071 case LEU: return CC3 | CC1;
2072 default: return -1;
2073 }
2074
26233f43 2075 /* FP vector compare modes. */
2076
2077 case CCVFHmode:
2078 switch (GET_CODE (code))
2079 {
2080 case GT: return CC0;
2081 case UNLE: return CC3;
2082 default: return -1;
2083 }
07f32359 2084
2085 case CCVFHANYmode:
2086 switch (GET_CODE (code))
2087 {
2088 case GT: return CC0 | CC1;
2089 case UNLE: return CC3 | CC1;
2090 default: return -1;
2091 }
2092
26233f43 2093 case CCVFHEmode:
2094 switch (GET_CODE (code))
2095 {
2096 case GE: return CC0;
2097 case UNLT: return CC3;
2098 default: return -1;
2099 }
07f32359 2100
2101 case CCVFHEANYmode:
2102 switch (GET_CODE (code))
2103 {
2104 case GE: return CC0 | CC1;
2105 case UNLT: return CC3 | CC1;
2106 default: return -1;
2107 }
2108
2109
5ada7a14 2110 case CCRAWmode:
2111 switch (GET_CODE (code))
2112 {
2113 case EQ:
2114 return INTVAL (XEXP (code, 1));
2115 case NE:
2116 return (INTVAL (XEXP (code, 1))) ^ 0xf;
2117 default:
2118 gcc_unreachable ();
2119 }
2120
2eb8fe23 2121 default:
80b53886 2122 return -1;
2eb8fe23 2123 }
2124}
2125
e68d6a13 2126
2127/* Return branch condition mask to implement a compare and branch
2128 specified by CODE. Return -1 for invalid comparisons. */
2129
2130int
2131s390_compare_and_branch_condition_mask (rtx code)
2132{
2133 const int CC0 = 1 << 3;
2134 const int CC1 = 1 << 2;
2135 const int CC2 = 1 << 1;
2136
2137 switch (GET_CODE (code))
2138 {
2139 case EQ:
2140 return CC0;
2141 case NE:
2142 return CC1 | CC2;
2143 case LT:
2144 case LTU:
2145 return CC1;
2146 case GT:
2147 case GTU:
2148 return CC2;
2149 case LE:
2150 case LEU:
2151 return CC0 | CC1;
2152 case GE:
2153 case GEU:
2154 return CC0 | CC2;
2155 default:
2156 gcc_unreachable ();
2157 }
2158 return -1;
2159}
2160
f81e845f 2161/* If INV is false, return assembler mnemonic string to implement
2162 a branch specified by CODE. If INV is true, return mnemonic
2eb8fe23 2163 for the corresponding inverted branch. */
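/* E.g. (illustrative): an EQ on CCSmode yields mask CC0 = 8 and thus
   "e" from the table below; with INV set the mask becomes
   8 ^ 15 = 7, selecting "ne".  */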
2164
2165static const char *
b40da9a7 2166s390_branch_condition_mnemonic (rtx code, int inv)
2eb8fe23 2167{
e68d6a13 2168 int mask;
2169
c8834c5f 2170 static const char *const mnemonic[16] =
2eb8fe23 2171 {
2172 NULL, "o", "h", "nle",
2173 "l", "nhe", "lh", "ne",
2174 "e", "nlh", "he", "nl",
2175 "le", "nh", "no", NULL
2176 };
2177
e68d6a13 2178 if (GET_CODE (XEXP (code, 0)) == REG
2179 && REGNO (XEXP (code, 0)) == CC_REGNUM
5ada7a14 2180 && (XEXP (code, 1) == const0_rtx
2181 || (GET_MODE (XEXP (code, 0)) == CCRAWmode
2182 && CONST_INT_P (XEXP (code, 1)))))
e68d6a13 2183 mask = s390_branch_condition_mask (code);
2184 else
2185 mask = s390_compare_and_branch_condition_mask (code);
2186
80b53886 2187 gcc_assert (mask >= 0);
2eb8fe23 2188
2189 if (inv)
2190 mask ^= 15;
2191
32eda510 2192 gcc_assert (mask >= 1 && mask <= 14);
2eb8fe23 2193
2194 return mnemonic[mask];
2195}
2196
64a1078f 2197/* Return the part of OP which has a value different from DEF.
 2198 The size of the part is determined by MODE.
f588eb9f 2199 Use this function only if you already know that OP really
64a1078f 2200 contains such a part. */
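/* A worked example (illustrative):
   s390_extract_part (GEN_INT (0x12340000), HImode, 0) returns 0x1234,
   the only HImode chunk of the value that differs from 0.  */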
8b4a4127 2201
64a1078f 2202unsigned HOST_WIDE_INT
3754d046 2203s390_extract_part (rtx op, machine_mode mode, int def)
8b4a4127 2204{
64a1078f 2205 unsigned HOST_WIDE_INT value = 0;
2206 int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
2207 int part_bits = GET_MODE_BITSIZE (mode);
0451e449 2208 unsigned HOST_WIDE_INT part_mask
2209 = ((unsigned HOST_WIDE_INT)1 << part_bits) - 1;
64a1078f 2210 int i;
f588eb9f 2211
64a1078f 2212 for (i = 0; i < max_parts; i++)
8b4a4127 2213 {
64a1078f 2214 if (i == 0)
2215 value = (unsigned HOST_WIDE_INT) INTVAL (op);
8b4a4127 2216 else
64a1078f 2217 value >>= part_bits;
f588eb9f 2218
64a1078f 2219 if ((value & part_mask) != (def & part_mask))
2220 return value & part_mask;
8b4a4127 2221 }
f588eb9f 2222
32eda510 2223 gcc_unreachable ();
8b4a4127 2224}
2225
2226/* If OP is an integer constant of mode MODE with exactly one
64a1078f 2227 part of mode PART_MODE unequal to DEF, return the number of that
2228 part. Otherwise, return -1. */
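/* E.g. (illustrative): s390_single_part (GEN_INT (0xff00), SImode,
   HImode, 0) returns 1: only the low halfword differs from 0, and
   parts are numbered from 0 starting at the most significant part.  */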
8b4a4127 2229
2230int
f588eb9f 2231s390_single_part (rtx op,
3754d046 2232 machine_mode mode,
2233 machine_mode part_mode,
64a1078f 2234 int def)
2235{
2236 unsigned HOST_WIDE_INT value = 0;
2237 int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
0451e449 2238 unsigned HOST_WIDE_INT part_mask
2239 = ((unsigned HOST_WIDE_INT)1 << GET_MODE_BITSIZE (part_mode)) - 1;
64a1078f 2240 int i, part = -1;
2241
2242 if (GET_CODE (op) != CONST_INT)
2243 return -1;
f588eb9f 2244
64a1078f 2245 for (i = 0; i < n_parts; i++)
2246 {
2247 if (i == 0)
2248 value = (unsigned HOST_WIDE_INT) INTVAL (op);
8b4a4127 2249 else
64a1078f 2250 value >>= GET_MODE_BITSIZE (part_mode);
f588eb9f 2251
64a1078f 2252 if ((value & part_mask) != (def & part_mask))
2253 {
2254 if (part != -1)
2255 return -1;
2256 else
2257 part = i;
2258 }
8b4a4127 2259 }
64a1078f 2260 return part == -1 ? -1 : n_parts - 1 - part;
8b4a4127 2261}
2262
e68d6a13 2263/* Return true if IN contains a contiguous bitfield in the lower SIZE
e64f5133 2264 bits and no other bits are set in (the lower SIZE bits of) IN.
e68d6a13 2265
e64f5133 2266 PSTART and PEND can be used to obtain the start and end
2267 position (inclusive) of the bitfield relative to 64
 2268 bits. *PSTART / *PEND give the position of the first/last bit
2269 of the bitfield counting from the highest order bit starting
2270 with zero. */
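/* A worked example (illustrative): IN = 0xf0 with SIZE = 8 yields
   *PSTART = 56 and *PEND = 59; counting from the MSB of the 64-bit
   value, the four one-bits occupy positions 56 (0x80) to 59 (0x10).  */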
e68d6a13 2271
2272bool
e64f5133 2273s390_contiguous_bitmask_nowrap_p (unsigned HOST_WIDE_INT in, int size,
2274 int *pstart, int *pend)
e68d6a13 2275{
e64f5133 2276 int start;
2277 int end = -1;
2278 int lowbit = sizeof (HOST_WIDE_INT) * BITS_PER_UNIT - 1;
2279 int highbit = sizeof (HOST_WIDE_INT) * BITS_PER_UNIT - size;
2280 unsigned HOST_WIDE_INT bitmask = 1ULL;
2281
2282 gcc_assert (!!pstart == !!pend);
2283 for (start = lowbit; start >= highbit; bitmask <<= 1, start--)
2284 if (end == -1)
2285 {
2286 /* Look for the rightmost bit of a contiguous range of ones. */
2287 if (bitmask & in)
2288 /* Found it. */
2289 end = start;
2290 }
2291 else
2292 {
 2293 /* Look for the first zero bit after the range of ones. */
2294 if (! (bitmask & in))
2295 /* Found it. */
2296 break;
2297 }
2298 /* We're one past the last one-bit. */
2299 start++;
e68d6a13 2300
e64f5133 2301 if (end == -1)
2302 /* No one bits found. */
2303 return false;
2304
2305 if (start > highbit)
e68d6a13 2306 {
e64f5133 2307 unsigned HOST_WIDE_INT mask;
2308
2309 /* Calculate a mask for all bits beyond the contiguous bits. */
2310 mask = ((~(0ULL) >> highbit) & (~(0ULL) << (lowbit - start + 1)));
2311 if (mask & in)
2312 /* There are more bits set beyond the first range of one bits. */
2313 return false;
e68d6a13 2314 }
2315
e64f5133 2316 if (pstart)
2317 {
2318 *pstart = start;
2319 *pend = end;
2320 }
e68d6a13 2321
e64f5133 2322 return true;
2323}
e68d6a13 2324
e64f5133 2325/* Same as s390_contiguous_bitmask_nowrap_p but also returns true
2326 if ~IN contains a contiguous bitfield. In that case, *END is <
2327 *START.
76a4c804 2328
e64f5133 2329 If WRAP_P is true, a bitmask that wraps around is also tested.
 2330 When a wraparound occurs, *START is greater than *END (if the
 2331 pointers are non-null), and the uppermost (64 - SIZE) bits are
 2332 thus part of the range. If WRAP_P is false, no wraparound is
 2333 tested. */
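/* E.g. (illustrative): IN = 0xff0000000000000f with SIZE = 64 is a
   wrapping mask; *START = 60 (within the low 0xf nibble) and
   *END = 7 (within the high 0xff byte), so *START > *END.  */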
e68d6a13 2334
e64f5133 2335bool
2336s390_contiguous_bitmask_p (unsigned HOST_WIDE_INT in, bool wrap_p,
2337 int size, int *start, int *end)
2338{
2339 int bs = sizeof (HOST_WIDE_INT) * BITS_PER_UNIT;
2340 bool b;
2341
2342 gcc_assert (!!start == !!end);
2343 if ((in & ((~(0ULL)) >> (bs - size))) == 0)
2344 /* This cannot be expressed as a contiguous bitmask. Exit early because
2345 the second call of s390_contiguous_bitmask_nowrap_p would accept this as
2346 a valid bitmask. */
e68d6a13 2347 return false;
e64f5133 2348 b = s390_contiguous_bitmask_nowrap_p (in, size, start, end);
2349 if (b)
2350 return true;
2351 if (! wrap_p)
2352 return false;
2353 b = s390_contiguous_bitmask_nowrap_p (~in, size, start, end);
2354 if (b && start)
2355 {
2356 int s = *start;
2357 int e = *end;
e68d6a13 2358
e64f5133 2359 gcc_assert (s >= 1);
2360 *start = ((e + 1) & (bs - 1));
2361 *end = ((s - 1 + bs) & (bs - 1));
2362 }
e68d6a13 2363
e64f5133 2364 return b;
e68d6a13 2365}
2366
76a4c804 2367/* Return true if OP contains the same contiguous bitfield in *all*
2368 its elements. START and END can be used to obtain the start and
2369 end position of the bitfield.
2370
 2371 START/END give the position of the first/last bit of the bitfield
2372 counting from the lowest order bit starting with zero. In order to
2373 use these values for S/390 instructions this has to be converted to
2374 "bits big endian" style. */
2375
2376bool
2377s390_contiguous_bitmask_vector_p (rtx op, int *start, int *end)
2378{
2379 unsigned HOST_WIDE_INT mask;
e64f5133 2380 int size;
62fdb8e4 2381 rtx elt;
e64f5133 2382 bool b;
76a4c804 2383
e64f5133 2384 gcc_assert (!!start == !!end);
62fdb8e4 2385 if (!const_vec_duplicate_p (op, &elt)
2386 || !CONST_INT_P (elt))
76a4c804 2387 return false;
2388
76a4c804 2389 size = GET_MODE_UNIT_BITSIZE (GET_MODE (op));
f81e57c4 2390
2391 /* We cannot deal with V1TI/V1TF. This would require a vgmq. */
2392 if (size > 64)
2393 return false;
2394
62fdb8e4 2395 mask = UINTVAL (elt);
e64f5133 2396
2397 b = s390_contiguous_bitmask_p (mask, true, size, start, end);
2398 if (b)
76a4c804 2399 {
e64f5133 2400 if (start)
2401 {
2402 int bs = sizeof (HOST_WIDE_INT) * BITS_PER_UNIT;
2403
2404 *start -= (bs - size);
2405 *end -= (bs - size);
2406 }
76a4c804 2407 return true;
2408 }
e64f5133 2409 else
2410 return false;
76a4c804 2411}
2412
2413/* Return true if C consists only of byte chunks being either 0 or
2414 0xff. If MASK is !=NULL a byte mask is generated which is
2415 appropriate for the vector generate byte mask instruction. */
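/* E.g. (illustrative): a V16QI vector with all elements 0xff yields
   *MASK = 0xffff -- one mask bit per byte, first element in the most
   significant bit, which is the immediate a VGBM would use for the
   all-ones vector.  */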
2416
2417bool
2418s390_bytemask_vector_p (rtx op, unsigned *mask)
2419{
2420 int i;
2421 unsigned tmp_mask = 0;
2422 int nunit, unit_size;
2423
2424 if (!VECTOR_MODE_P (GET_MODE (op))
2425 || GET_CODE (op) != CONST_VECTOR
2426 || !CONST_INT_P (XVECEXP (op, 0, 0)))
2427 return false;
2428
2429 nunit = GET_MODE_NUNITS (GET_MODE (op));
2430 unit_size = GET_MODE_UNIT_SIZE (GET_MODE (op));
2431
2432 for (i = 0; i < nunit; i++)
2433 {
2434 unsigned HOST_WIDE_INT c;
2435 int j;
2436
2437 if (!CONST_INT_P (XVECEXP (op, 0, i)))
2438 return false;
2439
2440 c = UINTVAL (XVECEXP (op, 0, i));
2441 for (j = 0; j < unit_size; j++)
2442 {
2443 if ((c & 0xff) != 0 && (c & 0xff) != 0xff)
2444 return false;
2445 tmp_mask |= (c & 1) << ((nunit - 1 - i) * unit_size + j);
2446 c = c >> BITS_PER_UNIT;
2447 }
2448 }
2449
2450 if (mask != NULL)
2451 *mask = tmp_mask;
2452
2453 return true;
2454}
2455
6bc28655 2456/* Check whether a rotate of ROTL followed by an AND of CONTIG is
2457 equivalent to a shift followed by the AND. In particular, CONTIG
2458 should not overlap the (rotated) bit 0/bit 63 gap. Negative values
2459 for ROTL indicate a rotate to the right. */
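/* E.g. (illustrative): s390_extzv_shift_ok (32, 8, 0xff00) is true,
   since rotating left by 8 keeps the masked bits clear of the bit
   0/bit 63 gap, while s390_extzv_shift_ok (32, 10, 0xff00) is not.  */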
2460
2461bool
2462s390_extzv_shift_ok (int bitsize, int rotl, unsigned HOST_WIDE_INT contig)
2463{
e64f5133 2464 int start, end;
6bc28655 2465 bool ok;
2466
e64f5133 2467 ok = s390_contiguous_bitmask_nowrap_p (contig, bitsize, &start, &end);
6bc28655 2468 gcc_assert (ok);
2469
e64f5133 2470 if (rotl >= 0)
2471 return (64 - end >= rotl);
2472 else
2473 {
2474 /* Translate "- rotate right" in BITSIZE mode to "rotate left" in
 2475 DImode. */
2476 rotl = -rotl + (64 - bitsize);
2477 return (start >= rotl);
2478 }
6bc28655 2479}
2480
f81e845f 2481/* Check whether we can (and want to) split a double-word
2482 move in mode MODE from SRC to DST into two single-word
66795431 2483 moves, moving the subword FIRST_SUBWORD first. */
2484
2485bool
3754d046 2486s390_split_ok_p (rtx dst, rtx src, machine_mode mode, int first_subword)
66795431 2487{
76a4c804 2488 /* Floating point and vector registers cannot be split. */
2489 if (FP_REG_P (src) || FP_REG_P (dst) || VECTOR_REG_P (src) || VECTOR_REG_P (dst))
66795431 2490 return false;
2491
1fc184ee 2492 /* We don't need to split if operands are directly accessible. */
66795431 2493 if (s_operand (src, mode) || s_operand (dst, mode))
2494 return false;
2495
2496 /* Non-offsettable memory references cannot be split. */
2497 if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
2498 || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
2499 return false;
2500
2501 /* Moving the first subword must not clobber a register
2502 needed to move the second subword. */
2503 if (register_operand (dst, mode))
2504 {
2505 rtx subreg = operand_subword (dst, first_subword, 0, mode);
2506 if (reg_overlap_mentioned_p (subreg, src))
2507 return false;
2508 }
2509
2510 return true;
2511}
2512
74bdf297 2513/* Return true if it can be proven that [MEM1, MEM1 + SIZE]
2514 and [MEM2, MEM2 + SIZE] do overlap and false
2515 otherwise. */
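/* E.g. (illustrative): two MEMs addressing the same base register
   with displacements 0 and 2 overlap for SIZE = 4 (delta 2 < 4),
   but not for SIZE = 2.  */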
2516
2517bool
2518s390_overlap_p (rtx mem1, rtx mem2, HOST_WIDE_INT size)
2519{
2520 rtx addr1, addr2, addr_delta;
2521 HOST_WIDE_INT delta;
2522
2523 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
2524 return true;
2525
2526 if (size == 0)
2527 return false;
2528
2529 addr1 = XEXP (mem1, 0);
2530 addr2 = XEXP (mem2, 0);
2531
2532 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
2533
 2534 /* This overlap check is used by peepholes that merge memory block operations.
 2535 The S/390 hardware recognizes overlapping operands and falls back
ffead1ca 2536 to a slower implementation for them, so allowing overlapping
74bdf297 2537 operations would lead to slow code, but not to wrong code. Therefore we are
ffead1ca 2538 somewhat optimistic if we cannot prove that the memory blocks
74bdf297 2539 do overlap.
 2540 That's why we return false here, although this may accept operations on
 2541 overlapping memory areas. */
2542 if (!addr_delta || GET_CODE (addr_delta) != CONST_INT)
2543 return false;
2544
2545 delta = INTVAL (addr_delta);
2546
2547 if (delta == 0
2548 || (delta > 0 && delta < size)
2549 || (delta < 0 && -delta < size))
2550 return true;
2551
2552 return false;
2553}
2554
9dffd3ff 2555/* Check whether the address of memory reference MEM2 equals exactly
2556 the address of memory reference MEM1 plus DELTA. Return true if
2557 we can prove this to be the case, false otherwise. */
2558
2559bool
2560s390_offset_p (rtx mem1, rtx mem2, rtx delta)
2561{
2562 rtx addr1, addr2, addr_delta;
2563
2564 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
2565 return false;
2566
2567 addr1 = XEXP (mem1, 0);
2568 addr2 = XEXP (mem2, 0);
2569
2570 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
2571 if (!addr_delta || !rtx_equal_p (addr_delta, delta))
2572 return false;
2573
2574 return true;
2575}
2576
3e247a31 2577/* Expand logical operator CODE in mode MODE with operands OPERANDS. */
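/* E.g. (illustrative): an XOR of two QImode registers is widened to
   SImode below, since QImode/HImode logical patterns only exist for
   memory destinations (NI/OI/XI); the low byte of the SImode result
   is then copied back into OPERANDS[0].  */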
2578
2579void
3754d046 2580s390_expand_logical_operator (enum rtx_code code, machine_mode mode,
3e247a31 2581 rtx *operands)
2582{
3754d046 2583 machine_mode wmode = mode;
3e247a31 2584 rtx dst = operands[0];
2585 rtx src1 = operands[1];
2586 rtx src2 = operands[2];
2587 rtx op, clob, tem;
2588
2589 /* If we cannot handle the operation directly, use a temp register. */
2590 if (!s390_logical_operator_ok_p (operands))
2591 dst = gen_reg_rtx (mode);
2592
2593 /* QImode and HImode patterns make sense only if we have a destination
2594 in memory. Otherwise perform the operation in SImode. */
2595 if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
2596 wmode = SImode;
2597
2598 /* Widen operands if required. */
2599 if (mode != wmode)
2600 {
2601 if (GET_CODE (dst) == SUBREG
2602 && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
2603 dst = tem;
2604 else if (REG_P (dst))
2605 dst = gen_rtx_SUBREG (wmode, dst, 0);
2606 else
2607 dst = gen_reg_rtx (wmode);
2608
2609 if (GET_CODE (src1) == SUBREG
2610 && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
2611 src1 = tem;
2612 else if (GET_MODE (src1) != VOIDmode)
2613 src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);
2614
2615 if (GET_CODE (src2) == SUBREG
2616 && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
2617 src2 = tem;
2618 else if (GET_MODE (src2) != VOIDmode)
2619 src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
2620 }
2621
2622 /* Emit the instruction. */
d1f9b275 2623 op = gen_rtx_SET (dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
3e247a31 2624 clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
2625 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));
2626
2627 /* Fix up the destination if needed. */
2628 if (dst != operands[0])
2629 emit_move_insn (operands[0], gen_lowpart (mode, dst));
2630}
2631
2632/* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR). */
2633
2634bool
2635s390_logical_operator_ok_p (rtx *operands)
2636{
2637 /* If the destination operand is in memory, it needs to coincide
2638 with one of the source operands. After reload, it has to be
2639 the first source operand. */
2640 if (GET_CODE (operands[0]) == MEM)
2641 return rtx_equal_p (operands[0], operands[1])
2642 || (!reload_completed && rtx_equal_p (operands[0], operands[2]));
2643
2644 return true;
2645}
2646
3f56e755 2647/* Narrow logical operation CODE of memory operand MEMOP with immediate
2648 operand IMMOP to switch from SS to SI type instructions. */
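/* A worked example (illustrative): an AND of a HImode memory operand
   with 0x00ff only changes the most significant byte, so it is
   rewritten into a QImode access to that byte (part 0) with mask
   0x00, which a single NI instruction can handle.  */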
2649
2650void
2651s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
2652{
2653 int def = code == AND ? -1 : 0;
2654 HOST_WIDE_INT mask;
2655 int part;
2656
2657 gcc_assert (GET_CODE (*memop) == MEM);
2658 gcc_assert (!MEM_VOLATILE_P (*memop));
2659
2660 mask = s390_extract_part (*immop, QImode, def);
2661 part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
2662 gcc_assert (part >= 0);
2663
2664 *memop = adjust_address (*memop, QImode, part);
2665 *immop = gen_int_mode (mask, QImode);
2666}
2667
2eb8fe23 2668
875862bf 2669/* How to allocate a 'struct machine_function'. */
2670
2671static struct machine_function *
2672s390_init_machine_status (void)
2673{
25a27413 2674 return ggc_cleared_alloc<machine_function> ();
875862bf 2675}
2676
4673c1a0 2677/* Map for smallest class containing reg regno. */
2678
c8834c5f 2679const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
76a4c804 2680{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS, /* 0 */
2681 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS, /* 4 */
2682 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS, /* 8 */
2683 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS, /* 12 */
2684 FP_REGS, FP_REGS, FP_REGS, FP_REGS, /* 16 */
2685 FP_REGS, FP_REGS, FP_REGS, FP_REGS, /* 20 */
2686 FP_REGS, FP_REGS, FP_REGS, FP_REGS, /* 24 */
2687 FP_REGS, FP_REGS, FP_REGS, FP_REGS, /* 28 */
2688 ADDR_REGS, CC_REGS, ADDR_REGS, ADDR_REGS, /* 32 */
2689 ACCESS_REGS, ACCESS_REGS, VEC_REGS, VEC_REGS, /* 36 */
2690 VEC_REGS, VEC_REGS, VEC_REGS, VEC_REGS, /* 40 */
2691 VEC_REGS, VEC_REGS, VEC_REGS, VEC_REGS, /* 44 */
2692 VEC_REGS, VEC_REGS, VEC_REGS, VEC_REGS, /* 48 */
2693 VEC_REGS, VEC_REGS /* 52 */
4673c1a0 2694};
2695
71343e6b 2696/* Return attribute type of insn. */
2697
2698static enum attr_type
ed3e6e5d 2699s390_safe_attr_type (rtx_insn *insn)
71343e6b 2700{
2701 if (recog_memoized (insn) >= 0)
2702 return get_attr_type (insn);
2703 else
2704 return TYPE_NONE;
2705}
4673c1a0 2706
51aa1e9c 2707/* Return true if DISP is a valid short displacement. */
2708
e5537457 2709static bool
b40da9a7 2710s390_short_displacement (rtx disp)
51aa1e9c 2711{
2712 /* No displacement is OK. */
2713 if (!disp)
e5537457 2714 return true;
51aa1e9c 2715
a7b49046 2716 /* Without the long displacement facility we don't need to
 2717 distinguish between long and short displacements. */
2718 if (!TARGET_LONG_DISPLACEMENT)
2719 return true;
2720
51aa1e9c 2721 /* Integer displacement in range. */
2722 if (GET_CODE (disp) == CONST_INT)
2723 return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;
2724
2725 /* GOT offset is not OK, the GOT can be large. */
2726 if (GET_CODE (disp) == CONST
2727 && GET_CODE (XEXP (disp, 0)) == UNSPEC
a6e4e903 2728 && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
2729 || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
e5537457 2730 return false;
51aa1e9c 2731
2732 /* All other symbolic constants are literal pool references,
2733 which are OK as the literal pool must be small. */
2734 if (GET_CODE (disp) == CONST)
e5537457 2735 return true;
51aa1e9c 2736
e5537457 2737 return false;
51aa1e9c 2738}
2739
875862bf 2740/* Decompose an RTL expression ADDR for a memory address into
2741 its components, returned in OUT.
a5004c3d 2742
e5537457 2743 Returns false if ADDR is not a valid memory address, true
875862bf 2744 otherwise. If OUT is NULL, don't return the components,
2745 but check for validity only.
a5004c3d 2746
875862bf 2747 Note: Only addresses in canonical form are recognized.
2748 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
2749 canonical form so that they will be recognized. */
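/* E.g. (illustrative): (plus (reg %r2) (const_int 4000)) decomposes
   into base = %r2, indx = NULL, disp = 4000, while
   (plus (plus (reg %r1) (reg %r2)) (const_int 8)) yields
   indx = %r1, base = %r2, disp = 8.  */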
64a1078f 2750
875862bf 2751static int
edd89d66 2752s390_decompose_address (rtx addr, struct s390_address *out)
875862bf 2753{
2754 HOST_WIDE_INT offset = 0;
2755 rtx base = NULL_RTX;
2756 rtx indx = NULL_RTX;
2757 rtx disp = NULL_RTX;
2758 rtx orig_disp;
e5537457 2759 bool pointer = false;
2760 bool base_ptr = false;
2761 bool indx_ptr = false;
05b58257 2762 bool literal_pool = false;
2763
2764 /* We may need to substitute the literal pool base register into the address
2765 below. However, at this point we do not know which register is going to
2766 be used as base, so we substitute the arg pointer register. This is going
2767 to be treated as holding a pointer below -- it shouldn't be used for any
2768 other purpose. */
2769 rtx fake_pool_base = gen_rtx_REG (Pmode, ARG_POINTER_REGNUM);
3f56e755 2770
875862bf 2771 /* Decompose address into base + index + displacement. */
3f56e755 2772
875862bf 2773 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
2774 base = addr;
3f56e755 2775
875862bf 2776 else if (GET_CODE (addr) == PLUS)
6b1c8423 2777 {
875862bf 2778 rtx op0 = XEXP (addr, 0);
2779 rtx op1 = XEXP (addr, 1);
2780 enum rtx_code code0 = GET_CODE (op0);
2781 enum rtx_code code1 = GET_CODE (op1);
6b1c8423 2782
875862bf 2783 if (code0 == REG || code0 == UNSPEC)
2784 {
2785 if (code1 == REG || code1 == UNSPEC)
2786 {
2787 indx = op0; /* index + base */
2788 base = op1;
2789 }
6b1c8423 2790
875862bf 2791 else
2792 {
2793 base = op0; /* base + displacement */
2794 disp = op1;
2795 }
2796 }
a5004c3d 2797
875862bf 2798 else if (code0 == PLUS)
51aa1e9c 2799 {
875862bf 2800 indx = XEXP (op0, 0); /* index + base + disp */
2801 base = XEXP (op0, 1);
2802 disp = op1;
51aa1e9c 2803 }
51aa1e9c 2804
875862bf 2805 else
51aa1e9c 2806 {
e5537457 2807 return false;
51aa1e9c 2808 }
875862bf 2809 }
51aa1e9c 2810
875862bf 2811 else
2812 disp = addr; /* displacement */
51aa1e9c 2813
875862bf 2814 /* Extract integer part of displacement. */
2815 orig_disp = disp;
2816 if (disp)
2817 {
2818 if (GET_CODE (disp) == CONST_INT)
51aa1e9c 2819 {
875862bf 2820 offset = INTVAL (disp);
2821 disp = NULL_RTX;
51aa1e9c 2822 }
875862bf 2823 else if (GET_CODE (disp) == CONST
2824 && GET_CODE (XEXP (disp, 0)) == PLUS
2825 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
2826 {
2827 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
2828 disp = XEXP (XEXP (disp, 0), 0);
2829 }
2830 }
51aa1e9c 2831
875862bf 2832 /* Strip off CONST here to avoid special case tests later. */
2833 if (disp && GET_CODE (disp) == CONST)
2834 disp = XEXP (disp, 0);
63ebd742 2835
875862bf 2836 /* We can convert literal pool addresses to
2837 displacements by basing them off the base register. */
2838 if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
2839 {
2840 /* Either base or index must be free to hold the base register. */
2841 if (!base)
05b58257 2842 base = fake_pool_base, literal_pool = true;
875862bf 2843 else if (!indx)
05b58257 2844 indx = fake_pool_base, literal_pool = true;
875862bf 2845 else
e5537457 2846 return false;
875862bf 2847
2848 /* Mark up the displacement. */
2849 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
2850 UNSPEC_LTREL_OFFSET);
51aa1e9c 2851 }
a5004c3d 2852
875862bf 2853 /* Validate base register. */
2854 if (base)
2855 {
2856 if (GET_CODE (base) == UNSPEC)
2857 switch (XINT (base, 1))
2858 {
2859 case UNSPEC_LTREF:
2860 if (!disp)
ffead1ca 2861 disp = gen_rtx_UNSPEC (Pmode,
875862bf 2862 gen_rtvec (1, XVECEXP (base, 0, 0)),
2863 UNSPEC_LTREL_OFFSET);
2864 else
e5537457 2865 return false;
a5004c3d 2866
05b58257 2867 base = XVECEXP (base, 0, 1);
875862bf 2868 break;
64a1078f 2869
875862bf 2870 case UNSPEC_LTREL_BASE:
05b58257 2871 if (XVECLEN (base, 0) == 1)
2872 base = fake_pool_base, literal_pool = true;
2873 else
2874 base = XVECEXP (base, 0, 1);
875862bf 2875 break;
64a1078f 2876
875862bf 2877 default:
e5537457 2878 return false;
875862bf 2879 }
64a1078f 2880
a25e52e9 2881 if (!REG_P (base) || GET_MODE (base) != Pmode)
e5537457 2882 return false;
875862bf 2883
05b58257 2884 if (REGNO (base) == STACK_POINTER_REGNUM
875862bf 2885 || REGNO (base) == FRAME_POINTER_REGNUM
2886 || ((reload_completed || reload_in_progress)
2887 && frame_pointer_needed
2888 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
2889 || REGNO (base) == ARG_POINTER_REGNUM
2890 || (flag_pic
2891 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
e5537457 2892 pointer = base_ptr = true;
05b58257 2893
2894 if ((reload_completed || reload_in_progress)
2895 && base == cfun->machine->base_reg)
2896 pointer = base_ptr = literal_pool = true;
875862bf 2897 }
2898
2899 /* Validate index register. */
2900 if (indx)
64a1078f 2901 {
875862bf 2902 if (GET_CODE (indx) == UNSPEC)
2903 switch (XINT (indx, 1))
2904 {
2905 case UNSPEC_LTREF:
2906 if (!disp)
ffead1ca 2907 disp = gen_rtx_UNSPEC (Pmode,
875862bf 2908 gen_rtvec (1, XVECEXP (indx, 0, 0)),
2909 UNSPEC_LTREL_OFFSET);
2910 else
e5537457 2911 return false;
64a1078f 2912
05b58257 2913 indx = XVECEXP (indx, 0, 1);
875862bf 2914 break;
64a1078f 2915
875862bf 2916 case UNSPEC_LTREL_BASE:
05b58257 2917 if (XVECLEN (indx, 0) == 1)
2918 indx = fake_pool_base, literal_pool = true;
2919 else
2920 indx = XVECEXP (indx, 0, 1);
875862bf 2921 break;
64a1078f 2922
875862bf 2923 default:
e5537457 2924 return false;
875862bf 2925 }
64a1078f 2926
a25e52e9 2927 if (!REG_P (indx) || GET_MODE (indx) != Pmode)
e5537457 2928 return false;
64a1078f 2929
05b58257 2930 if (REGNO (indx) == STACK_POINTER_REGNUM
875862bf 2931 || REGNO (indx) == FRAME_POINTER_REGNUM
2932 || ((reload_completed || reload_in_progress)
2933 && frame_pointer_needed
2934 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
2935 || REGNO (indx) == ARG_POINTER_REGNUM
2936 || (flag_pic
2937 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
e5537457 2938 pointer = indx_ptr = true;
05b58257 2939
2940 if ((reload_completed || reload_in_progress)
2941 && indx == cfun->machine->base_reg)
2942 pointer = indx_ptr = literal_pool = true;
875862bf 2943 }
f588eb9f 2944
875862bf 2945 /* Prefer to use pointer as base, not index. */
2946 if (base && indx && !base_ptr
2947 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
2948 {
2949 rtx tmp = base;
2950 base = indx;
2951 indx = tmp;
2952 }
64a1078f 2953
875862bf 2954 /* Validate displacement. */
2955 if (!disp)
2956 {
ffead1ca 2957 /* If virtual registers are involved, the displacement will change later
2958 anyway as the virtual registers get eliminated. This could make a
2959 valid displacement invalid, but it is more likely to make an invalid
2960 displacement valid, because we sometimes access the register save area
119114cb 2961 via negative offsets to one of those registers.
875862bf 2962 Thus we don't check the displacement for validity here. If after
2963 elimination the displacement turns out to be invalid after all,
2964 this is fixed up by reload in any case. */
7b1bda1c 2965 /* LRA always keeps displacements up to date, and we need to
 2966 know that the displacement is valid throughout LRA, not only
 2967 at the final elimination. */
2968 if (lra_in_progress
2969 || (base != arg_pointer_rtx
2970 && indx != arg_pointer_rtx
2971 && base != return_address_pointer_rtx
2972 && indx != return_address_pointer_rtx
2973 && base != frame_pointer_rtx
2974 && indx != frame_pointer_rtx
2975 && base != virtual_stack_vars_rtx
2976 && indx != virtual_stack_vars_rtx))
875862bf 2977 if (!DISP_IN_RANGE (offset))
e5537457 2978 return false;
875862bf 2979 }
2980 else
2981 {
2982 /* All the special cases are pointers. */
e5537457 2983 pointer = true;
64a1078f 2984
875862bf 2985 /* In the small-PIC case, the linker converts @GOT
2986 and @GOTNTPOFF offsets to possible displacements. */
2987 if (GET_CODE (disp) == UNSPEC
2988 && (XINT (disp, 1) == UNSPEC_GOT
2989 || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
875862bf 2990 && flag_pic == 1)
2991 {
2992 ;
2993 }
64a1078f 2994
1ed7a160 2995 /* Accept pool label offsets. */
2996 else if (GET_CODE (disp) == UNSPEC
2997 && XINT (disp, 1) == UNSPEC_POOL_OFFSET)
2998 ;
64a1078f 2999
875862bf 3000 /* Accept literal pool references. */
3001 else if (GET_CODE (disp) == UNSPEC
3002 && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
3003 {
cf8ffe7d 3004 /* In case CSE pulled a non-literal-pool reference out of
 3005 the pool, we have to reject the address. This is
3006 especially important when loading the GOT pointer on non
3007 zarch CPUs. In this case the literal pool contains an lt
3008 relative offset to the _GLOBAL_OFFSET_TABLE_ label which
3009 will most likely exceed the displacement. */
3010 if (GET_CODE (XVECEXP (disp, 0, 0)) != SYMBOL_REF
3011 || !CONSTANT_POOL_ADDRESS_P (XVECEXP (disp, 0, 0)))
3012 return false;
3013
875862bf 3014 orig_disp = gen_rtx_CONST (Pmode, disp);
3015 if (offset)
3016 {
3017 /* If we have an offset, make sure it does not
3018 exceed the size of the constant pool entry. */
3019 rtx sym = XVECEXP (disp, 0, 0);
3020 if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
e5537457 3021 return false;
64a1078f 3022
29c05e22 3023 orig_disp = plus_constant (Pmode, orig_disp, offset);
875862bf 3024 }
3025 }
3026
3027 else
e5537457 3028 return false;
64a1078f 3029 }
3030
875862bf 3031 if (!base && !indx)
e5537457 3032 pointer = true;
875862bf 3033
3034 if (out)
3035 {
3036 out->base = base;
3037 out->indx = indx;
3038 out->disp = orig_disp;
3039 out->pointer = pointer;
05b58257 3040 out->literal_pool = literal_pool;
875862bf 3041 }
3042
e5537457 3043 return true;
64a1078f 3044}
3045
2be7449b 3046/* Decompose an RTL expression OP for an address-style operand into its
3047 components, and return the base register in BASE and the offset in
3048 OFFSET. While OP looks like an address it is never supposed to be
3049 used as such.
6d6be381 3050
2be7449b 3051 Return true if OP is a valid address operand, false if not. */
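/* E.g. (illustrative): for a shift count like
   (plus (reg %r3) (const_int 7)) this returns BASE = %r3 and
   OFFSET = 7; a plain (const_int 12) yields BASE = NULL, OFFSET = 12.  */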
6d6be381 3052
3053bool
2be7449b 3054s390_decompose_addrstyle_without_index (rtx op, rtx *base,
3055 HOST_WIDE_INT *offset)
6d6be381 3056{
6191f2a0 3057 rtx off = NULL_RTX;
6d6be381 3058
6d6be381 3059 /* We can have an integer constant, an address register,
3060 or a sum of the two. */
6191f2a0 3061 if (CONST_SCALAR_INT_P (op))
6d6be381 3062 {
6191f2a0 3063 off = op;
6d6be381 3064 op = NULL_RTX;
3065 }
6191f2a0 3066 if (op && GET_CODE (op) == PLUS && CONST_SCALAR_INT_P (XEXP (op, 1)))
6d6be381 3067 {
6191f2a0 3068 off = XEXP (op, 1);
6d6be381 3069 op = XEXP (op, 0);
3070 }
3071 while (op && GET_CODE (op) == SUBREG)
3072 op = SUBREG_REG (op);
3073
3074 if (op && GET_CODE (op) != REG)
3075 return false;
3076
3077 if (offset)
6191f2a0 3078 {
3079 if (off == NULL_RTX)
3080 *offset = 0;
3081 else if (CONST_INT_P (off))
3082 *offset = INTVAL (off);
3083 else if (CONST_WIDE_INT_P (off))
 3084 /* The offset will be cut down to 12 bits anyway, so just take
 3085 the lowest-order chunk of the wide int. */
3086 *offset = CONST_WIDE_INT_ELT (off, 0);
3087 else
3088 gcc_unreachable ();
3089 }
6d6be381 3090 if (base)
3091 *base = op;
3092
3093 return true;
3094}
3095
3096
875862bf 3097/* Return true if CODE is a valid address without index. */
fab7adbf 3098
875862bf 3099bool
3100s390_legitimate_address_without_index_p (rtx op)
3101{
3102 struct s390_address addr;
3103
3104 if (!s390_decompose_address (XEXP (op, 0), &addr))
3105 return false;
3106 if (addr.indx)
3107 return false;
3108
3109 return true;
3110}
3111
59bc01b3 3112
2a672556 3113/* Return TRUE if ADDR is an operand valid for a load/store relative
3114 instruction. Be aware that the alignment of the operand needs to
3115 be checked separately.
3116 Valid addresses are single references or a sum of a reference and a
3117 constant integer. Return these parts in SYMREF and ADDEND. You can
 3118 pass NULL in SYMREF and/or ADDEND if you are not interested in these
3119 values. Literal pool references are *not* considered symbol
3120 references. */
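/* E.g. (illustrative): (const (plus (symbol_ref "foo") (const_int 8)))
   is accepted with *SYMREF = (symbol_ref "foo") and *ADDEND = 8,
   provided "foo" is not a literal pool entry.  */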
875862bf 3121
a7b49046 3122static bool
2a672556 3123s390_loadrelative_operand_p (rtx addr, rtx *symref, HOST_WIDE_INT *addend)
875862bf 3124{
a7b49046 3125 HOST_WIDE_INT tmpaddend = 0;
875862bf 3126
a7b49046 3127 if (GET_CODE (addr) == CONST)
3128 addr = XEXP (addr, 0);
3129
3130 if (GET_CODE (addr) == PLUS)
875862bf 3131 {
2a672556 3132 if (!CONST_INT_P (XEXP (addr, 1)))
a7b49046 3133 return false;
875862bf 3134
2a672556 3135 tmpaddend = INTVAL (XEXP (addr, 1));
3136 addr = XEXP (addr, 0);
3137 }
62cb5855 3138
2a672556 3139 if ((GET_CODE (addr) == SYMBOL_REF && !CONSTANT_POOL_ADDRESS_P (addr))
3140 || (GET_CODE (addr) == UNSPEC
3141 && (XINT (addr, 1) == UNSPEC_GOTENT
3142 || (TARGET_CPU_ZARCH && XINT (addr, 1) == UNSPEC_PLT))))
3143 {
3144 if (symref)
3145 *symref = addr;
3146 if (addend)
3147 *addend = tmpaddend;
62cb5855 3148
2a672556 3149 return true;
3150 }
3151 return false;
62cb5855 3152}
a7b49046 3153
3154/* Return true if the address in OP is valid for constraint letter C
 3155 if wrapped in a MEM rtx. Set LIT_POOL_OK to true if literal
3156 pool MEMs should be accepted. Only the Q, R, S, T constraint
3157 letters are allowed for C. */
875862bf 3158
a7b49046 3159static int
3160s390_check_qrst_address (char c, rtx op, bool lit_pool_ok)
3161{
3162 struct s390_address addr;
3163 bool decomposed = false;
3164
3165 /* This check makes sure that no symbolic address (except literal
3166 pool references) are accepted by the R or T constraints. */
2a672556 3167 if (s390_loadrelative_operand_p (op, NULL, NULL))
f3959569 3168 return 0;
3169
3170 /* Ensure literal pool references are only accepted if LIT_POOL_OK. */
3171 if (!lit_pool_ok)
875862bf 3172 {
a7b49046 3173 if (!s390_decompose_address (op, &addr))
875862bf 3174 return 0;
f3959569 3175 if (addr.literal_pool)
875862bf 3176 return 0;
a7b49046 3177 decomposed = true;
875862bf 3178 }
3179
7396c35d 3180 /* With reload, we sometimes get intermediate address forms that are
3181 actually invalid as-is, but we need to accept them in the most
3182 generic cases below ('R' or 'T'), since reload will in fact fix
3183 them up. LRA behaves differently here; we never see such forms,
3184 but on the other hand, we need to strictly reject every invalid
3185 address form. Perform this check right up front. */
3186 if (lra_in_progress)
3187 {
3188 if (!decomposed && !s390_decompose_address (op, &addr))
3189 return 0;
3190 decomposed = true;
3191 }
3192
875862bf 3193 switch (c)
3194 {
a7b49046 3195 case 'Q': /* no index short displacement */
3196 if (!decomposed && !s390_decompose_address (op, &addr))
875862bf 3197 return 0;
3198 if (addr.indx)
3199 return 0;
a7b49046 3200 if (!s390_short_displacement (addr.disp))
875862bf 3201 return 0;
a7b49046 3202 break;
875862bf 3203
a7b49046 3204 case 'R': /* with index short displacement */
875862bf 3205 if (TARGET_LONG_DISPLACEMENT)
3206 {
a7b49046 3207 if (!decomposed && !s390_decompose_address (op, &addr))
875862bf 3208 return 0;
3209 if (!s390_short_displacement (addr.disp))
3210 return 0;
3211 }
a7b49046 3212 /* Any invalid address here will be fixed up by reload,
3213 so accept it for the most generic constraint. */
875862bf 3214 break;
3215
a7b49046 3216 case 'S': /* no index long displacement */
a7b49046 3217 if (!decomposed && !s390_decompose_address (op, &addr))
875862bf 3218 return 0;
3219 if (addr.indx)
3220 return 0;
875862bf 3221 break;
3222
a7b49046 3223 case 'T': /* with index long displacement */
a7b49046 3224 /* Any invalid address here will be fixed up by reload,
3225 so accept it for the most generic constraint. */
875862bf 3226 break;
7396c35d 3227
a7b49046 3228 default:
3229 return 0;
3230 }
3231 return 1;
3232}
875862bf 3233
875862bf 3234
a7b49046 3235/* Evaluates constraint strings described by the regular expression
7396c35d 3236 ([A|B|Z](Q|R|S|T))|Y and returns 1 if OP is a valid operand for
a7b49046 3237 the constraint given in STR, or 0 else. */
3238
3239int
3240s390_mem_constraint (const char *str, rtx op)
3241{
3242 char c = str[0];
3243
3244 switch (c)
3245 {
3246 case 'A':
3247 /* Check for offsettable variants of memory constraints. */
3248 if (!MEM_P (op) || MEM_VOLATILE_P (op))
875862bf 3249 return 0;
a7b49046 3250 if ((reload_completed || reload_in_progress)
3251 ? !offsettable_memref_p (op) : !offsettable_nonstrict_memref_p (op))
e68d6a13 3252 return 0;
a7b49046 3253 return s390_check_qrst_address (str[1], XEXP (op, 0), true);
3254 case 'B':
3255 /* Check for non-literal-pool variants of memory constraints. */
3256 if (!MEM_P (op))
875862bf 3257 return 0;
a7b49046 3258 return s390_check_qrst_address (str[1], XEXP (op, 0), false);
3259 case 'Q':
3260 case 'R':
3261 case 'S':
3262 case 'T':
3263 if (GET_CODE (op) != MEM)
3264 return 0;
3265 return s390_check_qrst_address (c, XEXP (op, 0), true);
875862bf 3266 case 'Y':
6d6be381 3267 /* Simply check for the basic form of a shift count. Reload will
3268 take care of making sure we have a proper base register. */
2be7449b 3269 if (!s390_decompose_addrstyle_without_index (op, NULL, NULL))
6d6be381 3270 return 0;
3271 break;
a7b49046 3272 case 'Z':
3273 return s390_check_qrst_address (str[1], op, true);
875862bf 3274 default:
3275 return 0;
3276 }
875862bf 3277 return 1;
3278}
3279
59bc01b3 3280
59bc01b3 3281/* Evaluates constraint strings starting with letter O. Input
3282 parameter C is the second letter following the "O" in the constraint
3283 string. Returns 1 if VALUE meets the respective constraint and 0
3284 otherwise. */
875862bf 3285
e863b008 3286int
59bc01b3 3287s390_O_constraint_str (const char c, HOST_WIDE_INT value)
e863b008 3288{
59bc01b3 3289 if (!TARGET_EXTIMM)
3290 return 0;
e863b008 3291
59bc01b3 3292 switch (c)
e863b008 3293 {
59bc01b3 3294 case 's':
3295 return trunc_int_for_mode (value, SImode) == value;
3296
3297 case 'p':
3298 return value == 0
3299 || s390_single_part (GEN_INT (value), DImode, SImode, 0) == 1;
3300
3301 case 'n':
29847ec4 3302 return s390_single_part (GEN_INT (value - 1), DImode, SImode, -1) == 1;
59bc01b3 3303
e863b008 3304 default:
59bc01b3 3305 gcc_unreachable ();
e863b008 3306 }
3307}
3308
59bc01b3 3309
3310/* Evaluates constraint strings starting with letter N. Parameter STR
3311 contains the letters following letter "N" in the constraint string.
3312 Returns true if VALUE matches the constraint. */
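/* E.g. (illustrative): "xHD0" accepts any DImode value with exactly
   one nonzero HImode part, such as 0x0000ffff00000000, while "0HD0"
   additionally requires that part to be the most significant one.  */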
e863b008 3313
875862bf 3314int
59bc01b3 3315s390_N_constraint_str (const char *str, HOST_WIDE_INT value)
875862bf 3316{
3754d046 3317 machine_mode mode, part_mode;
875862bf 3318 int def;
3319 int part, part_goal;
3320
875862bf 3321
59bc01b3 3322 if (str[0] == 'x')
3323 part_goal = -1;
3324 else
3325 part_goal = str[0] - '0';
875862bf 3326
59bc01b3 3327 switch (str[1])
3328 {
3329 case 'Q':
3330 part_mode = QImode;
875862bf 3331 break;
59bc01b3 3332 case 'H':
3333 part_mode = HImode;
163277cf 3334 break;
59bc01b3 3335 case 'S':
3336 part_mode = SImode;
3337 break;
3338 default:
3339 return 0;
3340 }
163277cf 3341
59bc01b3 3342 switch (str[2])
3343 {
3344 case 'H':
3345 mode = HImode;
3346 break;
3347 case 'S':
3348 mode = SImode;
3349 break;
3350 case 'D':
3351 mode = DImode;
3352 break;
3353 default:
3354 return 0;
3355 }
53239c89 3356
59bc01b3 3357 switch (str[3])
3358 {
3359 case '0':
3360 def = 0;
3361 break;
3362 case 'F':
3363 def = -1;
3364 break;
875862bf 3365 default:
3366 return 0;
3367 }
3368
59bc01b3 3369 if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
3370 return 0;
3371
3372 part = s390_single_part (GEN_INT (value), mode, part_mode, def);
3373 if (part < 0)
3374 return 0;
3375 if (part_goal != -1 && part_goal != part)
3376 return 0;
3377
875862bf 3378 return 1;
3379}
3380
59bc01b3 3381
3382/* Returns true if the input parameter VALUE is a float zero. */
3383
3384int
3385s390_float_const_zero_p (rtx value)
3386{
3387 return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
3388 && value == CONST0_RTX (GET_MODE (value)));
3389}
3390
fa7a995b 3391/* Implement TARGET_REGISTER_MOVE_COST. */
3392
3393static int
5fe5762e 3394s390_register_move_cost (machine_mode mode,
fa7a995b 3395 reg_class_t from, reg_class_t to)
3396{
5fe5762e 3397 /* On s390, copy between fprs and gprs is expensive. */
3398
3399 /* It becomes somewhat faster having ldgr/lgdr. */
3400 if (TARGET_Z10 && GET_MODE_SIZE (mode) == 8)
3401 {
3402 /* ldgr is single cycle. */
3403 if (reg_classes_intersect_p (from, GENERAL_REGS)
3404 && reg_classes_intersect_p (to, FP_REGS))
3405 return 1;
3406 /* lgdr needs 3 cycles. */
3407 if (reg_classes_intersect_p (to, GENERAL_REGS)
3408 && reg_classes_intersect_p (from, FP_REGS))
3409 return 3;
3410 }
3411
3412 /* Otherwise copying is done via memory. */
3413 if ((reg_classes_intersect_p (from, GENERAL_REGS)
3414 && reg_classes_intersect_p (to, FP_REGS))
3415 || (reg_classes_intersect_p (from, FP_REGS)
3416 && reg_classes_intersect_p (to, GENERAL_REGS)))
fa7a995b 3417 return 10;
3418
3419 return 1;
3420}
3421
3422/* Implement TARGET_MEMORY_MOVE_COST. */
3423
3424static int
3754d046 3425s390_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
fa7a995b 3426 reg_class_t rclass ATTRIBUTE_UNUSED,
3427 bool in ATTRIBUTE_UNUSED)
3428{
9a071c9f 3429 return 2;
fa7a995b 3430}
59bc01b3 3431
875862bf 3432/* Compute a (partial) cost for rtx X. Return true if the complete
3433 cost has been computed, and false if subexpressions should be
016d030e 3434 scanned. In either case, *TOTAL contains the cost result. The
3435 initial value of *TOTAL is the default value computed by
3436 rtx_cost. It may be left unmodified. OUTER_CODE contains the
 3437 code of the superexpression of X. */
875862bf 3438
3439static bool
5ae4887d 3440s390_rtx_costs (rtx x, machine_mode mode, int outer_code,
3441 int opno ATTRIBUTE_UNUSED,
20d892d1 3442 int *total, bool speed ATTRIBUTE_UNUSED)
fab7adbf 3443{
5ae4887d 3444 int code = GET_CODE (x);
fab7adbf 3445 switch (code)
3446 {
3447 case CONST:
fab7adbf 3448 case CONST_INT:
fab7adbf 3449 case LABEL_REF:
3450 case SYMBOL_REF:
3451 case CONST_DOUBLE:
ba0e61d6 3452 case CONST_WIDE_INT:
3f074425 3453 case MEM:
fab7adbf 3454 *total = 0;
3455 return true;
3456
02a8efd2 3457 case IOR:
3458 /* risbg */
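	/* A sketch of the matched form (illustrative):
	     (ior (and (reg A) (const_int (1 << N) - 1))
		  (ashift (reg B) (const_int N)))
	   i.e. a low bitfield of A merged with B shifted just above
	   it, which a rotate-and-insert sequence can do in two
	   insns.  */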
3459 if (GET_CODE (XEXP (x, 0)) == AND
3460 && GET_CODE (XEXP (x, 1)) == ASHIFT
3461 && REG_P (XEXP (XEXP (x, 0), 0))
3462 && REG_P (XEXP (XEXP (x, 1), 0))
3463 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
3464 && CONST_INT_P (XEXP (XEXP (x, 1), 1))
3465 && (UINTVAL (XEXP (XEXP (x, 0), 1)) ==
3466 (1UL << UINTVAL (XEXP (XEXP (x, 1), 1))) - 1))
3467 {
3468 *total = COSTS_N_INSNS (2);
3469 return true;
3470 }
0903985d 3471 /* fallthrough */
fab7adbf 3472 case ASHIFT:
3473 case ASHIFTRT:
3474 case LSHIFTRT:
18925d38 3475 case ROTATE:
3476 case ROTATERT:
fab7adbf 3477 case AND:
fab7adbf 3478 case XOR:
fab7adbf 3479 case NEG:
3480 case NOT:
3481 *total = COSTS_N_INSNS (1);
18925d38 3482 return false;
fab7adbf 3483
9cd3f3e6 3484 case PLUS:
3485 case MINUS:
9cd3f3e6 3486 *total = COSTS_N_INSNS (1);
3487 return false;
3488
ffead1ca 3489 case MULT:
5ae4887d 3490 switch (mode)
18925d38 3491 {
3492 case SImode:
9cd3f3e6 3493 {
18925d38 3494 rtx left = XEXP (x, 0);
3495 rtx right = XEXP (x, 1);
3496 if (GET_CODE (right) == CONST_INT
cb888f33 3497 && CONST_OK_FOR_K (INTVAL (right)))
18925d38 3498 *total = s390_cost->mhi;
3499 else if (GET_CODE (left) == SIGN_EXTEND)
3500 *total = s390_cost->mh;
3501 else
3502 *total = s390_cost->ms; /* msr, ms, msy */
3503 break;
3504 }
3505 case DImode:
3506 {
3507 rtx left = XEXP (x, 0);
3508 rtx right = XEXP (x, 1);
b5fdc416 3509 if (TARGET_ZARCH)
18925d38 3510 {
3511 if (GET_CODE (right) == CONST_INT
cb888f33 3512 && CONST_OK_FOR_K (INTVAL (right)))
18925d38 3513 *total = s390_cost->mghi;
3514 else if (GET_CODE (left) == SIGN_EXTEND)
3515 *total = s390_cost->msgf;
3516 else
3517 *total = s390_cost->msg; /* msgr, msg */
3518 }
3519 else /* TARGET_31BIT */
3520 {
3521 if (GET_CODE (left) == SIGN_EXTEND
3522 && GET_CODE (right) == SIGN_EXTEND)
3523 /* mulsidi case: mr, m */
3524 *total = s390_cost->m;
9cd3f3e6 3525 else if (GET_CODE (left) == ZERO_EXTEND
3526 && GET_CODE (right) == ZERO_EXTEND
3527 && TARGET_CPU_ZARCH)
3528 /* umulsidi case: ml, mlr */
3529 *total = s390_cost->ml;
18925d38 3530 else
3531 /* Complex calculation is required. */
3532 *total = COSTS_N_INSNS (40);
3533 }
3534 break;
3535 }
3536 case SFmode:
3537 case DFmode:
3538 *total = s390_cost->mult_df;
3539 break;
429f9fdb 3540 case TFmode:
3541 *total = s390_cost->mxbr;
3542 break;
18925d38 3543 default:
3544 return false;
3545 }
3546 return false;
fab7adbf 3547
81470015 3548 case FMA:
5ae4887d 3549 switch (mode)
81470015 3550 {
3551 case DFmode:
3552 *total = s390_cost->madbr;
3553 break;
3554 case SFmode:
3555 *total = s390_cost->maebr;
3556 break;
3557 default:
3558 return false;
3559 }
3560 /* Negate in the third argument is free: FMSUB. */
3561 if (GET_CODE (XEXP (x, 2)) == NEG)
3562 {
5ae4887d 3563 *total += (rtx_cost (XEXP (x, 0), mode, FMA, 0, speed)
3564 + rtx_cost (XEXP (x, 1), mode, FMA, 1, speed)
3565 + rtx_cost (XEXP (XEXP (x, 2), 0), mode, FMA, 2, speed));
81470015 3566 return true;
3567 }
3568 return false;
3569
3f074425 3570 case UDIV:
3571 case UMOD:
5ae4887d 3572 if (mode == TImode) /* 128 bit division */
3f074425 3573 *total = s390_cost->dlgr;
5ae4887d 3574 else if (mode == DImode)
3f074425 3575 {
3576 rtx right = XEXP (x, 1);
3577 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
3578 *total = s390_cost->dlr;
3579 else /* 64 by 64 bit division */
3580 *total = s390_cost->dlgr;
3581 }
5ae4887d 3582 else if (mode == SImode) /* 32 bit division */
3f074425 3583 *total = s390_cost->dlr;
3584 return false;
3585
fab7adbf 3586 case DIV:
3f074425 3587 case MOD:
5ae4887d 3588 if (mode == DImode)
3f074425 3589 {
3590 rtx right = XEXP (x, 1);
3591 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
b5fdc416 3592 if (TARGET_ZARCH)
3f074425 3593 *total = s390_cost->dsgfr;
3594 else
3595 *total = s390_cost->dr;
3596 else /* 64 by 64 bit division */
3597 *total = s390_cost->dsgr;
3598 }
5ae4887d 3599 else if (mode == SImode) /* 32 bit division */
3f074425 3600 *total = s390_cost->dlr;
5ae4887d 3601 else if (mode == SFmode)
260075cc 3602 {
095798e3 3603 *total = s390_cost->debr;
260075cc 3604 }
5ae4887d 3605 else if (mode == DFmode)
260075cc 3606 {
095798e3 3607 *total = s390_cost->ddbr;
260075cc 3608 }
5ae4887d 3609 else if (mode == TFmode)
429f9fdb 3610 {
095798e3 3611 *total = s390_cost->dxbr;
429f9fdb 3612 }
18925d38 3613 return false;
3614
9cd3f3e6 3615 case SQRT:
5ae4887d 3616 if (mode == SFmode)
9cd3f3e6 3617 *total = s390_cost->sqebr;
5ae4887d 3618 else if (mode == DFmode)
9cd3f3e6 3619 *total = s390_cost->sqdbr;
429f9fdb 3620 else /* TFmode */
3621 *total = s390_cost->sqxbr;
9cd3f3e6 3622 return false;
3623
18925d38 3624 case SIGN_EXTEND:
9cd3f3e6 3625 case ZERO_EXTEND:
3f074425 3626 if (outer_code == MULT || outer_code == DIV || outer_code == MOD
3627 || outer_code == PLUS || outer_code == MINUS
3628 || outer_code == COMPARE)
18925d38 3629 *total = 0;
3630 return false;
fab7adbf 3631
3f074425 3632 case COMPARE:
3633 *total = COSTS_N_INSNS (1);
3634 if (GET_CODE (XEXP (x, 0)) == AND
3635 && GET_CODE (XEXP (x, 1)) == CONST_INT
3636 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
3637 {
3638 rtx op0 = XEXP (XEXP (x, 0), 0);
3639 rtx op1 = XEXP (XEXP (x, 0), 1);
3640 rtx op2 = XEXP (x, 1);
3641
3642 if (memory_operand (op0, GET_MODE (op0))
3643 && s390_tm_ccmode (op1, op2, 0) != VOIDmode)
3644 return true;
3645 if (register_operand (op0, GET_MODE (op0))
3646 && s390_tm_ccmode (op1, op2, 1) != VOIDmode)
3647 return true;
3648 }
3649 return false;
3650
fab7adbf 3651 default:
3652 return false;
3653 }
3654}
3655
ee9c19ee 3656/* Return the cost of an address rtx ADDR. */
3657
ec0457a8 3658static int
3754d046 3659s390_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
d9c5e5f4 3660 addr_space_t as ATTRIBUTE_UNUSED,
3661 bool speed ATTRIBUTE_UNUSED)
ee9c19ee 3662{
3663 struct s390_address ad;
3664 if (!s390_decompose_address (addr, &ad))
3665 return 1000;
3666
 3667	  return ad.indx ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
3668}
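/* Illustrative note (not from the original source): with the return
   value above, a base-plus-displacement address such as 8(%r2) costs
   COSTS_N_INSNS (1), while a base-plus-index address such as
   8(%r1,%r2) costs one unit more, so addresses without an index
   register are slightly preferred where a choice exists.  */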
3669
be00aaa8 3670/* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
3671 otherwise return 0. */
3672
3673int
edd89d66 3674tls_symbolic_operand (rtx op)
be00aaa8 3675{
be00aaa8 3676 if (GET_CODE (op) != SYMBOL_REF)
3677 return 0;
a3e33162 3678 return SYMBOL_REF_TLS_MODEL (op);
be00aaa8 3679}
4673c1a0 3680\f
923cf36d 3681/* Split DImode access register reference REG (on 64-bit) into its constituent
3682 low and high parts, and store them into LO and HI. Note that gen_lowpart/
3683 gen_highpart cannot be used as they assume all registers are word-sized,
3684 while our access registers have only half that size. */
3685
3686void
3687s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
3688{
3689 gcc_assert (TARGET_64BIT);
3690 gcc_assert (ACCESS_REG_P (reg));
3691 gcc_assert (GET_MODE (reg) == DImode);
3692 gcc_assert (!(REGNO (reg) & 1));
3693
3694 *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
3695 *hi = gen_rtx_REG (SImode, REGNO (reg));
3696}
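/* Illustrative example (not from the original source): for a DImode
   value in the access-register pair N/N+1 (N even), the code above
   yields *hi = SImode reg N and *lo = SImode reg N+1, i.e. the high
   part lives in the lower-numbered register.  */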
4673c1a0 3697
56769981 3698/* Return true if OP contains a symbol reference.  */
4673c1a0 3699
e5537457 3700bool
b40da9a7 3701symbolic_reference_mentioned_p (rtx op)
4673c1a0 3702{
edd89d66 3703 const char *fmt;
3704 int i;
4673c1a0 3705
3706 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
3707 return 1;
3708
3709 fmt = GET_RTX_FORMAT (GET_CODE (op));
3710 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
3711 {
3712 if (fmt[i] == 'E')
3713 {
edd89d66 3714 int j;
4673c1a0 3715
3716 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
3717 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
3718 return 1;
3719 }
3720
3721 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
3722 return 1;
3723 }
3724
3725 return 0;
3726}
3727
be00aaa8 3728/* Return true if OP contains a reference to a thread-local symbol. */
3729
e5537457 3730bool
b40da9a7 3731tls_symbolic_reference_mentioned_p (rtx op)
be00aaa8 3732{
edd89d66 3733 const char *fmt;
3734 int i;
be00aaa8 3735
3736 if (GET_CODE (op) == SYMBOL_REF)
3737 return tls_symbolic_operand (op);
3738
3739 fmt = GET_RTX_FORMAT (GET_CODE (op));
3740 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
3741 {
3742 if (fmt[i] == 'E')
3743 {
edd89d66 3744 int j;
be00aaa8 3745
3746 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
3747 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
e5537457 3748 return true;
be00aaa8 3749 }
3750
3751 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
e5537457 3752 return true;
be00aaa8 3753 }
3754
e5537457 3755 return false;
be00aaa8 3756}
3757
4673c1a0 3758
f81e845f 3759/* Return true if OP is a legitimate general operand when
3760 generating PIC code. It is given that flag_pic is on
ba0e61d6 3761 and that OP satisfies CONSTANT_P. */
56769981 3762
4673c1a0 3763int
edd89d66 3764legitimate_pic_operand_p (rtx op)
4673c1a0 3765{
8b4a4127 3766 /* Accept all non-symbolic constants. */
4673c1a0 3767 if (!SYMBOLIC_CONST (op))
3768 return 1;
3769
f81e845f 3770 /* Reject everything else; must be handled
be00aaa8 3771 via emit_symbolic_move. */
4673c1a0 3772 return 0;
3773}
3774
56769981 3775/* Returns true if the constant value OP is a legitimate general operand.
ba0e61d6 3776 It is given that OP satisfies CONSTANT_P. */
56769981 3777
ca316360 3778static bool
3754d046 3779s390_legitimate_constant_p (machine_mode mode, rtx op)
4673c1a0 3780{
abf3beed 3781 if (TARGET_VX && VECTOR_MODE_P (mode) && GET_CODE (op) == CONST_VECTOR)
76a4c804 3782 {
3783 if (GET_MODE_SIZE (mode) != 16)
3784 return 0;
3785
80fc7f56 3786 if (!satisfies_constraint_j00 (op)
3787 && !satisfies_constraint_jm1 (op)
3788 && !satisfies_constraint_jKK (op)
3789 && !satisfies_constraint_jxx (op)
3790 && !satisfies_constraint_jyy (op))
76a4c804 3791 return 0;
3792 }
3793
8b4a4127 3794 /* Accept all non-symbolic constants. */
4673c1a0 3795 if (!SYMBOLIC_CONST (op))
3796 return 1;
3797
be00aaa8 3798 /* Accept immediate LARL operands. */
ca316360 3799 if (TARGET_CPU_ZARCH && larl_operand (op, mode))
be00aaa8 3800 return 1;
3801
3802 /* Thread-local symbols are never legal constants. This is
3803 so that emit_call knows that computing such addresses
3804 might require a function call. */
3805 if (TLS_SYMBOLIC_CONST (op))
3806 return 0;
3807
4673c1a0 3808 /* In the PIC case, symbolic constants must *not* be
3809 forced into the literal pool. We accept them here,
be00aaa8 3810 so that they will be handled by emit_symbolic_move. */
4673c1a0 3811 if (flag_pic)
3812 return 1;
3813
4673c1a0 3814 /* All remaining non-PIC symbolic constants are
3815 forced into the literal pool. */
3816 return 0;
3817}
3818
be00aaa8 3819/* Determine if it's legal to put X into the constant pool. This
3820 is not possible if X contains the address of a symbol that is
3821 not constant (TLS) or not known at final link time (PIC). */
3822
3823static bool
3754d046 3824s390_cannot_force_const_mem (machine_mode mode, rtx x)
be00aaa8 3825{
3826 switch (GET_CODE (x))
3827 {
3828 case CONST_INT:
3829 case CONST_DOUBLE:
ba0e61d6 3830 case CONST_WIDE_INT:
76a4c804 3831 case CONST_VECTOR:
be00aaa8 3832 /* Accept all non-symbolic constants. */
3833 return false;
3834
3835 case LABEL_REF:
3836 /* Labels are OK iff we are non-PIC. */
3837 return flag_pic != 0;
3838
3839 case SYMBOL_REF:
3840 /* 'Naked' TLS symbol references are never OK,
3841 non-TLS symbols are OK iff we are non-PIC. */
3842 if (tls_symbolic_operand (x))
3843 return true;
3844 else
3845 return flag_pic != 0;
3846
3847 case CONST:
7d7d7bd2 3848 return s390_cannot_force_const_mem (mode, XEXP (x, 0));
be00aaa8 3849 case PLUS:
3850 case MINUS:
7d7d7bd2 3851 return s390_cannot_force_const_mem (mode, XEXP (x, 0))
3852 || s390_cannot_force_const_mem (mode, XEXP (x, 1));
be00aaa8 3853
3854 case UNSPEC:
3855 switch (XINT (x, 1))
3856 {
3857 /* Only lt-relative or GOT-relative UNSPECs are OK. */
12ef3745 3858 case UNSPEC_LTREL_OFFSET:
3859 case UNSPEC_GOT:
3860 case UNSPEC_GOTOFF:
3861 case UNSPEC_PLTOFF:
be00aaa8 3862 case UNSPEC_TLSGD:
3863 case UNSPEC_TLSLDM:
3864 case UNSPEC_NTPOFF:
3865 case UNSPEC_DTPOFF:
3866 case UNSPEC_GOTNTPOFF:
3867 case UNSPEC_INDNTPOFF:
3868 return false;
3869
d345b493 3870	      /* If the literal pool shares the code section, execute
 3871		 template placeholders may be put into the pool as well.  */
3872 case UNSPEC_INSN:
3873 return TARGET_CPU_ZARCH;
3874
be00aaa8 3875 default:
3876 return true;
3877 }
3878 break;
3879
3880 default:
32eda510 3881 gcc_unreachable ();
be00aaa8 3882 }
3883}
3884
8b4a4127 3885/* Returns true if the constant value OP is a legitimate general
f81e845f 3886 operand during and after reload. The difference to
8b4a4127 3887 legitimate_constant_p is that this function will not accept
3888 a constant that would need to be forced to the literal pool
33d033da 3889 before it can be used as operand.
3890 This function accepts all constants which can be loaded directly
3891 into a GPR. */
8b4a4127 3892
e5537457 3893bool
edd89d66 3894legitimate_reload_constant_p (rtx op)
8b4a4127 3895{
51aa1e9c 3896 /* Accept la(y) operands. */
f81e845f 3897 if (GET_CODE (op) == CONST_INT
51aa1e9c 3898 && DISP_IN_RANGE (INTVAL (op)))
e5537457 3899 return true;
51aa1e9c 3900
163277cf 3901 /* Accept l(g)hi/l(g)fi operands. */
8b4a4127 3902 if (GET_CODE (op) == CONST_INT
163277cf 3903 && (CONST_OK_FOR_K (INTVAL (op)) || CONST_OK_FOR_Os (INTVAL (op))))
e5537457 3904 return true;
8b4a4127 3905
3906 /* Accept lliXX operands. */
dafc8d45 3907 if (TARGET_ZARCH
53239c89 3908 && GET_CODE (op) == CONST_INT
3909 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
3910 && s390_single_part (op, word_mode, HImode, 0) >= 0)
e5537457 3911 return true;
8b4a4127 3912
163277cf 3913 if (TARGET_EXTIMM
3914 && GET_CODE (op) == CONST_INT
3915 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
3916 && s390_single_part (op, word_mode, SImode, 0) >= 0)
3917 return true;
3918
8b4a4127 3919 /* Accept larl operands. */
dafc8d45 3920 if (TARGET_CPU_ZARCH
8b4a4127 3921 && larl_operand (op, VOIDmode))
e5537457 3922 return true;
8b4a4127 3923
70037005 3924 /* Accept floating-point zero operands that fit into a single GPR. */
3925 if (GET_CODE (op) == CONST_DOUBLE
3926 && s390_float_const_zero_p (op)
3927 && GET_MODE_SIZE (GET_MODE (op)) <= UNITS_PER_WORD)
3928 return true;
3929
53239c89 3930 /* Accept double-word operands that can be split. */
ba0e61d6 3931 if (GET_CODE (op) == CONST_WIDE_INT
3932 || (GET_CODE (op) == CONST_INT
3933 && trunc_int_for_mode (INTVAL (op), word_mode) != INTVAL (op)))
53239c89 3934 {
3754d046 3935 machine_mode dword_mode = word_mode == SImode ? DImode : TImode;
53239c89 3936 rtx hi = operand_subword (op, 0, 0, dword_mode);
3937 rtx lo = operand_subword (op, 1, 0, dword_mode);
3938 return legitimate_reload_constant_p (hi)
3939 && legitimate_reload_constant_p (lo);
3940 }
3941
8b4a4127 3942 /* Everything else cannot be handled without reload. */
e5537457 3943 return false;
8b4a4127 3944}
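/* Illustrative example (not from the original source): on a 31-bit
   target the DImode constant 0x0000100000001000 is too wide for the
   immediate checks above on its own, but the double-word path splits
   it into the SImode words 0x00001000 and 0x00001000, each of which
   is accepted as an lhi immediate, so the constant as a whole is
   accepted.  */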
3945
33d033da 3946/* Returns true if the constant value OP is a legitimate fp operand
3947 during and after reload.
3948 This function accepts all constants which can be loaded directly
3949 into an FPR. */
3950
3951static bool
3952legitimate_reload_fp_constant_p (rtx op)
3953{
3954 /* Accept floating-point zero operands if the load zero instruction
81769881 3955 can be used. Prior to z196 the load fp zero instruction caused a
3956 performance penalty if the result is used as BFP number. */
33d033da 3957 if (TARGET_Z196
3958 && GET_CODE (op) == CONST_DOUBLE
3959 && s390_float_const_zero_p (op))
3960 return true;
3961
3962 return false;
3963}
3964
76a4c804 3965/* Returns true if the constant value OP is a legitimate vector operand
3966 during and after reload.
3967 This function accepts all constants which can be loaded directly
 3968	   into a VR.  */
3969
3970static bool
3971legitimate_reload_vector_constant_p (rtx op)
3972{
76a4c804 3973 if (TARGET_VX && GET_MODE_SIZE (GET_MODE (op)) == 16
80fc7f56 3974 && (satisfies_constraint_j00 (op)
3975 || satisfies_constraint_jm1 (op)
3976 || satisfies_constraint_jKK (op)
3977 || satisfies_constraint_jxx (op)
3978 || satisfies_constraint_jyy (op)))
76a4c804 3979 return true;
3980
3981 return false;
3982}
3983
8deb3959 3984/* Given an rtx OP being reloaded into a reg required to be in class RCLASS,
8b4a4127 3985 return the class of reg to actually use. */
3986
3359ccfd 3987static reg_class_t
3988s390_preferred_reload_class (rtx op, reg_class_t rclass)
8b4a4127 3989{
8b4a4127 3990 switch (GET_CODE (op))
3991 {
70037005 3992 /* Constants we cannot reload into general registers
3993 must be forced into the literal pool. */
76a4c804 3994 case CONST_VECTOR:
8b4a4127 3995 case CONST_DOUBLE:
3996 case CONST_INT:
ba0e61d6 3997 case CONST_WIDE_INT:
70037005 3998 if (reg_class_subset_p (GENERAL_REGS, rclass)
3999 && legitimate_reload_constant_p (op))
4000 return GENERAL_REGS;
4001 else if (reg_class_subset_p (ADDR_REGS, rclass)
4002 && legitimate_reload_constant_p (op))
4003 return ADDR_REGS;
33d033da 4004 else if (reg_class_subset_p (FP_REGS, rclass)
4005 && legitimate_reload_fp_constant_p (op))
4006 return FP_REGS;
76a4c804 4007 else if (reg_class_subset_p (VEC_REGS, rclass)
4008 && legitimate_reload_vector_constant_p (op))
4009 return VEC_REGS;
4010
33d033da 4011 return NO_REGS;
8b4a4127 4012
4013 /* If a symbolic constant or a PLUS is reloaded,
0b300c86 4014 it is most likely being used as an address, so
4015 prefer ADDR_REGS. If 'class' is not a superset
4016 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
de47476b 4017 case CONST:
37c55f71 4018 /* Symrefs cannot be pushed into the literal pool with -fPIC
4019 so we *MUST NOT* return NO_REGS for these cases
4020 (s390_cannot_force_const_mem will return true).
4021
4022 On the other hand we MUST return NO_REGS for symrefs with
4023 invalid addend which might have been pushed to the literal
4024 pool (no -fPIC). Usually we would expect them to be
4025 handled via secondary reload but this does not happen if
4026 they are used as literal pool slot replacement in reload
4027 inheritance (see emit_input_reload_insns). */
de47476b 4028 if (TARGET_CPU_ZARCH
4029 && GET_CODE (XEXP (op, 0)) == PLUS
4030 && GET_CODE (XEXP (XEXP(op, 0), 0)) == SYMBOL_REF
4031 && GET_CODE (XEXP (XEXP(op, 0), 1)) == CONST_INT)
4032 {
37c55f71 4033 if (flag_pic && reg_class_subset_p (ADDR_REGS, rclass))
de47476b 4034 return ADDR_REGS;
4035 else
4036 return NO_REGS;
4037 }
4038 /* fallthrough */
8b4a4127 4039 case LABEL_REF:
4040 case SYMBOL_REF:
08d88e72 4041 if (!legitimate_reload_constant_p (op))
4042 return NO_REGS;
4043 /* fallthrough */
4044 case PLUS:
4045 /* load address will be used. */
8deb3959 4046 if (reg_class_subset_p (ADDR_REGS, rclass))
08d88e72 4047 return ADDR_REGS;
0b300c86 4048 else
4049 return NO_REGS;
8b4a4127 4050
4051 default:
4052 break;
4053 }
4054
8deb3959 4055 return rclass;
8b4a4127 4056}
4673c1a0 4057
e68d6a13 4058/* Return true if ADDR is SYMBOL_REF + addend with addend being a
4059 multiple of ALIGNMENT and the SYMBOL_REF being naturally
4060 aligned. */
4061
4062bool
4063s390_check_symref_alignment (rtx addr, HOST_WIDE_INT alignment)
4064{
4065 HOST_WIDE_INT addend;
4066 rtx symref;
4067
78affa36 4068 /* The "required alignment" might be 0 (e.g. for certain structs
4069 accessed via BLKmode). Early abort in this case, as well as when
4070 an alignment > 8 is required. */
4071 if (alignment < 2 || alignment > 8)
4072 return false;
4073
2a672556 4074 if (!s390_loadrelative_operand_p (addr, &symref, &addend))
4075 return false;
62cb5855 4076
2a672556 4077 if (addend & (alignment - 1))
e68d6a13 4078 return false;
4079
78affa36 4080 if (GET_CODE (symref) == SYMBOL_REF)
4081 {
4082 /* We have load-relative instructions for 2-byte, 4-byte, and
4083 8-byte alignment so allow only these. */
4084 switch (alignment)
4085 {
4086 case 8: return !SYMBOL_FLAG_NOTALIGN8_P (symref);
4087 case 4: return !SYMBOL_FLAG_NOTALIGN4_P (symref);
4088 case 2: return !SYMBOL_FLAG_NOTALIGN2_P (symref);
4089 default: return false;
4090 }
4091 }
2a672556 4092
4093 if (GET_CODE (symref) == UNSPEC
4094 && alignment <= UNITS_PER_LONG)
4095 return true;
4096
4097 return false;
e68d6a13 4098}
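/* Worked example (not from the original source): for ADDR = sym + 6
   and ALIGNMENT = 4, the test (addend & (alignment - 1)) above gives
   6 & 3 = 2 != 0, so the access is rejected; sym + 8 with the same
   alignment passes the addend test and is then only accepted if the
   SYMBOL_REF itself does not have SYMBOL_FLAG_NOTALIGN4_P set.  */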
4099
4100/* ADDR is moved into REG using larl. If ADDR isn't a valid larl
 4101   operand, SCRATCH is used to reload the even part of the address,
 4102   after which one is added.  */
4103
4104void
4105s390_reload_larl_operand (rtx reg, rtx addr, rtx scratch)
4106{
4107 HOST_WIDE_INT addend;
4108 rtx symref;
4109
2a672556 4110 if (!s390_loadrelative_operand_p (addr, &symref, &addend))
e68d6a13 4111 gcc_unreachable ();
4112
4113 if (!(addend & 1))
4114 /* Easy case. The addend is even so larl will do fine. */
4115 emit_move_insn (reg, addr);
4116 else
4117 {
4118 /* We can leave the scratch register untouched if the target
4119 register is a valid base register. */
4120 if (REGNO (reg) < FIRST_PSEUDO_REGISTER
4121 && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS)
4122 scratch = reg;
4123
4124 gcc_assert (REGNO (scratch) < FIRST_PSEUDO_REGISTER);
4125 gcc_assert (REGNO_REG_CLASS (REGNO (scratch)) == ADDR_REGS);
4126
4127 if (addend != 1)
4128 emit_move_insn (scratch,
4129 gen_rtx_CONST (Pmode,
4130 gen_rtx_PLUS (Pmode, symref,
4131 GEN_INT (addend - 1))));
4132 else
4133 emit_move_insn (scratch, symref);
4134
4135 /* Increment the address using la in order to avoid clobbering cc. */
de47476b 4136 s390_load_address (reg, gen_rtx_PLUS (Pmode, scratch, const1_rtx));
e68d6a13 4137 }
4138}
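/* Illustrative sequence (not from the original source): for
   ADDR = sym + 5, which larl cannot load directly because the addend
   is odd, the code above emits roughly

     larl  %scratch, sym+4    ; even part of the address
     la    %reg, 1(%scratch)  ; add one without clobbering cc

   reusing REG itself as the scratch register when it is a valid
   base register.  */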
4139
4140/* Generate what is necessary to move between REG and MEM using
4141 SCRATCH. The direction is given by TOMEM. */
4142
4143void
4144s390_reload_symref_address (rtx reg, rtx mem, rtx scratch, bool tomem)
4145{
4146 /* Reload might have pulled a constant out of the literal pool.
4147 Force it back in. */
4148 if (CONST_INT_P (mem) || GET_CODE (mem) == CONST_DOUBLE
ba0e61d6 4149 || GET_CODE (mem) == CONST_WIDE_INT
76a4c804 4150 || GET_CODE (mem) == CONST_VECTOR
e68d6a13 4151 || GET_CODE (mem) == CONST)
4152 mem = force_const_mem (GET_MODE (reg), mem);
4153
4154 gcc_assert (MEM_P (mem));
4155
4156 /* For a load from memory we can leave the scratch register
4157 untouched if the target register is a valid base register. */
4158 if (!tomem
4159 && REGNO (reg) < FIRST_PSEUDO_REGISTER
4160 && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS
4161 && GET_MODE (reg) == GET_MODE (scratch))
4162 scratch = reg;
4163
4164 /* Load address into scratch register. Since we can't have a
4165 secondary reload for a secondary reload we have to cover the case
4166 where larl would need a secondary reload here as well. */
4167 s390_reload_larl_operand (scratch, XEXP (mem, 0), scratch);
4168
4169 /* Now we can use a standard load/store to do the move. */
4170 if (tomem)
4171 emit_move_insn (replace_equiv_address (mem, scratch), reg);
4172 else
4173 emit_move_insn (reg, replace_equiv_address (mem, scratch));
4174}
4175
328d5423 4176/* Inform reload about cases where moving X with a mode MODE to a register in
8deb3959 4177 RCLASS requires an extra scratch or immediate register. Return the class
328d5423 4178 needed for the immediate register. */
429f9fdb 4179
964229b7 4180static reg_class_t
4181s390_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
3754d046 4182 machine_mode mode, secondary_reload_info *sri)
328d5423 4183{
964229b7 4184 enum reg_class rclass = (enum reg_class) rclass_i;
4185
328d5423 4186 /* Intermediate register needed. */
8deb3959 4187 if (reg_classes_intersect_p (CC_REGS, rclass))
bcbf02a5 4188 return GENERAL_REGS;
4189
76a4c804 4190 if (TARGET_VX)
4191 {
 4192      /* The vst/vl vector move instructions only allow short
4193 displacements. */
4194 if (MEM_P (x)
4195 && GET_CODE (XEXP (x, 0)) == PLUS
4196 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4197 && !SHORT_DISP_IN_RANGE(INTVAL (XEXP (XEXP (x, 0), 1)))
4198 && reg_class_subset_p (rclass, VEC_REGS)
4199 && (!reg_class_subset_p (rclass, FP_REGS)
4200 || (GET_MODE_SIZE (mode) > 8
4201 && s390_class_max_nregs (FP_REGS, mode) == 1)))
4202 {
4203 if (in_p)
4204 sri->icode = (TARGET_64BIT ?
4205 CODE_FOR_reloaddi_la_in :
4206 CODE_FOR_reloadsi_la_in);
4207 else
4208 sri->icode = (TARGET_64BIT ?
4209 CODE_FOR_reloaddi_la_out :
4210 CODE_FOR_reloadsi_la_out);
4211 }
4212 }
4213
e68d6a13 4214 if (TARGET_Z10)
4215 {
08d88e72 4216 HOST_WIDE_INT offset;
4217 rtx symref;
4218
e68d6a13 4219 /* On z10 several optimizer steps may generate larl operands with
4220 an odd addend. */
4221 if (in_p
2a672556 4222 && s390_loadrelative_operand_p (x, &symref, &offset)
e68d6a13 4223 && mode == Pmode
78affa36 4224 && !SYMBOL_FLAG_NOTALIGN2_P (symref)
08d88e72 4225 && (offset & 1) == 1)
e68d6a13 4226 sri->icode = ((mode == DImode) ? CODE_FOR_reloaddi_larl_odd_addend_z10
4227 : CODE_FOR_reloadsi_larl_odd_addend_z10);
4228
ddb92daa 4229 /* Handle all the (mem (symref)) accesses we cannot use the z10
4230 instructions for. */
e68d6a13 4231 if (MEM_P (x)
2a672556 4232 && s390_loadrelative_operand_p (XEXP (x, 0), NULL, NULL)
ddb92daa 4233 && (mode == QImode
a1142483 4234 || !reg_class_subset_p (rclass, GENERAL_REGS)
ddb92daa 4235 || GET_MODE_SIZE (mode) > UNITS_PER_WORD
4236 || !s390_check_symref_alignment (XEXP (x, 0),
4237 GET_MODE_SIZE (mode))))
e68d6a13 4238 {
4239#define __SECONDARY_RELOAD_CASE(M,m) \
4240 case M##mode: \
4241 if (TARGET_64BIT) \
4242 sri->icode = in_p ? CODE_FOR_reload##m##di_toreg_z10 : \
4243 CODE_FOR_reload##m##di_tomem_z10; \
4244 else \
4245 sri->icode = in_p ? CODE_FOR_reload##m##si_toreg_z10 : \
4246 CODE_FOR_reload##m##si_tomem_z10; \
4247 break;
4248
4249 switch (GET_MODE (x))
4250 {
4251 __SECONDARY_RELOAD_CASE (QI, qi);
4252 __SECONDARY_RELOAD_CASE (HI, hi);
4253 __SECONDARY_RELOAD_CASE (SI, si);
4254 __SECONDARY_RELOAD_CASE (DI, di);
4255 __SECONDARY_RELOAD_CASE (TI, ti);
4256 __SECONDARY_RELOAD_CASE (SF, sf);
4257 __SECONDARY_RELOAD_CASE (DF, df);
4258 __SECONDARY_RELOAD_CASE (TF, tf);
4259 __SECONDARY_RELOAD_CASE (SD, sd);
4260 __SECONDARY_RELOAD_CASE (DD, dd);
4261 __SECONDARY_RELOAD_CASE (TD, td);
76a4c804 4262 __SECONDARY_RELOAD_CASE (V1QI, v1qi);
4263 __SECONDARY_RELOAD_CASE (V2QI, v2qi);
4264 __SECONDARY_RELOAD_CASE (V4QI, v4qi);
4265 __SECONDARY_RELOAD_CASE (V8QI, v8qi);
4266 __SECONDARY_RELOAD_CASE (V16QI, v16qi);
4267 __SECONDARY_RELOAD_CASE (V1HI, v1hi);
4268 __SECONDARY_RELOAD_CASE (V2HI, v2hi);
4269 __SECONDARY_RELOAD_CASE (V4HI, v4hi);
4270 __SECONDARY_RELOAD_CASE (V8HI, v8hi);
4271 __SECONDARY_RELOAD_CASE (V1SI, v1si);
4272 __SECONDARY_RELOAD_CASE (V2SI, v2si);
4273 __SECONDARY_RELOAD_CASE (V4SI, v4si);
4274 __SECONDARY_RELOAD_CASE (V1DI, v1di);
4275 __SECONDARY_RELOAD_CASE (V2DI, v2di);
4276 __SECONDARY_RELOAD_CASE (V1TI, v1ti);
4277 __SECONDARY_RELOAD_CASE (V1SF, v1sf);
4278 __SECONDARY_RELOAD_CASE (V2SF, v2sf);
4279 __SECONDARY_RELOAD_CASE (V4SF, v4sf);
4280 __SECONDARY_RELOAD_CASE (V1DF, v1df);
4281 __SECONDARY_RELOAD_CASE (V2DF, v2df);
4282 __SECONDARY_RELOAD_CASE (V1TF, v1tf);
e68d6a13 4283 default:
4284 gcc_unreachable ();
4285 }
4286#undef __SECONDARY_RELOAD_CASE
4287 }
4288 }
4289
328d5423 4290 /* We need a scratch register when loading a PLUS expression which
4291 is not a legitimate operand of the LOAD ADDRESS instruction. */
7b1bda1c 4292  /* LRA can deal with the transformation of a PLUS operand very
 4293     well on its own, so we don't need to prompt it in this case.  */
4294 if (! lra_in_progress && in_p && s390_plus_operand (x, mode))
328d5423 4295 sri->icode = (TARGET_64BIT ?
4296 CODE_FOR_reloaddi_plus : CODE_FOR_reloadsi_plus);
4297
efec32e0 4298  /* When performing a multiword move from or to memory, we have to make sure the
328d5423 4299 second chunk in memory is addressable without causing a displacement
4300 overflow. If that would be the case we calculate the address in
4301 a scratch register. */
4302 if (MEM_P (x)
4303 && GET_CODE (XEXP (x, 0)) == PLUS
4304 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4305 && !DISP_IN_RANGE (INTVAL (XEXP (XEXP (x, 0), 1))
6938bdf8 4306 + GET_MODE_SIZE (mode) - 1))
328d5423 4307 {
efec32e0 4308 /* For GENERAL_REGS a displacement overflow is no problem if occurring
328d5423 4309	 in an s_operand address since we may fall back to lm/stm.  So we only
4310 have to care about overflows in the b+i+d case. */
8deb3959 4311 if ((reg_classes_intersect_p (GENERAL_REGS, rclass)
328d5423 4312 && s390_class_max_nregs (GENERAL_REGS, mode) > 1
4313 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS)
4314 /* For FP_REGS no lm/stm is available so this check is triggered
4315 for displacement overflows in b+i+d and b+d like addresses. */
8deb3959 4316 || (reg_classes_intersect_p (FP_REGS, rclass)
328d5423 4317 && s390_class_max_nregs (FP_REGS, mode) > 1))
4318 {
4319 if (in_p)
4320 sri->icode = (TARGET_64BIT ?
76a4c804 4321 CODE_FOR_reloaddi_la_in :
4322 CODE_FOR_reloadsi_la_in);
328d5423 4323 else
4324 sri->icode = (TARGET_64BIT ?
76a4c804 4325 CODE_FOR_reloaddi_la_out :
4326 CODE_FOR_reloadsi_la_out);
328d5423 4327 }
4328 }
bcbf02a5 4329
08b5e262 4330 /* A scratch address register is needed when a symbolic constant is
 4331     copied to r0 when compiling with -fPIC.  In other cases the target
4332 register might be used as temporary (see legitimize_pic_address). */
8deb3959 4333 if (in_p && SYMBOLIC_CONST (x) && flag_pic == 2 && rclass != ADDR_REGS)
08b5e262 4334 sri->icode = (TARGET_64BIT ?
4335 CODE_FOR_reloaddi_PIC_addr :
4336 CODE_FOR_reloadsi_PIC_addr);
4337
328d5423 4338 /* Either scratch or no register needed. */
66795431 4339 return NO_REGS;
4340}
4341
64f977d6 4342/* Generate code to load SRC, which is a PLUS that is not a
4343 legitimate operand for the LA instruction, into TARGET.
4344 SCRATCH may be used as scratch register. */
4345
4346void
edd89d66 4347s390_expand_plus_operand (rtx target, rtx src,
4348 rtx scratch)
64f977d6 4349{
e7f0624a 4350 rtx sum1, sum2;
8ba34dcd 4351 struct s390_address ad;
dc4951d9 4352
dc4951d9 4353 /* src must be a PLUS; get its two operands. */
32eda510 4354 gcc_assert (GET_CODE (src) == PLUS);
4355 gcc_assert (GET_MODE (src) == Pmode);
64f977d6 4356
c10847b9 4357 /* Check if any of the two operands is already scheduled
4358 for replacement by reload. This can happen e.g. when
4359 float registers occur in an address. */
4360 sum1 = find_replacement (&XEXP (src, 0));
4361 sum2 = find_replacement (&XEXP (src, 1));
a5004c3d 4362 src = gen_rtx_PLUS (Pmode, sum1, sum2);
a5004c3d 4363
e7f0624a 4364 /* If the address is already strictly valid, there's nothing to do. */
4365 if (!s390_decompose_address (src, &ad)
1e280623 4366 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
4367 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
64f977d6 4368 {
e7f0624a 4369 /* Otherwise, one of the operands cannot be an address register;
4370 we reload its value into the scratch register. */
4371 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
4372 {
4373 emit_move_insn (scratch, sum1);
4374 sum1 = scratch;
4375 }
4376 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
4377 {
4378 emit_move_insn (scratch, sum2);
4379 sum2 = scratch;
4380 }
64f977d6 4381
e7f0624a 4382 /* According to the way these invalid addresses are generated
4383 in reload.c, it should never happen (at least on s390) that
 4384	 *neither* of the PLUS components, after find_replacement
4385 was applied, is an address register. */
4386 if (sum1 == scratch && sum2 == scratch)
4387 {
4388 debug_rtx (src);
32eda510 4389 gcc_unreachable ();
e7f0624a 4390 }
64f977d6 4391
e7f0624a 4392 src = gen_rtx_PLUS (Pmode, sum1, sum2);
64f977d6 4393 }
4394
4395 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
4396 is only ever performed on addresses, so we can mark the
4397 sum as legitimate for LA in any case. */
4fbc4db5 4398 s390_load_address (target, src);
64f977d6 4399}
4400
4401
e5537457 4402/* Return true if ADDR is a valid memory address.
875862bf 4403 STRICT specifies whether strict register checking applies. */
4673c1a0 4404
fd50b071 4405static bool
3754d046 4406s390_legitimate_address_p (machine_mode mode, rtx addr, bool strict)
4673c1a0 4407{
875862bf 4408 struct s390_address ad;
e68d6a13 4409
4410 if (TARGET_Z10
4411 && larl_operand (addr, VOIDmode)
4412 && (mode == VOIDmode
4413 || s390_check_symref_alignment (addr, GET_MODE_SIZE (mode))))
4414 return true;
4415
875862bf 4416 if (!s390_decompose_address (addr, &ad))
e5537457 4417 return false;
8ba34dcd 4418
4419 if (strict)
4420 {
1e280623 4421 if (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
e5537457 4422 return false;
1e280623 4423
4424 if (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx)))
e5537457 4425 return false;
8ba34dcd 4426 }
4427 else
4428 {
ffead1ca 4429 if (ad.base
1e280623 4430 && !(REGNO (ad.base) >= FIRST_PSEUDO_REGISTER
4431 || REGNO_REG_CLASS (REGNO (ad.base)) == ADDR_REGS))
e5537457 4432 return false;
ffead1ca 4433
1e280623 4434 if (ad.indx
4435 && !(REGNO (ad.indx) >= FIRST_PSEUDO_REGISTER
4436 || REGNO_REG_CLASS (REGNO (ad.indx)) == ADDR_REGS))
4437 return false;
8ba34dcd 4438 }
e5537457 4439 return true;
4673c1a0 4440}
4441
e5537457 4442/* Return true if OP is a valid operand for the LA instruction.
2eb8fe23 4443 In 31-bit, we need to prove that the result is used as an
4444 address, as LA performs only a 31-bit addition. */
4445
e5537457 4446bool
edd89d66 4447legitimate_la_operand_p (rtx op)
2eb8fe23 4448{
4449 struct s390_address addr;
8ba34dcd 4450 if (!s390_decompose_address (op, &addr))
e5537457 4451 return false;
2eb8fe23 4452
e5537457 4453 return (TARGET_64BIT || addr.pointer);
64f977d6 4454}
2eb8fe23 4455
e5537457 4456/* Return true if it is valid *and* preferable to use LA to
c6061690 4457 compute the sum of OP1 and OP2. */
f81e845f 4458
e5537457 4459bool
c6061690 4460preferred_la_operand_p (rtx op1, rtx op2)
a40b2054 4461{
4462 struct s390_address addr;
c6061690 4463
4464 if (op2 != const0_rtx)
4465 op1 = gen_rtx_PLUS (Pmode, op1, op2);
4466
4467 if (!s390_decompose_address (op1, &addr))
e5537457 4468 return false;
1e280623 4469 if (addr.base && !REGNO_OK_FOR_BASE_P (REGNO (addr.base)))
e5537457 4470 return false;
1e280623 4471 if (addr.indx && !REGNO_OK_FOR_INDEX_P (REGNO (addr.indx)))
e5537457 4472 return false;
a40b2054 4473
33d033da 4474 /* Avoid LA instructions with index register on z196; it is
81769881 4475 preferable to use regular add instructions when possible.
4476 Starting with zEC12 the la with index register is "uncracked"
4477 again. */
33d033da 4478 if (addr.indx && s390_tune == PROCESSOR_2817_Z196)
4479 return false;
4480
a40b2054 4481 if (!TARGET_64BIT && !addr.pointer)
e5537457 4482 return false;
a40b2054 4483
4484 if (addr.pointer)
e5537457 4485 return true;
a40b2054 4486
ec3b9583 4487 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
4488 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
e5537457 4489 return true;
a40b2054 4490
e5537457 4491 return false;
a40b2054 4492}
4493
4fbc4db5 4494/* Emit a forced load-address operation to load SRC into DST.
4495 This will use the LOAD ADDRESS instruction even in situations
4496 where legitimate_la_operand_p (SRC) returns false. */
2eb8fe23 4497
4fbc4db5 4498void
b40da9a7 4499s390_load_address (rtx dst, rtx src)
64f977d6 4500{
4fbc4db5 4501 if (TARGET_64BIT)
4502 emit_move_insn (dst, src);
4503 else
4504 emit_insn (gen_force_la_31 (dst, src));
2eb8fe23 4505}
4506
4673c1a0 4507/* Return a legitimate reference for ORIG (an address) using the
4508 register REG. If REG is 0, a new pseudo is generated.
4509
4510 There are two types of references that must be handled:
4511
4512 1. Global data references must load the address from the GOT, via
4513 the PIC reg. An insn is emitted to do this load, and the reg is
4514 returned.
4515
4516 2. Static data references, constant pool addresses, and code labels
4517 compute the address as an offset from the GOT, whose base is in
a3e33162 4518 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
4673c1a0 4519 differentiate them from global data objects. The returned
4520 address is the PIC reg + an unspec constant.
4521
bc409cb4 4522 TARGET_LEGITIMIZE_ADDRESS_P rejects symbolic references unless the PIC
4673c1a0 4523 reg also appears in the address. */
4524
4525rtx
b40da9a7 4526legitimize_pic_address (rtx orig, rtx reg)
4673c1a0 4527{
4528 rtx addr = orig;
2a672556 4529 rtx addend = const0_rtx;
8deb3959 4530 rtx new_rtx = orig;
4673c1a0 4531
1ed004b7 4532 gcc_assert (!TLS_SYMBOLIC_CONST (addr));
4533
2a672556 4534 if (GET_CODE (addr) == CONST)
4535 addr = XEXP (addr, 0);
4536
4537 if (GET_CODE (addr) == PLUS)
4673c1a0 4538 {
2a672556 4539 addend = XEXP (addr, 1);
4540 addr = XEXP (addr, 0);
4541 }
4542
4543 if ((GET_CODE (addr) == LABEL_REF
4544 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr))
4545 || (GET_CODE (addr) == UNSPEC &&
4546 (XINT (addr, 1) == UNSPEC_GOTENT
4547 || (TARGET_CPU_ZARCH && XINT (addr, 1) == UNSPEC_PLT))))
4548 && GET_CODE (addend) == CONST_INT)
4549 {
4550 /* This can be locally addressed. */
4551
4552 /* larl_operand requires UNSPECs to be wrapped in a const rtx. */
4553 rtx const_addr = (GET_CODE (addr) == UNSPEC ?
4554 gen_rtx_CONST (Pmode, addr) : addr);
4555
4556 if (TARGET_CPU_ZARCH
4557 && larl_operand (const_addr, VOIDmode)
4558 && INTVAL (addend) < (HOST_WIDE_INT)1 << 31
4559 && INTVAL (addend) >= -((HOST_WIDE_INT)1 << 31))
4560 {
4561 if (INTVAL (addend) & 1)
4562 {
4563 /* LARL can't handle odd offsets, so emit a pair of LARL
4564 and LA. */
4565 rtx temp = reg? reg : gen_reg_rtx (Pmode);
4566
4567 if (!DISP_IN_RANGE (INTVAL (addend)))
4568 {
4569 HOST_WIDE_INT even = INTVAL (addend) - 1;
4570 addr = gen_rtx_PLUS (Pmode, addr, GEN_INT (even));
4571 addr = gen_rtx_CONST (Pmode, addr);
4572 addend = const1_rtx;
4573 }
4574
4575 emit_move_insn (temp, addr);
4576 new_rtx = gen_rtx_PLUS (Pmode, temp, addend);
4577
4578 if (reg != 0)
4579 {
4580 s390_load_address (reg, new_rtx);
4581 new_rtx = reg;
4582 }
4583 }
4584 else
4585 {
4586 /* If the offset is even, we can just use LARL. This
4587 will happen automatically. */
4588 }
4589 }
4673c1a0 4590 else
2a672556 4591 {
4592 /* No larl - Access local symbols relative to the GOT. */
4673c1a0 4593
2a672556 4594 rtx temp = reg? reg : gen_reg_rtx (Pmode);
4673c1a0 4595
12ef3745 4596 if (reload_in_progress || reload_completed)
3072d30e 4597 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
12ef3745 4598
2a672556 4599 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
4600 if (addend != const0_rtx)
4601 addr = gen_rtx_PLUS (Pmode, addr, addend);
4602 addr = gen_rtx_CONST (Pmode, addr);
4603 addr = force_const_mem (Pmode, addr);
4673c1a0 4604 emit_move_insn (temp, addr);
4605
2a672556 4606 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
4607 if (reg != 0)
4608 {
4609 s390_load_address (reg, new_rtx);
4610 new_rtx = reg;
4611 }
4612 }
4673c1a0 4613 }
2a672556 4614 else if (GET_CODE (addr) == SYMBOL_REF && addend == const0_rtx)
4673c1a0 4615 {
2a672556 4616 /* A non-local symbol reference without addend.
4617
4618 The symbol ref is wrapped into an UNSPEC to make sure the
4619 proper operand modifier (@GOT or @GOTENT) will be emitted.
4620 This will tell the linker to put the symbol into the GOT.
4621
4622 Additionally the code dereferencing the GOT slot is emitted here.
4623
4624 An addend to the symref needs to be added afterwards.
4625 legitimize_pic_address calls itself recursively to handle
4626 that case. So no need to do it here. */
4627
4673c1a0 4628 if (reg == 0)
4629 reg = gen_reg_rtx (Pmode);
4630
2a672556 4631 if (TARGET_Z10)
4632 {
4633 /* Use load relative if possible.
4634 lgrl <target>, sym@GOTENT */
4635 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
4636 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
4637 new_rtx = gen_const_mem (GET_MODE (reg), new_rtx);
4638
4639 emit_move_insn (reg, new_rtx);
4640 new_rtx = reg;
4641 }
4642 else if (flag_pic == 1)
4673c1a0 4643 {
2a672556 4644 /* Assume GOT offset is a valid displacement operand (< 4k
4645 or < 512k with z990). This is handled the same way in
4646 both 31- and 64-bit code (@GOT).
4647 lg <target>, sym@GOT(r12) */
4673c1a0 4648
9a2a66ae 4649 if (reload_in_progress || reload_completed)
3072d30e 4650 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
4673c1a0 4651
8deb3959 4652 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
4653 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
4654 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new_rtx);
4655 new_rtx = gen_const_mem (Pmode, new_rtx);
4656 emit_move_insn (reg, new_rtx);
4657 new_rtx = reg;
4673c1a0 4658 }
dafc8d45 4659 else if (TARGET_CPU_ZARCH)
4673c1a0 4660 {
4661 /* If the GOT offset might be >= 4k, we determine the position
2a672556 4662 of the GOT entry via a PC-relative LARL (@GOTENT).
4663 larl temp, sym@GOTENT
4664 lg <target>, 0(temp) */
4673c1a0 4665
08b5e262 4666 rtx temp = reg ? reg : gen_reg_rtx (Pmode);
4667
4668 gcc_assert (REGNO (temp) >= FIRST_PSEUDO_REGISTER
4669 || REGNO_REG_CLASS (REGNO (temp)) == ADDR_REGS);
4673c1a0 4670
8deb3959 4671 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
4672 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
2a672556 4673 emit_move_insn (temp, new_rtx);
4673c1a0 4674
2a672556 4675 new_rtx = gen_const_mem (Pmode, temp);
8deb3959 4676 emit_move_insn (reg, new_rtx);
2a672556 4677
8deb3959 4678 new_rtx = reg;
4673c1a0 4679 }
4680 else
4681 {
f81e845f 4682 /* If the GOT offset might be >= 4k, we have to load it
2a672556 4683 from the literal pool (@GOT).
4684
4685 lg temp, lit-litbase(r13)
4686 lg <target>, 0(temp)
4687 lit: .long sym@GOT */
4673c1a0 4688
08b5e262 4689 rtx temp = reg ? reg : gen_reg_rtx (Pmode);
4690
4691 gcc_assert (REGNO (temp) >= FIRST_PSEUDO_REGISTER
4692 || REGNO_REG_CLASS (REGNO (temp)) == ADDR_REGS);
4673c1a0 4693
9a2a66ae 4694 if (reload_in_progress || reload_completed)
3072d30e 4695 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
4673c1a0 4696
12ef3745 4697 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
525d1294 4698 addr = gen_rtx_CONST (Pmode, addr);
4699 addr = force_const_mem (Pmode, addr);
4673c1a0 4700 emit_move_insn (temp, addr);
4701
8deb3959 4702 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
4703 new_rtx = gen_const_mem (Pmode, new_rtx);
4704 emit_move_insn (reg, new_rtx);
4705 new_rtx = reg;
4673c1a0 4706 }
f81e845f 4707 }
2a672556 4708 else if (GET_CODE (addr) == UNSPEC && GET_CODE (addend) == CONST_INT)
4673c1a0 4709 {
2a672556 4710 gcc_assert (XVECLEN (addr, 0) == 1);
4711 switch (XINT (addr, 1))
4673c1a0 4712 {
2a672556 4713 /* These address symbols (or PLT slots) relative to the GOT
4714 (not GOT slots!). In general this will exceed the
 4715	     displacement range, so these values belong in the literal
4716 pool. */
4717 case UNSPEC_GOTOFF:
4718 case UNSPEC_PLTOFF:
4719 new_rtx = force_const_mem (Pmode, orig);
4720 break;
4673c1a0 4721
2a672556 4722 /* For -fPIC the GOT size might exceed the displacement
4723 range so make sure the value is in the literal pool. */
4724 case UNSPEC_GOT:
4725 if (flag_pic == 2)
4726 new_rtx = force_const_mem (Pmode, orig);
4727 break;
4673c1a0 4728
2a672556 4729 /* For @GOTENT larl is used. This is handled like local
4730 symbol refs. */
4731 case UNSPEC_GOTENT:
4732 gcc_unreachable ();
4733 break;
4673c1a0 4734
2a672556 4735 /* @PLT is OK as is on 64-bit, must be converted to
4736 GOT-relative @PLTOFF on 31-bit. */
4737 case UNSPEC_PLT:
4738 if (!TARGET_CPU_ZARCH)
4673c1a0 4739 {
2a672556 4740 rtx temp = reg? reg : gen_reg_rtx (Pmode);
4741
4742 if (reload_in_progress || reload_completed)
4743 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
4744
4745 addr = XVECEXP (addr, 0, 0);
4746 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
4747 UNSPEC_PLTOFF);
4748 if (addend != const0_rtx)
4749 addr = gen_rtx_PLUS (Pmode, addr, addend);
4750 addr = gen_rtx_CONST (Pmode, addr);
4751 addr = force_const_mem (Pmode, addr);
4752 emit_move_insn (temp, addr);
4753
4754 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
4755 if (reg != 0)
4673c1a0 4756 {
2a672556 4757 s390_load_address (reg, new_rtx);
4758 new_rtx = reg;
4673c1a0 4759 }
2a672556 4760 }
4761 else
 4762	  /* On 64-bit, larl can be used.  This case is handled like
4763 local symbol refs. */
4764 gcc_unreachable ();
4765 break;
4766
4767 /* Everything else cannot happen. */
4768 default:
4769 gcc_unreachable ();
4770 }
4771 }
4772 else if (addend != const0_rtx)
4773 {
4774 /* Otherwise, compute the sum. */
4673c1a0 4775
2a672556 4776 rtx base = legitimize_pic_address (addr, reg);
4777 new_rtx = legitimize_pic_address (addend,
4778 base == reg ? NULL_RTX : reg);
4779 if (GET_CODE (new_rtx) == CONST_INT)
4780 new_rtx = plus_constant (Pmode, base, INTVAL (new_rtx));
4781 else
4782 {
4783 if (GET_CODE (new_rtx) == PLUS && CONSTANT_P (XEXP (new_rtx, 1)))
4784 {
4785 base = gen_rtx_PLUS (Pmode, base, XEXP (new_rtx, 0));
4786 new_rtx = XEXP (new_rtx, 1);
4673c1a0 4787 }
2a672556 4788 new_rtx = gen_rtx_PLUS (Pmode, base, new_rtx);
4673c1a0 4789 }
2a672556 4790
4791 if (GET_CODE (new_rtx) == CONST)
4792 new_rtx = XEXP (new_rtx, 0);
4793 new_rtx = force_operand (new_rtx, 0);
4673c1a0 4794 }
2a672556 4795
8deb3959 4796 return new_rtx;
4673c1a0 4797}
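/* Summary of the GOT access sequences emitted above (illustrative,
   collected from the inline comments): on z10 a single load-relative
   "lgrl <target>, sym@GOTENT"; with flag_pic == 1 a single
   "lg <target>, sym@GOT(r12)"; otherwise on zarch a
   "larl temp, sym@GOTENT" followed by "lg <target>, 0(temp)"; and
   without larl a literal-pool load of sym@GOT followed by an
   indirect load.  */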
4798
be00aaa8 4799/* Load the thread pointer into a register. */
4800
cc87d0c5 4801rtx
4802s390_get_thread_pointer (void)
be00aaa8 4803{
923cf36d 4804 rtx tp = gen_reg_rtx (Pmode);
be00aaa8 4805
923cf36d 4806 emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
be00aaa8 4807 mark_reg_pointer (tp, BITS_PER_WORD);
4808
4809 return tp;
4810}
4811
7346ca58 4812/* Emit a TLS call insn.  The call target is the SYMBOL_REF stored
4813 in s390_tls_symbol which always refers to __tls_get_offset.
 4814   The returned offset is written to RESULT_REG and a USE rtx is
4815 generated for TLS_CALL. */
be00aaa8 4816
4817static GTY(()) rtx s390_tls_symbol;
7346ca58 4818
4819static void
4820s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
be00aaa8 4821{
7346ca58 4822 rtx insn;
f588eb9f 4823
c60a7572 4824 if (!flag_pic)
4825 emit_insn (s390_load_got ());
7346ca58 4826
be00aaa8 4827 if (!s390_tls_symbol)
4828 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
4829
f588eb9f 4830 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
4831 gen_rtx_REG (Pmode, RETURN_REGNUM));
7346ca58 4832
4833 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
9c2a0c05 4834 RTL_CONST_CALL_P (insn) = 1;
be00aaa8 4835}
4836
4837/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
4838 this (thread-local) address. REG may be used as temporary. */
4839
4840static rtx
b40da9a7 4841legitimize_tls_address (rtx addr, rtx reg)
be00aaa8 4842{
db7dd023 4843 rtx new_rtx, tls_call, temp, base, r2;
4844 rtx_insn *insn;
be00aaa8 4845
4846 if (GET_CODE (addr) == SYMBOL_REF)
4847 switch (tls_symbolic_operand (addr))
4848 {
4849 case TLS_MODEL_GLOBAL_DYNAMIC:
4850 start_sequence ();
4851 r2 = gen_rtx_REG (Pmode, 2);
4852 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
8deb3959 4853 new_rtx = gen_rtx_CONST (Pmode, tls_call);
4854 new_rtx = force_const_mem (Pmode, new_rtx);
4855 emit_move_insn (r2, new_rtx);
7346ca58 4856 s390_emit_tls_call_insn (r2, tls_call);
be00aaa8 4857 insn = get_insns ();
4858 end_sequence ();
4859
8deb3959 4860 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
be00aaa8 4861 temp = gen_reg_rtx (Pmode);
8deb3959 4862 emit_libcall_block (insn, temp, r2, new_rtx);
be00aaa8 4863
8deb3959 4864 new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
be00aaa8 4865 if (reg != 0)
4866 {
8deb3959 4867 s390_load_address (reg, new_rtx);
4868 new_rtx = reg;
be00aaa8 4869 }
4870 break;
4871
4872 case TLS_MODEL_LOCAL_DYNAMIC:
4873 start_sequence ();
4874 r2 = gen_rtx_REG (Pmode, 2);
4875 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
8deb3959 4876 new_rtx = gen_rtx_CONST (Pmode, tls_call);
4877 new_rtx = force_const_mem (Pmode, new_rtx);
4878 emit_move_insn (r2, new_rtx);
7346ca58 4879 s390_emit_tls_call_insn (r2, tls_call);
be00aaa8 4880 insn = get_insns ();
4881 end_sequence ();
4882
8deb3959 4883 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
be00aaa8 4884 temp = gen_reg_rtx (Pmode);
8deb3959 4885 emit_libcall_block (insn, temp, r2, new_rtx);
be00aaa8 4886
8deb3959 4887 new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
be00aaa8 4888 base = gen_reg_rtx (Pmode);
8deb3959 4889 s390_load_address (base, new_rtx);
be00aaa8 4890
8deb3959 4891 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
4892 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
4893 new_rtx = force_const_mem (Pmode, new_rtx);
be00aaa8 4894 temp = gen_reg_rtx (Pmode);
8deb3959 4895 emit_move_insn (temp, new_rtx);
be00aaa8 4896
8deb3959 4897 new_rtx = gen_rtx_PLUS (Pmode, base, temp);
be00aaa8 4898 if (reg != 0)
4899 {
8deb3959 4900 s390_load_address (reg, new_rtx);
4901 new_rtx = reg;
be00aaa8 4902 }
4903 break;
4904
4905 case TLS_MODEL_INITIAL_EXEC:
4906 if (flag_pic == 1)
4907 {
4908 /* Assume GOT offset < 4k. This is handled the same way
4909 in both 31- and 64-bit code. */
4910
4911 if (reload_in_progress || reload_completed)
3072d30e 4912 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
be00aaa8 4913
8deb3959 4914 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
4915 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
4916 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new_rtx);
4917 new_rtx = gen_const_mem (Pmode, new_rtx);
be00aaa8 4918 temp = gen_reg_rtx (Pmode);
8deb3959 4919 emit_move_insn (temp, new_rtx);
be00aaa8 4920 }
dafc8d45 4921 else if (TARGET_CPU_ZARCH)
be00aaa8 4922 {
4923 /* If the GOT offset might be >= 4k, we determine the position
4924 of the GOT entry via a PC-relative LARL. */
4925
8deb3959 4926 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
4927 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
be00aaa8 4928 temp = gen_reg_rtx (Pmode);
8deb3959 4929 emit_move_insn (temp, new_rtx);
be00aaa8 4930
8deb3959 4931 new_rtx = gen_const_mem (Pmode, temp);
be00aaa8 4932 temp = gen_reg_rtx (Pmode);
8deb3959 4933 emit_move_insn (temp, new_rtx);
be00aaa8 4934 }
4935 else if (flag_pic)
4936 {
f81e845f 4937 /* If the GOT offset might be >= 4k, we have to load it
be00aaa8 4938 from the literal pool. */
4939
4940 if (reload_in_progress || reload_completed)
3072d30e 4941 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
be00aaa8 4942
8deb3959 4943 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
4944 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
4945 new_rtx = force_const_mem (Pmode, new_rtx);
be00aaa8 4946 temp = gen_reg_rtx (Pmode);
8deb3959 4947 emit_move_insn (temp, new_rtx);
be00aaa8 4948
8deb3959 4949 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
4950 new_rtx = gen_const_mem (Pmode, new_rtx);
be00aaa8 4951
8deb3959 4952 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new_rtx, addr), UNSPEC_TLS_LOAD);
be00aaa8 4953 temp = gen_reg_rtx (Pmode);
d1f9b275 4954 emit_insn (gen_rtx_SET (temp, new_rtx));
be00aaa8 4955 }
4956 else
4957 {
4958 /* In position-dependent code, load the absolute address of
4959 the GOT entry from the literal pool. */
4960
8deb3959 4961 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
4962 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
4963 new_rtx = force_const_mem (Pmode, new_rtx);
be00aaa8 4964 temp = gen_reg_rtx (Pmode);
8deb3959 4965 emit_move_insn (temp, new_rtx);
be00aaa8 4966
8deb3959 4967 new_rtx = temp;
4968 new_rtx = gen_const_mem (Pmode, new_rtx);
4969 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new_rtx, addr), UNSPEC_TLS_LOAD);
be00aaa8 4970 temp = gen_reg_rtx (Pmode);
d1f9b275 4971 emit_insn (gen_rtx_SET (temp, new_rtx));
be00aaa8 4972 }
4973
8deb3959 4974 new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
be00aaa8 4975 if (reg != 0)
4976 {
8deb3959 4977 s390_load_address (reg, new_rtx);
4978 new_rtx = reg;
be00aaa8 4979 }
4980 break;
4981
4982 case TLS_MODEL_LOCAL_EXEC:
8deb3959 4983 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
4984 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
4985 new_rtx = force_const_mem (Pmode, new_rtx);
be00aaa8 4986 temp = gen_reg_rtx (Pmode);
8deb3959 4987 emit_move_insn (temp, new_rtx);
be00aaa8 4988
8deb3959 4989 new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
be00aaa8 4990 if (reg != 0)
4991 {
8deb3959 4992 s390_load_address (reg, new_rtx);
4993 new_rtx = reg;
be00aaa8 4994 }
4995 break;
4996
4997 default:
32eda510 4998 gcc_unreachable ();
be00aaa8 4999 }
5000
5001 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
5002 {
5003 switch (XINT (XEXP (addr, 0), 1))
5004 {
5005 case UNSPEC_INDNTPOFF:
32eda510 5006 gcc_assert (TARGET_CPU_ZARCH);
8deb3959 5007 new_rtx = addr;
be00aaa8 5008 break;
5009
5010 default:
32eda510 5011 gcc_unreachable ();
be00aaa8 5012 }
5013 }
5014
b7ace65c 5015 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
5016 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
5017 {
8deb3959 5018 new_rtx = XEXP (XEXP (addr, 0), 0);
5019 if (GET_CODE (new_rtx) != SYMBOL_REF)
5020 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
b7ace65c 5021
8deb3959 5022 new_rtx = legitimize_tls_address (new_rtx, reg);
29c05e22 5023 new_rtx = plus_constant (Pmode, new_rtx,
5024 INTVAL (XEXP (XEXP (addr, 0), 1)));
8deb3959 5025 new_rtx = force_operand (new_rtx, 0);
b7ace65c 5026 }
5027
be00aaa8 5028 else
32eda510 5029 gcc_unreachable (); /* for now ... */
be00aaa8 5030
8deb3959 5031 return new_rtx;
be00aaa8 5032}
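/* Illustrative note (not from the original source): for the
   local-exec model above, the constant sym@NTPOFF is loaded from the
   literal pool and added to the thread pointer, so the address is
   computed without any call; the two dynamic models instead go
   through the __tls_get_offset call emitted by
   s390_emit_tls_call_insn.  */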
5033
08b5e262 5034/* Emit insns making the address in operands[1] valid for a standard
5035 move to operands[0]. operands[1] is replaced by an address which
5036 should be used instead of the former RTX to emit the move
5037 pattern. */
4673c1a0 5038
5039void
b40da9a7 5040emit_symbolic_move (rtx *operands)
4673c1a0 5041{
e1ba4a27 5042 rtx temp = !can_create_pseudo_p () ? operands[0] : gen_reg_rtx (Pmode);
4673c1a0 5043
be00aaa8 5044 if (GET_CODE (operands[0]) == MEM)
4673c1a0 5045 operands[1] = force_reg (Pmode, operands[1]);
be00aaa8 5046 else if (TLS_SYMBOLIC_CONST (operands[1]))
5047 operands[1] = legitimize_tls_address (operands[1], temp);
5048 else if (flag_pic)
4673c1a0 5049 operands[1] = legitimize_pic_address (operands[1], temp);
5050}
5051
56769981 5052/* Try machine-dependent ways of modifying an illegitimate address X
4673c1a0 5053 to be legitimate. If we find one, return the new, valid address.
4673c1a0 5054
5055 OLDX is the address as it was before break_out_memory_refs was called.
5056 In some cases it is useful to look at this to decide what needs to be done.
5057
56769981 5058 MODE is the mode of the operand pointed to by X.
4673c1a0 5059
5060 When -fpic is used, special handling is needed for symbolic references.
5061 See comments by legitimize_pic_address for details. */
5062
41e3a0c7 5063static rtx
5064s390_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3754d046 5065 machine_mode mode ATTRIBUTE_UNUSED)
4673c1a0 5066{
2eb8fe23 5067 rtx constant_term = const0_rtx;
4673c1a0 5068
be00aaa8 5069 if (TLS_SYMBOLIC_CONST (x))
5070 {
5071 x = legitimize_tls_address (x, 0);
5072
fd50b071 5073 if (s390_legitimate_address_p (mode, x, FALSE))
be00aaa8 5074 return x;
5075 }
1ed004b7 5076 else if (GET_CODE (x) == PLUS
ffead1ca 5077 && (TLS_SYMBOLIC_CONST (XEXP (x, 0))
1ed004b7 5078 || TLS_SYMBOLIC_CONST (XEXP (x, 1))))
5079 {
5080 return x;
5081 }
be00aaa8 5082 else if (flag_pic)
4673c1a0 5083 {
2eb8fe23 5084 if (SYMBOLIC_CONST (x)
f81e845f 5085 || (GET_CODE (x) == PLUS
5086 && (SYMBOLIC_CONST (XEXP (x, 0))
2eb8fe23 5087 || SYMBOLIC_CONST (XEXP (x, 1)))))
5088 x = legitimize_pic_address (x, 0);
5089
fd50b071 5090 if (s390_legitimate_address_p (mode, x, FALSE))
2eb8fe23 5091 return x;
4673c1a0 5092 }
4673c1a0 5093
2eb8fe23 5094 x = eliminate_constant_term (x, &constant_term);
56769981 5095
de84f805 5096 /* Optimize loading of large displacements by splitting them
5097 into the multiple of 4K and the rest; this allows the
f81e845f 5098 former to be CSE'd if possible.
de84f805 5099
5100 Don't do this if the displacement is added to a register
5101 pointing into the stack frame, as the offsets will
5102 change later anyway. */
5103
5104 if (GET_CODE (constant_term) == CONST_INT
51aa1e9c 5105 && !TARGET_LONG_DISPLACEMENT
5106 && !DISP_IN_RANGE (INTVAL (constant_term))
de84f805 5107 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
5108 {
5109 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
5110 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
5111
5112 rtx temp = gen_reg_rtx (Pmode);
5113 rtx val = force_operand (GEN_INT (upper), temp);
5114 if (val != temp)
5115 emit_move_insn (temp, val);
5116
5117 x = gen_rtx_PLUS (Pmode, x, temp);
5118 constant_term = GEN_INT (lower);
5119 }
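  /* Worked example (not from the original source): for a constant
     term of 0x12345, lower = 0x12345 & 0xfff = 0x345 and
     upper = 0x12345 ^ 0x345 = 0x12000.  The 0x12000 part is loaded
     into a register (and can be CSE'd across references), leaving
     the in-range displacement 0x345.  */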
5120
2eb8fe23 5121 if (GET_CODE (x) == PLUS)
4673c1a0 5122 {
2eb8fe23 5123 if (GET_CODE (XEXP (x, 0)) == REG)
5124 {
edd89d66 5125 rtx temp = gen_reg_rtx (Pmode);
5126 rtx val = force_operand (XEXP (x, 1), temp);
2eb8fe23 5127 if (val != temp)
5128 emit_move_insn (temp, val);
5129
5130 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
5131 }
5132
5133 else if (GET_CODE (XEXP (x, 1)) == REG)
5134 {
edd89d66 5135 rtx temp = gen_reg_rtx (Pmode);
5136 rtx val = force_operand (XEXP (x, 0), temp);
2eb8fe23 5137 if (val != temp)
5138 emit_move_insn (temp, val);
5139
5140 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
5141 }
4673c1a0 5142 }
2eb8fe23 5143
5144 if (constant_term != const0_rtx)
5145 x = gen_rtx_PLUS (Pmode, x, constant_term);
5146
5147 return x;
4673c1a0 5148}
5149
e4542435 5150/* Try a machine-dependent way of reloading an illegitimate address AD
851d9296 5151 operand. If we find one, push the reload and return the new address.
e4542435 5152
5153 MODE is the mode of the enclosing MEM. OPNUM is the operand number
5154 and TYPE is the reload type of the current reload. */
5155
ffead1ca 5156rtx
3754d046 5157legitimize_reload_address (rtx ad, machine_mode mode ATTRIBUTE_UNUSED,
e4542435 5158 int opnum, int type)
5159{
5160 if (!optimize || TARGET_LONG_DISPLACEMENT)
5161 return NULL_RTX;
5162
5163 if (GET_CODE (ad) == PLUS)
5164 {
5165 rtx tem = simplify_binary_operation (PLUS, Pmode,
5166 XEXP (ad, 0), XEXP (ad, 1));
5167 if (tem)
5168 ad = tem;
5169 }
5170
5171 if (GET_CODE (ad) == PLUS
5172 && GET_CODE (XEXP (ad, 0)) == REG
5173 && GET_CODE (XEXP (ad, 1)) == CONST_INT
5174 && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
5175 {
5176 HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
5177 HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
8deb3959 5178 rtx cst, tem, new_rtx;
e4542435 5179
5180 cst = GEN_INT (upper);
5181 if (!legitimate_reload_constant_p (cst))
5182 cst = force_const_mem (Pmode, cst);
5183
5184 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
8deb3959 5185 new_rtx = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));
e4542435 5186
5187 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
ffead1ca 5188 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
e4542435 5189 opnum, (enum reload_type) type);
8deb3959 5190 return new_rtx;
e4542435 5191 }
5192
5193 return NULL_RTX;
5194}
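/* Worked example (not from the original source): when reloading the
   address 100000(%r2) without long displacements, lower =
   100000 & 0xfff = 0x6a0 and upper = 100000 ^ 0x6a0 = 0x18000, so
   the address is rebuilt as (%r2 + 0x18000) + 0x6a0 with the upper
   part pushed as a separate reload.  */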
5195
4fbc4db5 5196/* Emit code to move LEN bytes from SRC to DST.  */
5197
daa87e5a 5198bool
008c057d 5199s390_expand_movmem (rtx dst, rtx src, rtx len)
4fbc4db5 5200{
daa87e5a 5201 /* When tuning for z10 or higher we rely on the Glibc functions to
 5202     do the right thing.  Only for constant lengths of at most 64k do we
 5203     generate inline code.  */
5204 if (s390_tune >= PROCESSOR_2097_Z10
5205 && (GET_CODE (len) != CONST_INT || INTVAL (len) > (1<<16)))
5206 return false;
5207
4fbc4db5 5208 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
5209 {
5210 if (INTVAL (len) > 0)
008c057d 5211 emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
4fbc4db5 5212 }
5213
5214 else if (TARGET_MVCLE)
5215 {
008c057d 5216 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
4fbc4db5 5217 }
5218
5219 else
5220 {
5221 rtx dst_addr, src_addr, count, blocks, temp;
79f6a8ed 5222 rtx_code_label *loop_start_label = gen_label_rtx ();
5223 rtx_code_label *loop_end_label = gen_label_rtx ();
5224 rtx_code_label *end_label = gen_label_rtx ();
3754d046 5225 machine_mode mode;
4fbc4db5 5226
5227 mode = GET_MODE (len);
5228 if (mode == VOIDmode)
31838f66 5229 mode = Pmode;
4fbc4db5 5230
4fbc4db5 5231 dst_addr = gen_reg_rtx (Pmode);
5232 src_addr = gen_reg_rtx (Pmode);
5233 count = gen_reg_rtx (mode);
5234 blocks = gen_reg_rtx (mode);
5235
5236 convert_move (count, len, 1);
f81e845f 5237 emit_cmp_and_jump_insns (count, const0_rtx,
4fbc4db5 5238 EQ, NULL_RTX, mode, 1, end_label);
5239
5240 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
5241 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
5242 dst = change_address (dst, VOIDmode, dst_addr);
5243 src = change_address (src, VOIDmode, src_addr);
f81e845f 5244
b9c74b4d 5245 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1,
5246 OPTAB_DIRECT);
4fbc4db5 5247 if (temp != count)
5248 emit_move_insn (count, temp);
5249
b9c74b4d 5250 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
5251 OPTAB_DIRECT);
4fbc4db5 5252 if (temp != blocks)
5253 emit_move_insn (blocks, temp);
5254
4ee9c684 5255 emit_cmp_and_jump_insns (blocks, const0_rtx,
5256 EQ, NULL_RTX, mode, 1, loop_end_label);
7746964e 5257
5258 emit_label (loop_start_label);
4fbc4db5 5259
d5de7805 5260 if (TARGET_Z10
5261 && (GET_CODE (len) != CONST_INT || INTVAL (len) > 768))
5262 {
5263 rtx prefetch;
5264
5265 /* Issue a read prefetch for the +3 cache line. */
5266 prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, src_addr, GEN_INT (768)),
5267 const0_rtx, const0_rtx);
5268 PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
5269 emit_insn (prefetch);
5270
5271 /* Issue a write prefetch for the +3 cache line. */
5272 prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (768)),
5273 const1_rtx, const0_rtx);
5274 PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
5275 emit_insn (prefetch);
5276 }
5277
008c057d 5278 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
f81e845f 5279 s390_load_address (dst_addr,
4fbc4db5 5280 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
f81e845f 5281 s390_load_address (src_addr,
4fbc4db5 5282 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
f81e845f 5283
b9c74b4d 5284 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
5285 OPTAB_DIRECT);
4fbc4db5 5286 if (temp != blocks)
5287 emit_move_insn (blocks, temp);
5288
4ee9c684 5289 emit_cmp_and_jump_insns (blocks, const0_rtx,
5290 EQ, NULL_RTX, mode, 1, loop_end_label);
7746964e 5291
5292 emit_jump (loop_start_label);
4ee9c684 5293 emit_label (loop_end_label);
4fbc4db5 5294
008c057d 5295 emit_insn (gen_movmem_short (dst, src,
31838f66 5296 convert_to_mode (Pmode, count, 1)));
4fbc4db5 5297 emit_label (end_label);
5298 }
daa87e5a 5299 return true;
4fbc4db5 5300}
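/* Worked example for the loop above, added for illustration: for a
   constant len of 700 we get count = 699 and blocks = count >> 8 = 2,
   so the loop performs two 256-byte moves; the trailing movmem_short
   then moves the rest based on the low eight bits of count
   (699 & 0xff = 187, i.e. 188 bytes, since the length operand encodes
   length - 1), for a total of 2 * 256 + 188 = 700 bytes.  */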
5301
805a133b 5302/* Emit code to set LEN bytes at DST to VAL.
5303 Make use of clrmem if VAL is zero. */
4fbc4db5 5304
5305void
805a133b 5306s390_expand_setmem (rtx dst, rtx len, rtx val)
4fbc4db5 5307{
1ed6fd08 5308 if (GET_CODE (len) == CONST_INT && INTVAL (len) == 0)
5309 return;
5310
805a133b 5311 gcc_assert (GET_CODE (val) == CONST_INT || GET_MODE (val) == QImode);
ffead1ca 5312
1ed6fd08 5313 if (GET_CODE (len) == CONST_INT && INTVAL (len) > 0 && INTVAL (len) <= 257)
4fbc4db5 5314 {
805a133b 5315 if (val == const0_rtx && INTVAL (len) <= 256)
008c057d 5316 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
805a133b 5317 else
5318 {
5319 /* Initialize memory by storing the first byte. */
5320 emit_move_insn (adjust_address (dst, QImode, 0), val);
ffead1ca 5321
805a133b 5322 if (INTVAL (len) > 1)
5323 {
5324 /* Initiate 1 byte overlap move.
5325 The first byte of DST is propagated through DSTP1.
5326 Prepare a movmem for: DST+1 = DST (length = LEN - 1).
5327 DST is set to size 1 so the rest of the memory location
5328 does not count as source operand. */
5329 rtx dstp1 = adjust_address (dst, VOIDmode, 1);
5b2a69fa 5330 set_mem_size (dst, 1);
805a133b 5331
ffead1ca 5332 emit_insn (gen_movmem_short (dstp1, dst,
805a133b 5333 GEN_INT (INTVAL (len) - 2)));
5334 }
5335 }
4fbc4db5 5336 }
5337
5338 else if (TARGET_MVCLE)
5339 {
805a133b 5340 val = force_not_mem (convert_modes (Pmode, QImode, val, 1));
00091884 5341 if (TARGET_64BIT)
5342 emit_insn (gen_setmem_long_di (dst, convert_to_mode (Pmode, len, 1),
5343 val));
5344 else
5345 emit_insn (gen_setmem_long_si (dst, convert_to_mode (Pmode, len, 1),
5346 val));
4fbc4db5 5347 }
5348
5349 else
5350 {
b5fdc416 5351 rtx dst_addr, count, blocks, temp, dstp1 = NULL_RTX;
79f6a8ed 5352 rtx_code_label *loop_start_label = gen_label_rtx ();
5353 rtx_code_label *loop_end_label = gen_label_rtx ();
5354 rtx_code_label *end_label = gen_label_rtx ();
3754d046 5355 machine_mode mode;
4fbc4db5 5356
5357 mode = GET_MODE (len);
5358 if (mode == VOIDmode)
31838f66 5359 mode = Pmode;
4fbc4db5 5360
4fbc4db5 5361 dst_addr = gen_reg_rtx (Pmode);
4fbc4db5 5362 count = gen_reg_rtx (mode);
5363 blocks = gen_reg_rtx (mode);
5364
5365 convert_move (count, len, 1);
f81e845f 5366 emit_cmp_and_jump_insns (count, const0_rtx,
4fbc4db5 5367 EQ, NULL_RTX, mode, 1, end_label);
5368
5369 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
5370 dst = change_address (dst, VOIDmode, dst_addr);
f81e845f 5371
805a133b 5372 if (val == const0_rtx)
b9c74b4d 5373 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1,
5374 OPTAB_DIRECT);
805a133b 5375 else
5376 {
5377 dstp1 = adjust_address (dst, VOIDmode, 1);
5b2a69fa 5378 set_mem_size (dst, 1);
805a133b 5379
5380 /* Initialize memory by storing the first byte. */
5381 emit_move_insn (adjust_address (dst, QImode, 0), val);
ffead1ca 5382
805a133b 5383 /* If count is 1 we are done. */
5384 emit_cmp_and_jump_insns (count, const1_rtx,
5385 EQ, NULL_RTX, mode, 1, end_label);
5386
b9c74b4d 5387 temp = expand_binop (mode, add_optab, count, GEN_INT (-2), count, 1,
5388 OPTAB_DIRECT);
805a133b 5389 }
4fbc4db5 5390 if (temp != count)
5391 emit_move_insn (count, temp);
5392
b9c74b4d 5393 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
5394 OPTAB_DIRECT);
4fbc4db5 5395 if (temp != blocks)
5396 emit_move_insn (blocks, temp);
5397
4ee9c684 5398 emit_cmp_and_jump_insns (blocks, const0_rtx,
5399 EQ, NULL_RTX, mode, 1, loop_end_label);
7746964e 5400
5401 emit_label (loop_start_label);
4fbc4db5 5402
d5de7805 5403 if (TARGET_Z10
5404 && (GET_CODE (len) != CONST_INT || INTVAL (len) > 1024))
5405 {
5406 /* Issue a write prefetch for the +4 cache line. */
5407 rtx prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, dst_addr,
5408 GEN_INT (1024)),
5409 const1_rtx, const0_rtx);
5410 emit_insn (prefetch);
5411 PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
5412 }
5413
805a133b 5414 if (val == const0_rtx)
5415 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
5416 else
5417 emit_insn (gen_movmem_short (dstp1, dst, GEN_INT (255)));
f81e845f 5418 s390_load_address (dst_addr,
4fbc4db5 5419 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
f81e845f 5420
b9c74b4d 5421 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
5422 OPTAB_DIRECT);
4fbc4db5 5423 if (temp != blocks)
5424 emit_move_insn (blocks, temp);
5425
4ee9c684 5426 emit_cmp_and_jump_insns (blocks, const0_rtx,
5427 EQ, NULL_RTX, mode, 1, loop_end_label);
7746964e 5428
5429 emit_jump (loop_start_label);
4ee9c684 5430 emit_label (loop_end_label);
4fbc4db5 5431
805a133b 5432 if (val == const0_rtx)
5433 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
5434 else
5435 emit_insn (gen_movmem_short (dstp1, dst, convert_to_mode (Pmode, count, 1)));
4fbc4db5 5436 emit_label (end_label);
5437 }
5438}
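/* The 1-byte-overlap move used above is the classic S/390 memset
   idiom (note added for illustration): once VAL has been stored at
   DST[0], an MVC from DST to DST + 1 of length LEN - 1 copies strictly
   byte by byte from left to right, so every byte just written is read
   again as the source of the next one and the initial value is
   propagated through the whole destination:
       DST: V . . . .   before the overlapped move
       DST: V V V V V   after MVC DST+1(LEN-1),DST  */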
5439
5440/* Emit code to compare LEN bytes at OP0 with those at OP1,
5441 and return the result in TARGET. */
5442
daa87e5a 5443bool
b40da9a7 5444s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
4fbc4db5 5445{
80b53886 5446 rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
dd16a4bd 5447 rtx tmp;
5448
daa87e5a 5449 /* When tuning for z10 or higher we rely on the Glibc functions to
5450     do the right thing.  Inline code is generated only for constant
5451     lengths below 64k. */
5452 if (s390_tune >= PROCESSOR_2097_Z10
5453 && (GET_CODE (len) != CONST_INT || INTVAL (len) > (1<<16)))
5454 return false;
5455
dd16a4bd 5456 /* As the result of CMPINT is inverted compared to what we need,
5457 we have to swap the operands. */
5458 tmp = op0; op0 = op1; op1 = tmp;
4fbc4db5 5459
4fbc4db5 5460 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
5461 {
5462 if (INTVAL (len) > 0)
5463 {
31838f66 5464 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
dd16a4bd 5465 emit_insn (gen_cmpint (target, ccreg));
4fbc4db5 5466 }
5467 else
5468 emit_move_insn (target, const0_rtx);
5469 }
bcbf02a5 5470 else if (TARGET_MVCLE)
4fbc4db5 5471 {
31838f66 5472 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
dd16a4bd 5473 emit_insn (gen_cmpint (target, ccreg));
4fbc4db5 5474 }
4fbc4db5 5475 else
5476 {
5477 rtx addr0, addr1, count, blocks, temp;
79f6a8ed 5478 rtx_code_label *loop_start_label = gen_label_rtx ();
5479 rtx_code_label *loop_end_label = gen_label_rtx ();
5480 rtx_code_label *end_label = gen_label_rtx ();
3754d046 5481 machine_mode mode;
4fbc4db5 5482
5483 mode = GET_MODE (len);
5484 if (mode == VOIDmode)
31838f66 5485 mode = Pmode;
4fbc4db5 5486
4fbc4db5 5487 addr0 = gen_reg_rtx (Pmode);
5488 addr1 = gen_reg_rtx (Pmode);
5489 count = gen_reg_rtx (mode);
5490 blocks = gen_reg_rtx (mode);
5491
5492 convert_move (count, len, 1);
f81e845f 5493 emit_cmp_and_jump_insns (count, const0_rtx,
4fbc4db5 5494 EQ, NULL_RTX, mode, 1, end_label);
5495
5496 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
5497 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
5498 op0 = change_address (op0, VOIDmode, addr0);
5499 op1 = change_address (op1, VOIDmode, addr1);
f81e845f 5500
b9c74b4d 5501 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1,
5502 OPTAB_DIRECT);
4fbc4db5 5503 if (temp != count)
5504 emit_move_insn (count, temp);
5505
b9c74b4d 5506 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
5507 OPTAB_DIRECT);
4fbc4db5 5508 if (temp != blocks)
5509 emit_move_insn (blocks, temp);
5510
4ee9c684 5511 emit_cmp_and_jump_insns (blocks, const0_rtx,
5512 EQ, NULL_RTX, mode, 1, loop_end_label);
7746964e 5513
5514 emit_label (loop_start_label);
4fbc4db5 5515
d5de7805 5516 if (TARGET_Z10
5517 && (GET_CODE (len) != CONST_INT || INTVAL (len) > 512))
5518 {
5519 rtx prefetch;
5520
5521 /* Issue a read prefetch for the +2 cache line of operand 1. */
5522 prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, addr0, GEN_INT (512)),
5523 const0_rtx, const0_rtx);
5524 emit_insn (prefetch);
5525 PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
5526
5527 /* Issue a read prefetch for the +2 cache line of operand 2. */
5528 prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, addr1, GEN_INT (512)),
5529 const0_rtx, const0_rtx);
5530 emit_insn (prefetch);
5531 PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
5532 }
5533
31838f66 5534 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
80b53886 5535 temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
f81e845f 5536 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
4fbc4db5 5537 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
d1f9b275 5538 temp = gen_rtx_SET (pc_rtx, temp);
4fbc4db5 5539 emit_jump_insn (temp);
5540
f81e845f 5541 s390_load_address (addr0,
4fbc4db5 5542 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
f81e845f 5543 s390_load_address (addr1,
4fbc4db5 5544 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
f81e845f 5545
b9c74b4d 5546 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
5547 OPTAB_DIRECT);
4fbc4db5 5548 if (temp != blocks)
5549 emit_move_insn (blocks, temp);
5550
4ee9c684 5551 emit_cmp_and_jump_insns (blocks, const0_rtx,
5552 EQ, NULL_RTX, mode, 1, loop_end_label);
7746964e 5553
5554 emit_jump (loop_start_label);
4ee9c684 5555 emit_label (loop_end_label);
4fbc4db5 5556
f588eb9f 5557 emit_insn (gen_cmpmem_short (op0, op1,
31838f66 5558 convert_to_mode (Pmode, count, 1)));
4fbc4db5 5559 emit_label (end_label);
5560
dd16a4bd 5561 emit_insn (gen_cmpint (target, ccreg));
4fbc4db5 5562 }
daa87e5a 5563 return true;
4fbc4db5 5564}
5565
76a4c804 5566/* Emit a conditional jump to LABEL for condition code mask MASK using
5567   comparison operator COMPARISON. Return the emitted jump insn. */
5568
26cd1198 5569static rtx_insn *
76a4c804 5570s390_emit_ccraw_jump (HOST_WIDE_INT mask, enum rtx_code comparison, rtx label)
5571{
5572 rtx temp;
5573
5574 gcc_assert (comparison == EQ || comparison == NE);
5575 gcc_assert (mask > 0 && mask < 15);
5576
5577 temp = gen_rtx_fmt_ee (comparison, VOIDmode,
5578 gen_rtx_REG (CCRAWmode, CC_REGNUM), GEN_INT (mask));
5579 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
5580 gen_rtx_LABEL_REF (VOIDmode, label), pc_rtx);
5581 temp = gen_rtx_SET (pc_rtx, temp);
5582 return emit_jump_insn (temp);
5583}
5584
5585/* Emit the instructions to implement strlen of STRING and store the
5586 result in TARGET. The string has the known ALIGNMENT. This
5587 version uses vector instructions and is therefore not appropriate
5588 for targets prior to z13. */
5589
5590void
5591s390_expand_vec_strlen (rtx target, rtx string, rtx alignment)
5592{
5593 int very_unlikely = REG_BR_PROB_BASE / 100 - 1;
5594 int very_likely = REG_BR_PROB_BASE - 1;
5595 rtx highest_index_to_load_reg = gen_reg_rtx (Pmode);
5596 rtx str_reg = gen_reg_rtx (V16QImode);
5597 rtx str_addr_base_reg = gen_reg_rtx (Pmode);
5598 rtx str_idx_reg = gen_reg_rtx (Pmode);
5599 rtx result_reg = gen_reg_rtx (V16QImode);
5600 rtx is_aligned_label = gen_label_rtx ();
5601 rtx into_loop_label = NULL_RTX;
5602 rtx loop_start_label = gen_label_rtx ();
5603 rtx temp;
5604 rtx len = gen_reg_rtx (QImode);
5605 rtx cond;
5606
5607 s390_load_address (str_addr_base_reg, XEXP (string, 0));
5608 emit_move_insn (str_idx_reg, const0_rtx);
5609
5610 if (INTVAL (alignment) < 16)
5611 {
5612      /* Check whether the address happens to be aligned properly and,
5613         if so, jump directly to the aligned loop. */
5614 emit_cmp_and_jump_insns (gen_rtx_AND (Pmode,
5615 str_addr_base_reg, GEN_INT (15)),
5616 const0_rtx, EQ, NULL_RTX,
5617 Pmode, 1, is_aligned_label);
5618
5619 temp = gen_reg_rtx (Pmode);
5620 temp = expand_binop (Pmode, and_optab, str_addr_base_reg,
5621 GEN_INT (15), temp, 1, OPTAB_DIRECT);
5622 gcc_assert (REG_P (temp));
5623 highest_index_to_load_reg =
5624 expand_binop (Pmode, sub_optab, GEN_INT (15), temp,
5625 highest_index_to_load_reg, 1, OPTAB_DIRECT);
5626 gcc_assert (REG_P (highest_index_to_load_reg));
5627 emit_insn (gen_vllv16qi (str_reg,
5628 convert_to_mode (SImode, highest_index_to_load_reg, 1),
5629 gen_rtx_MEM (BLKmode, str_addr_base_reg)));
5630
5631 into_loop_label = gen_label_rtx ();
5632 s390_emit_jump (into_loop_label, NULL_RTX);
5633 emit_barrier ();
5634 }
5635
5636 emit_label (is_aligned_label);
5637 LABEL_NUSES (is_aligned_label) = INTVAL (alignment) < 16 ? 2 : 1;
5638
5639  /* From this point on we only perform 16-byte aligned
5640     loads. */
5641 emit_move_insn (highest_index_to_load_reg, GEN_INT (15));
5642
5643 emit_label (loop_start_label);
5644 LABEL_NUSES (loop_start_label) = 1;
5645
5646 /* Load 16 bytes of the string into VR. */
5647 emit_move_insn (str_reg,
5648 gen_rtx_MEM (V16QImode,
5649 gen_rtx_PLUS (Pmode, str_idx_reg,
5650 str_addr_base_reg)));
5651 if (into_loop_label != NULL_RTX)
5652 {
5653 emit_label (into_loop_label);
5654 LABEL_NUSES (into_loop_label) = 1;
5655 }
5656
5657 /* Increment string index by 16 bytes. */
5658 expand_binop (Pmode, add_optab, str_idx_reg, GEN_INT (16),
5659 str_idx_reg, 1, OPTAB_DIRECT);
5660
5661 emit_insn (gen_vec_vfenesv16qi (result_reg, str_reg, str_reg,
5662 GEN_INT (VSTRING_FLAG_ZS | VSTRING_FLAG_CS)));
5663
5664 add_int_reg_note (s390_emit_ccraw_jump (8, NE, loop_start_label),
5665 REG_BR_PROB, very_likely);
5666 emit_insn (gen_vec_extractv16qi (len, result_reg, GEN_INT (7)));
5667
5668  /* If the string pointer wasn't aligned we have loaded less than 16
5669 bytes and the remaining bytes got filled with zeros (by vll).
5670 Now we have to check whether the resulting index lies within the
5671 bytes actually part of the string. */
5672
5673 cond = s390_emit_compare (GT, convert_to_mode (Pmode, len, 1),
5674 highest_index_to_load_reg);
5675 s390_load_address (highest_index_to_load_reg,
5676 gen_rtx_PLUS (Pmode, highest_index_to_load_reg,
5677 const1_rtx));
5678 if (TARGET_64BIT)
5679 emit_insn (gen_movdicc (str_idx_reg, cond,
5680 highest_index_to_load_reg, str_idx_reg));
5681 else
5682 emit_insn (gen_movsicc (str_idx_reg, cond,
5683 highest_index_to_load_reg, str_idx_reg));
5684
5685 add_int_reg_note (s390_emit_jump (is_aligned_label, cond), REG_BR_PROB,
5686 very_unlikely);
5687
5688 expand_binop (Pmode, add_optab, str_idx_reg,
5689 GEN_INT (-16), str_idx_reg, 1, OPTAB_DIRECT);
5690 /* FIXME: len is already zero extended - so avoid the llgcr emitted
5691 here. */
5692 temp = expand_binop (Pmode, add_optab, str_idx_reg,
5693 convert_to_mode (Pmode, len, 1),
5694 target, 1, OPTAB_DIRECT);
5695 if (temp != target)
5696 emit_move_insn (target, temp);
5697}
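/* A rough C-level model of the expansion above, added for illustration
   only; the vll/vfenez vector operations are replaced by scalar loops
   and ALIGN plays the role of the ALIGNMENT operand.  */
static unsigned long
vec_strlen_model (const char *s, unsigned long align)
{
  unsigned long idx = 0;
  unsigned long i;

  if (align < 16 && ((unsigned long) s & 15) != 0)
    {
      /* Partial first load: vll never crosses the next 16-byte
         boundary, so only HIGHEST + 1 bytes are valid.  */
      unsigned long highest = 15 - ((unsigned long) s & 15);

      for (i = 0; i <= highest; i++)
        if (s[i] == 0)
          return i;
      idx = highest + 1;
    }

  /* From here on every load is 16-byte aligned.  */
  for (;;)
    {
      for (i = 0; i < 16; i++)
        if (s[idx + i] == 0)
          return idx + i;
      idx += 16;
    }
}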
3b699fc7 5698
664ff6a0 5699void
5700s390_expand_vec_movstr (rtx result, rtx dst, rtx src)
5701{
5702 int very_unlikely = REG_BR_PROB_BASE / 100 - 1;
5703 rtx temp = gen_reg_rtx (Pmode);
5704 rtx src_addr = XEXP (src, 0);
5705 rtx dst_addr = XEXP (dst, 0);
5706 rtx src_addr_reg = gen_reg_rtx (Pmode);
5707 rtx dst_addr_reg = gen_reg_rtx (Pmode);
5708 rtx offset = gen_reg_rtx (Pmode);
5709 rtx vsrc = gen_reg_rtx (V16QImode);
5710 rtx vpos = gen_reg_rtx (V16QImode);
5711 rtx loadlen = gen_reg_rtx (SImode);
5712 rtx gpos_qi = gen_reg_rtx(QImode);
5713 rtx gpos = gen_reg_rtx (SImode);
5714 rtx done_label = gen_label_rtx ();
5715 rtx loop_label = gen_label_rtx ();
5716 rtx exit_label = gen_label_rtx ();
5717 rtx full_label = gen_label_rtx ();
5718
5719 /* Perform a quick check for string ending on the first up to 16
5720 bytes and exit early if successful. */
5721
5722 emit_insn (gen_vlbb (vsrc, src, GEN_INT (6)));
5723 emit_insn (gen_lcbb (loadlen, src_addr, GEN_INT (6)));
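  /* Note, added for clarity (not part of the original sources): the
     block-boundary code 6 passed to vlbb/lcbb above selects a 4 KB
     block, so the speculative load never crosses a page boundary;
     lcbb returns how many bytes were actually loadable.  This makes
     the first-chunk probe safe even when the string ends just before
     an unmapped page.  */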
5724 emit_insn (gen_vfenezv16qi (vpos, vsrc, vsrc));
5725 emit_insn (gen_vec_extractv16qi (gpos_qi, vpos, GEN_INT (7)));
5726 emit_move_insn (gpos, gen_rtx_SUBREG (SImode, gpos_qi, 0));
5727 /* gpos is the byte index if a zero was found and 16 otherwise.
5728     So if it is less than the number of bytes loaded we have a hit. */
5729 emit_cmp_and_jump_insns (gpos, loadlen, GE, NULL_RTX, SImode, 1,
5730 full_label);
5731 emit_insn (gen_vstlv16qi (vsrc, gpos, dst));
5732
5733 force_expand_binop (Pmode, add_optab, dst_addr, gpos, result,
5734 1, OPTAB_DIRECT);
5735 emit_jump (exit_label);
5736 emit_barrier ();
5737
5738 emit_label (full_label);
5739 LABEL_NUSES (full_label) = 1;
5740
5741 /* Calculate `offset' so that src + offset points to the last byte
5742     before the next 16-byte boundary. */
5743
5744 /* temp = src_addr & 0xf */
5745 force_expand_binop (Pmode, and_optab, src_addr, GEN_INT (15), temp,
5746 1, OPTAB_DIRECT);
5747
5748 /* offset = 0xf - temp */
5749 emit_move_insn (offset, GEN_INT (15));
5750 force_expand_binop (Pmode, sub_optab, offset, temp, offset,
5751 1, OPTAB_DIRECT);
5752
5753  /* Store `offset' bytes in the destination string. The quick check
5754 has loaded at least `offset' bytes into vsrc. */
5755
5756 emit_insn (gen_vstlv16qi (vsrc, gen_lowpart (SImode, offset), dst));
5757
5758 /* Advance to the next byte to be loaded. */
5759 force_expand_binop (Pmode, add_optab, offset, const1_rtx, offset,
5760 1, OPTAB_DIRECT);
5761
5762 /* Make sure the addresses are single regs which can be used as a
5763 base. */
5764 emit_move_insn (src_addr_reg, src_addr);
5765 emit_move_insn (dst_addr_reg, dst_addr);
5766
5767 /* MAIN LOOP */
5768
5769 emit_label (loop_label);
5770 LABEL_NUSES (loop_label) = 1;
5771
5772 emit_move_insn (vsrc,
5773 gen_rtx_MEM (V16QImode,
5774 gen_rtx_PLUS (Pmode, src_addr_reg, offset)));
5775
5776 emit_insn (gen_vec_vfenesv16qi (vpos, vsrc, vsrc,
5777 GEN_INT (VSTRING_FLAG_ZS | VSTRING_FLAG_CS)));
5778 add_int_reg_note (s390_emit_ccraw_jump (8, EQ, done_label),
5779 REG_BR_PROB, very_unlikely);
5780
5781 emit_move_insn (gen_rtx_MEM (V16QImode,
5782 gen_rtx_PLUS (Pmode, dst_addr_reg, offset)),
5783 vsrc);
5784 /* offset += 16 */
5785 force_expand_binop (Pmode, add_optab, offset, GEN_INT (16),
5786 offset, 1, OPTAB_DIRECT);
5787
5788 emit_jump (loop_label);
5789 emit_barrier ();
5790
5791 /* REGULAR EXIT */
5792
5793 /* We are done. Add the offset of the zero character to the dst_addr
5794 pointer to get the result. */
5795
5796 emit_label (done_label);
5797 LABEL_NUSES (done_label) = 1;
5798
5799 force_expand_binop (Pmode, add_optab, dst_addr_reg, offset, dst_addr_reg,
5800 1, OPTAB_DIRECT);
5801
5802 emit_insn (gen_vec_extractv16qi (gpos_qi, vpos, GEN_INT (7)));
5803 emit_move_insn (gpos, gen_rtx_SUBREG (SImode, gpos_qi, 0));
5804
5805 emit_insn (gen_vstlv16qi (vsrc, gpos, gen_rtx_MEM (BLKmode, dst_addr_reg)));
5806
5807 force_expand_binop (Pmode, add_optab, dst_addr_reg, gpos, result,
5808 1, OPTAB_DIRECT);
5809
5810 /* EARLY EXIT */
5811
5812 emit_label (exit_label);
5813 LABEL_NUSES (exit_label) = 1;
5814}
5815
5816
3b699fc7 5817/* Expand conditional increment or decrement using alc/slb instructions.
5818 Should generate code setting DST to either SRC or SRC + INCREMENT,
5819 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
eeba5f25 5820 Returns true if successful, false otherwise.
5821
5822 That makes it possible to implement some if-constructs without jumps e.g.:
5823 (borrow = CC0 | CC1 and carry = CC2 | CC3)
5824 unsigned int a, b, c;
5825 if (a < b) c++; -> CCU b > a -> CC2; c += carry;
5826 if (a < b) c--; -> CCL3 a - b -> borrow; c -= borrow;
5827 if (a <= b) c++; -> CCL3 b - a -> borrow; c += carry;
5828 if (a <= b) c--; -> CCU a <= b -> borrow; c -= borrow;
5829
5830 Checks for EQ and NE with a nonzero value need an additional xor e.g.:
5831 if (a == b) c++; -> CCL3 a ^= b; 0 - a -> borrow; c += carry;
5832 if (a == b) c--; -> CCU a ^= b; a <= 0 -> CC0 | CC1; c -= borrow;
5833 if (a != b) c++; -> CCU a ^= b; a > 0 -> CC2; c += carry;
5834 if (a != b) c--; -> CCL3 a ^= b; 0 - a -> borrow; c -= borrow; */
3b699fc7 5835
5836bool
5837s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
5838 rtx dst, rtx src, rtx increment)
5839{
3754d046 5840 machine_mode cmp_mode;
5841 machine_mode cc_mode;
3b699fc7 5842 rtx op_res;
5843 rtx insn;
5844 rtvec p;
32eda510 5845 int ret;
3b699fc7 5846
5847 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
5848 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
5849 cmp_mode = SImode;
5850 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
5851 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
5852 cmp_mode = DImode;
5853 else
5854 return false;
5855
5856 /* Try ADD LOGICAL WITH CARRY. */
5857 if (increment == const1_rtx)
5858 {
5859 /* Determine CC mode to use. */
5860 if (cmp_code == EQ || cmp_code == NE)
5861 {
5862 if (cmp_op1 != const0_rtx)
5863 {
5864 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
5865 NULL_RTX, 0, OPTAB_WIDEN);
5866 cmp_op1 = const0_rtx;
5867 }
5868
5869 cmp_code = cmp_code == EQ ? LEU : GTU;
5870 }
5871
5872 if (cmp_code == LTU || cmp_code == LEU)
5873 {
5874 rtx tem = cmp_op0;
5875 cmp_op0 = cmp_op1;
5876 cmp_op1 = tem;
5877 cmp_code = swap_condition (cmp_code);
5878 }
5879
5880 switch (cmp_code)
5881 {
5882 case GTU:
5883 cc_mode = CCUmode;
5884 break;
5885
5886 case GEU:
5887 cc_mode = CCL3mode;
5888 break;
5889
5890 default:
5891 return false;
5892 }
5893
5894 /* Emit comparison instruction pattern. */
5895 if (!register_operand (cmp_op0, cmp_mode))
5896 cmp_op0 = force_reg (cmp_mode, cmp_op0);
5897
d1f9b275 5898 insn = gen_rtx_SET (gen_rtx_REG (cc_mode, CC_REGNUM),
3b699fc7 5899 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
5900 /* We use insn_invalid_p here to add clobbers if required. */
dae9d0e7 5901 ret = insn_invalid_p (emit_insn (insn), false);
32eda510 5902 gcc_assert (!ret);
3b699fc7 5903
5904 /* Emit ALC instruction pattern. */
5905 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
5906 gen_rtx_REG (cc_mode, CC_REGNUM),
5907 const0_rtx);
5908
5909 if (src != const0_rtx)
5910 {
5911 if (!register_operand (src, GET_MODE (dst)))
5912 src = force_reg (GET_MODE (dst), src);
5913
6f4afa7e 5914 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, src);
5915 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, const0_rtx);
3b699fc7 5916 }
5917
5918 p = rtvec_alloc (2);
ffead1ca 5919 RTVEC_ELT (p, 0) =
d1f9b275 5920 gen_rtx_SET (dst, op_res);
ffead1ca 5921 RTVEC_ELT (p, 1) =
3b699fc7 5922 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
5923 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
5924
5925 return true;
5926 }
5927
5928 /* Try SUBTRACT LOGICAL WITH BORROW. */
5929 if (increment == constm1_rtx)
5930 {
5931 /* Determine CC mode to use. */
5932 if (cmp_code == EQ || cmp_code == NE)
5933 {
5934 if (cmp_op1 != const0_rtx)
5935 {
5936 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
5937 NULL_RTX, 0, OPTAB_WIDEN);
5938 cmp_op1 = const0_rtx;
5939 }
5940
5941 cmp_code = cmp_code == EQ ? LEU : GTU;
5942 }
5943
5944 if (cmp_code == GTU || cmp_code == GEU)
5945 {
5946 rtx tem = cmp_op0;
5947 cmp_op0 = cmp_op1;
5948 cmp_op1 = tem;
5949 cmp_code = swap_condition (cmp_code);
5950 }
5951
5952 switch (cmp_code)
5953 {
5954 case LEU:
5955 cc_mode = CCUmode;
5956 break;
5957
5958 case LTU:
5959 cc_mode = CCL3mode;
5960 break;
5961
5962 default:
5963 return false;
5964 }
5965
5966 /* Emit comparison instruction pattern. */
5967 if (!register_operand (cmp_op0, cmp_mode))
5968 cmp_op0 = force_reg (cmp_mode, cmp_op0);
5969
d1f9b275 5970 insn = gen_rtx_SET (gen_rtx_REG (cc_mode, CC_REGNUM),
3b699fc7 5971 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
5972 /* We use insn_invalid_p here to add clobbers if required. */
dae9d0e7 5973 ret = insn_invalid_p (emit_insn (insn), false);
32eda510 5974 gcc_assert (!ret);
3b699fc7 5975
5976 /* Emit SLB instruction pattern. */
5977 if (!register_operand (src, GET_MODE (dst)))
5978 src = force_reg (GET_MODE (dst), src);
5979
ffead1ca 5980 op_res = gen_rtx_MINUS (GET_MODE (dst),
5981 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
5982 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
5983 gen_rtx_REG (cc_mode, CC_REGNUM),
3b699fc7 5984 const0_rtx));
5985 p = rtvec_alloc (2);
ffead1ca 5986 RTVEC_ELT (p, 0) =
d1f9b275 5987 gen_rtx_SET (dst, op_res);
ffead1ca 5988 RTVEC_ELT (p, 1) =
3b699fc7 5989 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
5990 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
5991
5992 return true;
5993 }
5994
5995 return false;
5996}
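/* Illustrative user-level example, added for clarity (not part of the
   original sources): a conditional increment like the one below is what
   s390_expand_addcc turns into a compare followed by ADD LOGICAL WITH
   CARRY, avoiding a branch.  */
static unsigned int
count_if_less (unsigned int a, unsigned int b, unsigned int c)
{
  if (a < b)
    c++;   /* expanded branch-free via alc when the target allows it */
  return c;
}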
5997
e68d6a13 5998/* Expand code for the insv template. Return true if successful. */
0349cc73 5999
e68d6a13 6000bool
0349cc73 6001s390_expand_insv (rtx dest, rtx op1, rtx op2, rtx src)
6002{
6003 int bitsize = INTVAL (op1);
6004 int bitpos = INTVAL (op2);
3754d046 6005 machine_mode mode = GET_MODE (dest);
6006 machine_mode smode;
8c753480 6007 int smode_bsize, mode_bsize;
6008 rtx op, clobber;
0349cc73 6009
0bc377b9 6010 if (bitsize + bitpos > GET_MODE_BITSIZE (mode))
31efd1ec 6011 return false;
6012
8c753480 6013 /* Generate INSERT IMMEDIATE (IILL et al). */
6014 /* (set (ze (reg)) (const_int)). */
6015 if (TARGET_ZARCH
6016 && register_operand (dest, word_mode)
6017 && (bitpos % 16) == 0
6018 && (bitsize % 16) == 0
6019 && const_int_operand (src, VOIDmode))
e68d6a13 6020 {
8c753480 6021 HOST_WIDE_INT val = INTVAL (src);
6022 int regpos = bitpos + bitsize;
e68d6a13 6023
8c753480 6024 while (regpos > bitpos)
6025 {
3754d046 6026 machine_mode putmode;
8c753480 6027 int putsize;
6028
6029 if (TARGET_EXTIMM && (regpos % 32 == 0) && (regpos >= bitpos + 32))
6030 putmode = SImode;
6031 else
6032 putmode = HImode;
e68d6a13 6033
8c753480 6034 putsize = GET_MODE_BITSIZE (putmode);
6035 regpos -= putsize;
6036 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
6037 GEN_INT (putsize),
6038 GEN_INT (regpos)),
6039 gen_int_mode (val, putmode));
6040 val >>= putsize;
6041 }
6042 gcc_assert (regpos == bitpos);
e68d6a13 6043 return true;
6044 }
6045
8c753480 6046 smode = smallest_mode_for_size (bitsize, MODE_INT);
6047 smode_bsize = GET_MODE_BITSIZE (smode);
6048 mode_bsize = GET_MODE_BITSIZE (mode);
0349cc73 6049
8c753480 6050 /* Generate STORE CHARACTERS UNDER MASK (STCM et al). */
0349cc73 6051 if (bitpos == 0
8c753480 6052 && (bitsize % BITS_PER_UNIT) == 0
6053 && MEM_P (dest)
0349cc73 6054 && (register_operand (src, word_mode)
6055 || const_int_operand (src, VOIDmode)))
6056 {
6057 /* Emit standard pattern if possible. */
8c753480 6058 if (smode_bsize == bitsize)
6059 {
6060 emit_move_insn (adjust_address (dest, smode, 0),
6061 gen_lowpart (smode, src));
6062 return true;
6063 }
0349cc73 6064
6065 /* (set (ze (mem)) (const_int)). */
6066 else if (const_int_operand (src, VOIDmode))
6067 {
6068 int size = bitsize / BITS_PER_UNIT;
8c753480 6069 rtx src_mem = adjust_address (force_const_mem (word_mode, src),
6070 BLKmode,
6071 UNITS_PER_WORD - size);
0349cc73 6072
6073 dest = adjust_address (dest, BLKmode, 0);
5b2a69fa 6074 set_mem_size (dest, size);
0349cc73 6075 s390_expand_movmem (dest, src_mem, GEN_INT (size));
8c753480 6076 return true;
0349cc73 6077 }
ffead1ca 6078
0349cc73 6079 /* (set (ze (mem)) (reg)). */
6080 else if (register_operand (src, word_mode))
6081 {
8c753480 6082 if (bitsize <= 32)
0349cc73 6083 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, op1,
6084 const0_rtx), src);
6085 else
6086 {
6087 /* Emit st,stcmh sequence. */
8c753480 6088 int stcmh_width = bitsize - 32;
0349cc73 6089 int size = stcmh_width / BITS_PER_UNIT;
6090
ffead1ca 6091 emit_move_insn (adjust_address (dest, SImode, size),
0349cc73 6092 gen_lowpart (SImode, src));
5b2a69fa 6093 set_mem_size (dest, size);
8c753480 6094 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
6095 GEN_INT (stcmh_width),
6096 const0_rtx),
6097 gen_rtx_LSHIFTRT (word_mode, src, GEN_INT (32)));
0349cc73 6098 }
8c753480 6099 return true;
0349cc73 6100 }
8c753480 6101 }
0349cc73 6102
8c753480 6103 /* Generate INSERT CHARACTERS UNDER MASK (IC, ICM et al). */
6104 if ((bitpos % BITS_PER_UNIT) == 0
6105 && (bitsize % BITS_PER_UNIT) == 0
6106 && (bitpos & 32) == ((bitpos + bitsize - 1) & 32)
6107 && MEM_P (src)
6108 && (mode == DImode || mode == SImode)
6109 && register_operand (dest, mode))
6110 {
6111 /* Emit a strict_low_part pattern if possible. */
6112 if (smode_bsize == bitsize && bitpos == mode_bsize - smode_bsize)
6113 {
6114 op = gen_rtx_STRICT_LOW_PART (VOIDmode, gen_lowpart (smode, dest));
d1f9b275 6115 op = gen_rtx_SET (op, gen_lowpart (smode, src));
8c753480 6116 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
6117 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clobber)));
6118 return true;
6119 }
6120
6121 /* ??? There are more powerful versions of ICM that are not
6122 completely represented in the md file. */
0349cc73 6123 }
6124
8c753480 6125 /* For z10, generate ROTATE THEN INSERT SELECTED BITS (RISBG et al). */
6126 if (TARGET_Z10 && (mode == DImode || mode == SImode))
0349cc73 6127 {
3754d046 6128 machine_mode mode_s = GET_MODE (src);
0349cc73 6129
678c417b 6130 if (CONSTANT_P (src))
0349cc73 6131 {
02a8efd2 6132 /* For constant zero values the representation with AND
6133 appears to be folded in more situations than the (set
6134 (zero_extract) ...).
6135 We only do this when the start and end of the bitfield
6136 remain in the same SImode chunk. That way nihf or nilf
6137 can be used.
6138 The AND patterns might still generate a risbg for this. */
6139 if (src == const0_rtx && bitpos / 32 == (bitpos + bitsize - 1) / 32)
6140 return false;
6141 else
6142 src = force_reg (mode, src);
8c753480 6143 }
6144 else if (mode_s != mode)
6145 {
6146 gcc_assert (GET_MODE_BITSIZE (mode_s) >= bitsize);
6147 src = force_reg (mode_s, src);
6148 src = gen_lowpart (mode, src);
6149 }
0349cc73 6150
99274008 6151 op = gen_rtx_ZERO_EXTRACT (mode, dest, op1, op2),
d1f9b275 6152 op = gen_rtx_SET (op, src);
81769881 6153
6154 if (!TARGET_ZEC12)
6155 {
6156 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
6157 op = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clobber));
6158 }
6159 emit_insn (op);
0349cc73 6160
0349cc73 6161 return true;
6162 }
6163
6164 return false;
6165}
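/* Worked example for the INSERT IMMEDIATE path above, added for
   illustration: with bitpos = 0, bitsize = 32 and TARGET_EXTIMM,
   regpos starts at 32, the loop picks putmode = SImode and emits a
   single 32-bit immediate insert at bit position 0.  Without
   TARGET_EXTIMM the same insertion is split into two HImode (16-bit)
   inserts at positions 16 and 0.  */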
3b699fc7 6166
7cc66daf 6167/* A subroutine of s390_expand_cs_hqi and s390_expand_atomic which returns a
6168 register that holds VAL of mode MODE shifted by COUNT bits. */
182f815e 6169
6170static inline rtx
3754d046 6171s390_expand_mask_and_shift (rtx val, machine_mode mode, rtx count)
182f815e 6172{
6173 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
6174 NULL_RTX, 1, OPTAB_DIRECT);
ffead1ca 6175 return expand_simple_binop (SImode, ASHIFT, val, count,
182f815e 6176 NULL_RTX, 1, OPTAB_DIRECT);
6177}
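/* Example, added for illustration: for mode == QImode and a shift
   count of 8 this computes (VAL & 0xff) << 8, i.e. the byte is masked
   out and moved into the second-lowest byte of the SImode word.  */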
6178
76a4c804 6179/* Generate a vector comparison COND of CMP_OP1 and CMP_OP2 and store
6180 the result in TARGET. */
6181
6182void
6183s390_expand_vec_compare (rtx target, enum rtx_code cond,
6184 rtx cmp_op1, rtx cmp_op2)
6185{
6186 machine_mode mode = GET_MODE (target);
6187 bool neg_p = false, swap_p = false;
6188 rtx tmp;
6189
6190 if (GET_MODE (cmp_op1) == V2DFmode)
6191 {
6192 switch (cond)
6193 {
6194 /* NE a != b -> !(a == b) */
6195 case NE: cond = EQ; neg_p = true; break;
6196 /* UNGT a u> b -> !(b >= a) */
6197 case UNGT: cond = GE; neg_p = true; swap_p = true; break;
6198 /* UNGE a u>= b -> !(b > a) */
6199 case UNGE: cond = GT; neg_p = true; swap_p = true; break;
6200 /* LE: a <= b -> b >= a */
6201 case LE: cond = GE; swap_p = true; break;
6202 /* UNLE: a u<= b -> !(a > b) */
6203 case UNLE: cond = GT; neg_p = true; break;
6204 /* LT: a < b -> b > a */
6205 case LT: cond = GT; swap_p = true; break;
6206 /* UNLT: a u< b -> !(a >= b) */
6207 case UNLT: cond = GE; neg_p = true; break;
6208 case UNEQ:
6209 emit_insn (gen_vec_cmpuneqv2df (target, cmp_op1, cmp_op2));
6210 return;
6211 case LTGT:
6212 emit_insn (gen_vec_cmpltgtv2df (target, cmp_op1, cmp_op2));
6213 return;
6214 case ORDERED:
6215 emit_insn (gen_vec_orderedv2df (target, cmp_op1, cmp_op2));
6216 return;
6217 case UNORDERED:
6218 emit_insn (gen_vec_unorderedv2df (target, cmp_op1, cmp_op2));
6219 return;
6220 default: break;
6221 }
6222 }
6223 else
6224 {
6225 switch (cond)
6226 {
6227 /* NE: a != b -> !(a == b) */
6228 case NE: cond = EQ; neg_p = true; break;
6229 /* GE: a >= b -> !(b > a) */
6230 case GE: cond = GT; neg_p = true; swap_p = true; break;
6231 /* GEU: a >= b -> !(b > a) */
6232 case GEU: cond = GTU; neg_p = true; swap_p = true; break;
6233 /* LE: a <= b -> !(a > b) */
6234 case LE: cond = GT; neg_p = true; break;
6235 /* LEU: a <= b -> !(a > b) */
6236 case LEU: cond = GTU; neg_p = true; break;
6237 /* LT: a < b -> b > a */
6238 case LT: cond = GT; swap_p = true; break;
6239 /* LTU: a < b -> b > a */
6240 case LTU: cond = GTU; swap_p = true; break;
6241 default: break;
6242 }
6243 }
6244
6245 if (swap_p)
6246 {
6247 tmp = cmp_op1; cmp_op1 = cmp_op2; cmp_op2 = tmp;
6248 }
6249
6250 emit_insn (gen_rtx_SET (target, gen_rtx_fmt_ee (cond,
6251 mode,
6252 cmp_op1, cmp_op2)));
6253 if (neg_p)
6254 emit_insn (gen_rtx_SET (target, gen_rtx_NOT (mode, target)));
6255}
6256
07f32359 6257/* Expand the comparison CODE of CMP1 and CMP2 and copy 1 or 0 into
6258 TARGET if either all (ALL_P is true) or any (ALL_P is false) of the
6259 elements in CMP1 and CMP2 fulfill the comparison. */
6260void
6261s390_expand_vec_compare_cc (rtx target, enum rtx_code code,
6262 rtx cmp1, rtx cmp2, bool all_p)
6263{
6264 enum rtx_code new_code = code;
6265 machine_mode cmp_mode, full_cmp_mode, scratch_mode;
6266 rtx tmp_reg = gen_reg_rtx (SImode);
6267 bool swap_p = false;
6268
6269 if (GET_MODE_CLASS (GET_MODE (cmp1)) == MODE_VECTOR_INT)
6270 {
6271 switch (code)
6272 {
6273 case EQ: cmp_mode = CCVEQmode; break;
6274 case NE: cmp_mode = CCVEQmode; break;
6275 case GT: cmp_mode = CCVHmode; break;
6276 case GE: cmp_mode = CCVHmode; new_code = LE; swap_p = true; break;
6277 case LT: cmp_mode = CCVHmode; new_code = GT; swap_p = true; break;
6278 case LE: cmp_mode = CCVHmode; new_code = LE; break;
6279 case GTU: cmp_mode = CCVHUmode; break;
6280 case GEU: cmp_mode = CCVHUmode; new_code = LEU; swap_p = true; break;
6281 case LTU: cmp_mode = CCVHUmode; new_code = GTU; swap_p = true; break;
6282 case LEU: cmp_mode = CCVHUmode; new_code = LEU; break;
6283 default: gcc_unreachable ();
6284 }
6285 scratch_mode = GET_MODE (cmp1);
6286 }
6287 else if (GET_MODE (cmp1) == V2DFmode)
6288 {
6289 switch (code)
6290 {
6291 case EQ: cmp_mode = CCVEQmode; break;
6292 case NE: cmp_mode = CCVEQmode; break;
6293 case GT: cmp_mode = CCVFHmode; break;
6294 case GE: cmp_mode = CCVFHEmode; break;
6295 case UNLE: cmp_mode = CCVFHmode; break;
6296 case UNLT: cmp_mode = CCVFHEmode; break;
6297 case LT: cmp_mode = CCVFHmode; new_code = GT; swap_p = true; break;
6298 case LE: cmp_mode = CCVFHEmode; new_code = GE; swap_p = true; break;
6299 default: gcc_unreachable ();
6300 }
6301 scratch_mode = V2DImode;
6302 }
6303 else
6304 gcc_unreachable ();
6305
6306 if (!all_p)
6307 switch (cmp_mode)
6308 {
6309 case CCVEQmode: full_cmp_mode = CCVEQANYmode; break;
6310 case CCVHmode: full_cmp_mode = CCVHANYmode; break;
6311 case CCVHUmode: full_cmp_mode = CCVHUANYmode; break;
6312 case CCVFHmode: full_cmp_mode = CCVFHANYmode; break;
6313 case CCVFHEmode: full_cmp_mode = CCVFHEANYmode; break;
6314 default: gcc_unreachable ();
6315 }
6316 else
6317 /* The modes without ANY match the ALL modes. */
6318 full_cmp_mode = cmp_mode;
6319
6320 if (swap_p)
6321 {
6322 rtx tmp = cmp2;
6323 cmp2 = cmp1;
6324 cmp1 = tmp;
6325 }
6326
6327 emit_insn (gen_rtx_PARALLEL (VOIDmode,
6328 gen_rtvec (2, gen_rtx_SET (
6329 gen_rtx_REG (cmp_mode, CC_REGNUM),
6330 gen_rtx_COMPARE (cmp_mode, cmp1, cmp2)),
6331 gen_rtx_CLOBBER (VOIDmode,
6332 gen_rtx_SCRATCH (scratch_mode)))));
6333 emit_move_insn (target, const0_rtx);
6334 emit_move_insn (tmp_reg, const1_rtx);
6335
6336 emit_move_insn (target,
6337 gen_rtx_IF_THEN_ELSE (SImode,
6338 gen_rtx_fmt_ee (new_code, VOIDmode,
6339 gen_rtx_REG (full_cmp_mode, CC_REGNUM),
6340 const0_rtx),
6341 target, tmp_reg));
6342}
6343
76a4c804 6344/* Generate a vector comparison expression loading either elements of
6345 THEN or ELS into TARGET depending on the comparison COND of CMP_OP1
6346 and CMP_OP2. */
6347
6348void
6349s390_expand_vcond (rtx target, rtx then, rtx els,
6350 enum rtx_code cond, rtx cmp_op1, rtx cmp_op2)
6351{
6352 rtx tmp;
6353 machine_mode result_mode;
6354 rtx result_target;
6355
651e0407 6356 machine_mode target_mode = GET_MODE (target);
6357 machine_mode cmp_mode = GET_MODE (cmp_op1);
6358 rtx op = (cond == LT) ? els : then;
6359
6360 /* Try to optimize x < 0 ? -1 : 0 into (signed) x >> 31
6361 and x < 0 ? 1 : 0 into (unsigned) x >> 31. Likewise
6362 for short and byte (x >> 15 and x >> 7 respectively). */
6363 if ((cond == LT || cond == GE)
6364 && target_mode == cmp_mode
6365 && cmp_op2 == CONST0_RTX (cmp_mode)
6366 && op == CONST0_RTX (target_mode)
6367 && s390_vector_mode_supported_p (target_mode)
6368 && GET_MODE_CLASS (target_mode) == MODE_VECTOR_INT)
6369 {
6370 rtx negop = (cond == LT) ? then : els;
6371
6372 int shift = GET_MODE_BITSIZE (GET_MODE_INNER (target_mode)) - 1;
6373
6374 /* if x < 0 ? 1 : 0 or if x >= 0 ? 0 : 1 */
6375 if (negop == CONST1_RTX (target_mode))
6376 {
6377 rtx res = expand_simple_binop (cmp_mode, LSHIFTRT, cmp_op1,
6378 GEN_INT (shift), target,
6379 1, OPTAB_DIRECT);
6380 if (res != target)
6381 emit_move_insn (target, res);
6382 return;
6383 }
6384
6385 /* if x < 0 ? -1 : 0 or if x >= 0 ? 0 : -1 */
a991c8aa 6386 else if (all_ones_operand (negop, target_mode))
651e0407 6387 {
6388 rtx res = expand_simple_binop (cmp_mode, ASHIFTRT, cmp_op1,
6389 GEN_INT (shift), target,
6390 0, OPTAB_DIRECT);
6391 if (res != target)
6392 emit_move_insn (target, res);
6393 return;
6394 }
6395 }
6396
76a4c804 6397 /* We always use an integral type vector to hold the comparison
6398 result. */
651e0407 6399 result_mode = cmp_mode == V2DFmode ? V2DImode : cmp_mode;
76a4c804 6400 result_target = gen_reg_rtx (result_mode);
6401
651e0407 6402 /* We allow vector immediates as comparison operands that
6403 can be handled by the optimization above but not by the
6404 following code. Hence, force them into registers here. */
76a4c804 6405 if (!REG_P (cmp_op1))
b088ff4b 6406 cmp_op1 = force_reg (GET_MODE (cmp_op1), cmp_op1);
76a4c804 6407
6408 if (!REG_P (cmp_op2))
b088ff4b 6409 cmp_op2 = force_reg (GET_MODE (cmp_op2), cmp_op2);
76a4c804 6410
6411 s390_expand_vec_compare (result_target, cond,
6412 cmp_op1, cmp_op2);
6413
6414 /* If the results are supposed to be either -1 or 0 we are done
6415 since this is what our compare instructions generate anyway. */
a991c8aa 6416 if (all_ones_operand (then, GET_MODE (then))
76a4c804 6417 && const0_operand (els, GET_MODE (els)))
6418 {
651e0407 6419 emit_move_insn (target, gen_rtx_SUBREG (target_mode,
76a4c804 6420 result_target, 0));
6421 return;
6422 }
6423
6424 /* Otherwise we will do a vsel afterwards. */
6425 /* This gets triggered e.g.
6426 with gcc.c-torture/compile/pr53410-1.c */
6427 if (!REG_P (then))
651e0407 6428 then = force_reg (target_mode, then);
76a4c804 6429
6430 if (!REG_P (els))
651e0407 6431 els = force_reg (target_mode, els);
76a4c804 6432
6433 tmp = gen_rtx_fmt_ee (EQ, VOIDmode,
6434 result_target,
6435 CONST0_RTX (result_mode));
6436
6437 /* We compared the result against zero above so we have to swap then
6438 and els here. */
651e0407 6439 tmp = gen_rtx_IF_THEN_ELSE (target_mode, tmp, els, then);
76a4c804 6440
651e0407 6441 gcc_assert (target_mode == GET_MODE (then));
76a4c804 6442 emit_insn (gen_rtx_SET (target, tmp));
6443}
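/* Illustrative user-level example, added for clarity (assumes the GNU
   vector extension and a vector-capable target such as z13): for the
   comparison below the special case above emits a single vector
   arithmetic shift right by 31 instead of a compare plus select.  */
typedef int v4si_example __attribute__ ((vector_size (16)));

static v4si_example
negative_mask (v4si_example x)
{
  return x < (v4si_example) { 0, 0, 0, 0 };  /* -1 where negative, else 0 */
}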
6444
6445/* Emit the RTX necessary to initialize the vector TARGET with values
6446 in VALS. */
6447void
6448s390_expand_vec_init (rtx target, rtx vals)
6449{
6450 machine_mode mode = GET_MODE (target);
6451 machine_mode inner_mode = GET_MODE_INNER (mode);
6452 int n_elts = GET_MODE_NUNITS (mode);
6453 bool all_same = true, all_regs = true, all_const_int = true;
6454 rtx x;
6455 int i;
6456
6457 for (i = 0; i < n_elts; ++i)
6458 {
6459 x = XVECEXP (vals, 0, i);
6460
6461 if (!CONST_INT_P (x))
6462 all_const_int = false;
6463
6464 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
6465 all_same = false;
6466
6467 if (!REG_P (x))
6468 all_regs = false;
6469 }
6470
6471 /* Use vector gen mask or vector gen byte mask if possible. */
6472 if (all_same && all_const_int
6473 && (XVECEXP (vals, 0, 0) == const0_rtx
6474 || s390_contiguous_bitmask_vector_p (XVECEXP (vals, 0, 0),
6475 NULL, NULL)
6476 || s390_bytemask_vector_p (XVECEXP (vals, 0, 0), NULL)))
6477 {
6478 emit_insn (gen_rtx_SET (target,
6479 gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0))));
6480 return;
6481 }
6482
6483 if (all_same)
6484 {
6485 emit_insn (gen_rtx_SET (target,
6486 gen_rtx_VEC_DUPLICATE (mode,
6487 XVECEXP (vals, 0, 0))));
6488 return;
6489 }
6490
6491 if (all_regs && REG_P (target) && n_elts == 2 && inner_mode == DImode)
6492 {
6493 /* Use vector load pair. */
6494 emit_insn (gen_rtx_SET (target,
6495 gen_rtx_VEC_CONCAT (mode,
6496 XVECEXP (vals, 0, 0),
6497 XVECEXP (vals, 0, 1))));
6498 return;
6499 }
6500
6501 /* We are about to set the vector elements one by one. Zero out the
6502     full register first in order to help the data flow framework
6503     detect it as a full VR set. */
6504 emit_insn (gen_rtx_SET (target, CONST0_RTX (mode)));
6505
6506 /* Unfortunately the vec_init expander is not allowed to fail. So
6507 we have to implement the fallback ourselves. */
6508 for (i = 0; i < n_elts; i++)
4ba2579e 6509 {
6510 rtx elem = XVECEXP (vals, 0, i);
6511 if (!general_operand (elem, GET_MODE (elem)))
6512 elem = force_reg (inner_mode, elem);
6513
6514 emit_insn (gen_rtx_SET (target,
6515 gen_rtx_UNSPEC (mode,
6516 gen_rtvec (3, elem,
6517 GEN_INT (i), target),
6518 UNSPEC_VEC_SET)));
6519 }
76a4c804 6520}
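/* Examples for the paths above, added for illustration: an all-zero
   V4SI initializer is emitted directly as a CONST_VECTOR; { x, x, x, x }
   with x in a register becomes a single VEC_DUPLICATE; a V2DI { a, b }
   with both elements in registers takes the vector-load-pair path; and
   anything else zeroes the register first and inserts the elements one
   by one via UNSPEC_VEC_SET.  */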
6521
182f815e 6522/* Structure to hold the initial parameters for a compare_and_swap operation
ffead1ca 6523 in HImode and QImode. */
182f815e 6524
6525struct alignment_context
6526{
ffead1ca 6527 rtx memsi; /* SI aligned memory location. */
182f815e 6528 rtx shift; /* Bit offset with regard to lsb. */
6529 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */
6530 rtx modemaski; /* ~modemask */
191ec5a2 6531  bool aligned; /* True if memory is aligned, false otherwise. */
182f815e 6532};
6533
7cc66daf 6534/* A subroutine of s390_expand_cs_hqi and s390_expand_atomic to initialize
6535   structure AC, transparently simplifying the access if the memory
6536   alignment is known to be at least 32 bit.  MEM is the memory location
6537   for the actual operation and MODE its mode. */
182f815e 6538
6539static void
6540init_alignment_context (struct alignment_context *ac, rtx mem,
3754d046 6541 machine_mode mode)
182f815e 6542{
6543 ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
6544 ac->aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
6545
6546 if (ac->aligned)
6547 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */
6548 else
6549 {
6550 /* Alignment is unknown. */
6551 rtx byteoffset, addr, align;
6552
6553 /* Force the address into a register. */
6554 addr = force_reg (Pmode, XEXP (mem, 0));
6555
6556 /* Align it to SImode. */
6557 align = expand_simple_binop (Pmode, AND, addr,
6558 GEN_INT (-GET_MODE_SIZE (SImode)),
6559 NULL_RTX, 1, OPTAB_DIRECT);
6560 /* Generate MEM. */
6561 ac->memsi = gen_rtx_MEM (SImode, align);
6562 MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
bd1da572 6563 set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
182f815e 6564 set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));
6565
6566 /* Calculate shiftcount. */
6567 byteoffset = expand_simple_binop (Pmode, AND, addr,
6568 GEN_INT (GET_MODE_SIZE (SImode) - 1),
6569 NULL_RTX, 1, OPTAB_DIRECT);
6570 /* As we already have some offset, evaluate the remaining distance. */
6571 ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
6572 NULL_RTX, 1, OPTAB_DIRECT);
182f815e 6573 }
8c753480 6574
182f815e 6575 /* Shift is the byte count, but we need the bitcount. */
8c753480 6576 ac->shift = expand_simple_binop (SImode, ASHIFT, ac->shift, GEN_INT (3),
6577 NULL_RTX, 1, OPTAB_DIRECT);
6578
182f815e 6579 /* Calculate masks. */
ffead1ca 6580 ac->modemask = expand_simple_binop (SImode, ASHIFT,
8c753480 6581 GEN_INT (GET_MODE_MASK (mode)),
6582 ac->shift, NULL_RTX, 1, OPTAB_DIRECT);
6583 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask,
6584 NULL_RTX, 1);
6585}
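/* Worked example, added for illustration: for an HImode MEM at an
   address ADDR of unknown alignment with ADDR % 4 == 2, the code above
   forms MEMSI at ADDR & -4, computes byteoffset = 2 and hence
   shift = (4 - 2) - 2 = 0 bytes; on this big-endian target the
   halfword occupies the low 16 bits of the word and modemask becomes
   0xffff.  For ADDR % 4 == 0 the shift is 16 bits and modemask is
   0xffff0000.  */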
6586
6587/* A subroutine of s390_expand_cs_hqi. Insert INS into VAL. If possible,
6588 use a single insv insn into SEQ2. Otherwise, put prep insns in SEQ1 and
6589 perform the merge in SEQ2. */
6590
6591static rtx
6592s390_two_part_insv (struct alignment_context *ac, rtx *seq1, rtx *seq2,
3754d046 6593 machine_mode mode, rtx val, rtx ins)
8c753480 6594{
6595 rtx tmp;
6596
6597 if (ac->aligned)
6598 {
6599 start_sequence ();
6600 tmp = copy_to_mode_reg (SImode, val);
6601 if (s390_expand_insv (tmp, GEN_INT (GET_MODE_BITSIZE (mode)),
6602 const0_rtx, ins))
6603 {
6604 *seq1 = NULL;
6605 *seq2 = get_insns ();
6606 end_sequence ();
6607 return tmp;
6608 }
6609 end_sequence ();
6610 }
6611
6612 /* Failed to use insv. Generate a two part shift and mask. */
6613 start_sequence ();
6614 tmp = s390_expand_mask_and_shift (ins, mode, ac->shift);
6615 *seq1 = get_insns ();
6616 end_sequence ();
6617
6618 start_sequence ();
6619 tmp = expand_simple_binop (SImode, IOR, tmp, val, NULL_RTX, 1, OPTAB_DIRECT);
6620 *seq2 = get_insns ();
6621 end_sequence ();
6622
6623 return tmp;
182f815e 6624}
6625
6626/* Expand an atomic compare and swap operation for HImode and QImode. MEM is
8c753480 6627 the memory location, CMP the old value to compare MEM with and NEW_RTX the
6628 value to set if CMP == MEM. */
182f815e 6629
6630void
3754d046 6631s390_expand_cs_hqi (machine_mode mode, rtx btarget, rtx vtarget, rtx mem,
8c753480 6632 rtx cmp, rtx new_rtx, bool is_weak)
182f815e 6633{
6634 struct alignment_context ac;
77e58889 6635 rtx cmpv, newv, val, cc, seq0, seq1, seq2, seq3;
182f815e 6636 rtx res = gen_reg_rtx (SImode);
79f6a8ed 6637 rtx_code_label *csloop = NULL, *csend = NULL;
182f815e 6638
182f815e 6639 gcc_assert (MEM_P (mem));
6640
6641 init_alignment_context (&ac, mem, mode);
6642
182f815e 6643 /* Load full word. Subsequent loads are performed by CS. */
6644 val = expand_simple_binop (SImode, AND, ac.memsi, ac.modemaski,
6645 NULL_RTX, 1, OPTAB_DIRECT);
6646
8c753480 6647 /* Prepare insertions of cmp and new_rtx into the loaded value. When
6648 possible, we try to use insv to make this happen efficiently. If
6649 that fails we'll generate code both inside and outside the loop. */
6650 cmpv = s390_two_part_insv (&ac, &seq0, &seq2, mode, val, cmp);
6651 newv = s390_two_part_insv (&ac, &seq1, &seq3, mode, val, new_rtx);
6652
6653 if (seq0)
6654 emit_insn (seq0);
6655 if (seq1)
6656 emit_insn (seq1);
6657
182f815e 6658 /* Start CS loop. */
8c753480 6659 if (!is_weak)
6660 {
6661 /* Begin assuming success. */
6662 emit_move_insn (btarget, const1_rtx);
6663
6664 csloop = gen_label_rtx ();
6665 csend = gen_label_rtx ();
6666 emit_label (csloop);
6667 }
6668
ffead1ca 6669 /* val = "<mem>00..0<mem>"
182f815e 6670 * cmp = "00..0<cmp>00..0"
ffead1ca 6671 * new = "00..0<new>00..0"
182f815e 6672 */
6673
8c753480 6674 emit_insn (seq2);
6675 emit_insn (seq3);
6676
6677 cc = s390_emit_compare_and_swap (EQ, res, ac.memsi, cmpv, newv);
6678 if (is_weak)
6679 emit_insn (gen_cstorecc4 (btarget, cc, XEXP (cc, 0), XEXP (cc, 1)));
182f815e 6680 else
182f815e 6681 {
77e58889 6682 rtx tmp;
6683
8c753480 6684 /* Jump to end if we're done (likely?). */
6685 s390_emit_jump (csend, cc);
6686
77e58889 6687 /* Check for changes outside mode, and loop internal if so.
6688 Arrange the moves so that the compare is adjacent to the
6689 branch so that we can generate CRJ. */
6690 tmp = copy_to_reg (val);
6691 force_expand_binop (SImode, and_optab, res, ac.modemaski, val,
6692 1, OPTAB_DIRECT);
6693 cc = s390_emit_compare (NE, val, tmp);
8c753480 6694 s390_emit_jump (csloop, cc);
6695
6696 /* Failed. */
6697 emit_move_insn (btarget, const0_rtx);
6698 emit_label (csend);
182f815e 6699 }
ffead1ca 6700
182f815e 6701 /* Return the correct part of the bitfield. */
8c753480 6702 convert_move (vtarget, expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
6703 NULL_RTX, 1, OPTAB_DIRECT), 1);
182f815e 6704}
6705
7cc66daf 6706/* Expand an atomic operation CODE of mode MODE. MEM is the memory location
85694bac 6707 and VAL the value to play with. If AFTER is true then store the value
7cc66daf 6708   MEM holds after the operation; if AFTER is false then store the value MEM
6709 holds before the operation. If TARGET is zero then discard that value, else
6710 store it to TARGET. */
6711
6712void
3754d046 6713s390_expand_atomic (machine_mode mode, enum rtx_code code,
7cc66daf 6714 rtx target, rtx mem, rtx val, bool after)
6715{
6716 struct alignment_context ac;
6717 rtx cmp;
8deb3959 6718 rtx new_rtx = gen_reg_rtx (SImode);
7cc66daf 6719 rtx orig = gen_reg_rtx (SImode);
79f6a8ed 6720 rtx_code_label *csloop = gen_label_rtx ();
7cc66daf 6721
6722 gcc_assert (!target || register_operand (target, VOIDmode));
6723 gcc_assert (MEM_P (mem));
6724
6725 init_alignment_context (&ac, mem, mode);
6726
6727 /* Shift val to the correct bit positions.
6728 Preserve "icm", but prevent "ex icm". */
6729 if (!(ac.aligned && code == SET && MEM_P (val)))
6730 val = s390_expand_mask_and_shift (val, mode, ac.shift);
6731
6732 /* Further preparation insns. */
6733 if (code == PLUS || code == MINUS)
6734 emit_move_insn (orig, val);
6735 else if (code == MULT || code == AND) /* val = "11..1<val>11..1" */
6736 val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
6737 NULL_RTX, 1, OPTAB_DIRECT);
6738
6739 /* Load full word. Subsequent loads are performed by CS. */
6740 cmp = force_reg (SImode, ac.memsi);
6741
6742 /* Start CS loop. */
6743 emit_label (csloop);
8deb3959 6744 emit_move_insn (new_rtx, cmp);
7cc66daf 6745
6746 /* Patch new with val at correct position. */
6747 switch (code)
6748 {
6749 case PLUS:
6750 case MINUS:
8deb3959 6751 val = expand_simple_binop (SImode, code, new_rtx, orig,
7cc66daf 6752 NULL_RTX, 1, OPTAB_DIRECT);
6753 val = expand_simple_binop (SImode, AND, val, ac.modemask,
6754 NULL_RTX, 1, OPTAB_DIRECT);
6755 /* FALLTHRU */
ffead1ca 6756 case SET:
7cc66daf 6757 if (ac.aligned && MEM_P (val))
b634c730 6758 store_bit_field (new_rtx, GET_MODE_BITSIZE (mode), 0,
292237f3 6759 0, 0, SImode, val, false);
7cc66daf 6760 else
6761 {
8deb3959 6762 new_rtx = expand_simple_binop (SImode, AND, new_rtx, ac.modemaski,
7cc66daf 6763 NULL_RTX, 1, OPTAB_DIRECT);
8deb3959 6764 new_rtx = expand_simple_binop (SImode, IOR, new_rtx, val,
7cc66daf 6765 NULL_RTX, 1, OPTAB_DIRECT);
6766 }
6767 break;
6768 case AND:
6769 case IOR:
6770 case XOR:
8deb3959 6771 new_rtx = expand_simple_binop (SImode, code, new_rtx, val,
7cc66daf 6772 NULL_RTX, 1, OPTAB_DIRECT);
6773 break;
6774 case MULT: /* NAND */
8deb3959 6775 new_rtx = expand_simple_binop (SImode, AND, new_rtx, val,
7cc66daf 6776 NULL_RTX, 1, OPTAB_DIRECT);
636c17b8 6777 new_rtx = expand_simple_binop (SImode, XOR, new_rtx, ac.modemask,
6778 NULL_RTX, 1, OPTAB_DIRECT);
7cc66daf 6779 break;
6780 default:
6781 gcc_unreachable ();
6782 }
7cc66daf 6783
db1f11e3 6784 s390_emit_jump (csloop, s390_emit_compare_and_swap (NE, cmp,
8deb3959 6785 ac.memsi, cmp, new_rtx));
7cc66daf 6786
6787 /* Return the correct part of the bitfield. */
6788 if (target)
6789 convert_move (target, expand_simple_binop (SImode, LSHIFTRT,
8deb3959 6790 after ? new_rtx : cmp, ac.shift,
7cc66daf 6791 NULL_RTX, 1, OPTAB_DIRECT), 1);
6792}
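/* Illustrative user-level example, added for clarity (not part of the
   original sources): subword atomics like the one below have no direct
   hardware instruction and are expanded by s390_expand_atomic into a
   compare-and-swap loop on the containing 32-bit word.  */
static unsigned char
fetch_or_byte (unsigned char *p, unsigned char v)
{
  return __atomic_fetch_or (p, v, __ATOMIC_SEQ_CST);
}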
6793
40af64cc 6794/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
03c118d5 6795 We need to emit DTP-relative relocations. */
6796
40af64cc 6797static void s390_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
6798
6799static void
b40da9a7 6800s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
03c118d5 6801{
6802 switch (size)
6803 {
6804 case 4:
6805 fputs ("\t.long\t", file);
6806 break;
6807 case 8:
6808 fputs ("\t.quad\t", file);
6809 break;
6810 default:
32eda510 6811 gcc_unreachable ();
03c118d5 6812 }
6813 output_addr_const (file, x);
6814 fputs ("@DTPOFF", file);
6815}
6816
76a4c804 6817/* Return the proper mode for REGNO being represented in the dwarf
6818 unwind table. */
6819machine_mode
6820s390_dwarf_frame_reg_mode (int regno)
6821{
6822 machine_mode save_mode = default_dwarf_frame_reg_mode (regno);
6823
52de7525 6824 /* Make sure not to return DImode for any GPR with -m31 -mzarch. */
6825 if (GENERAL_REGNO_P (regno))
6826 save_mode = Pmode;
6827
76a4c804 6828 /* The rightmost 64 bits of vector registers are call-clobbered. */
6829 if (GET_MODE_SIZE (save_mode) > 8)
6830 save_mode = DImode;
6831
6832 return save_mode;
6833}
6834
4257b08a 6835#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
eddcdde1 6836/* Implement TARGET_MANGLE_TYPE. */
4257b08a 6837
6838static const char *
a9f1838b 6839s390_mangle_type (const_tree type)
4257b08a 6840{
07f32359 6841 type = TYPE_MAIN_VARIANT (type);
6842
6843 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
6844 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
6845 return NULL;
6846
6847 if (type == s390_builtin_types[BT_BV16QI]) return "U6__boolc";
6848 if (type == s390_builtin_types[BT_BV8HI]) return "U6__bools";
6849 if (type == s390_builtin_types[BT_BV4SI]) return "U6__booli";
6850 if (type == s390_builtin_types[BT_BV2DI]) return "U6__booll";
6851
4257b08a 6852 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
6853 && TARGET_LONG_DOUBLE_128)
6854 return "g";
6855
6856 /* For all other types, use normal C++ mangling. */
6857 return NULL;
6858}
6859#endif
6860
e93986bb 6861/* In the name of slightly smaller debug output, and to cater to
06b27565 6862 general assembler lossage, recognize various UNSPEC sequences
e93986bb 6863 and turn them back into a direct symbol reference. */
6864
07576557 6865static rtx
b40da9a7 6866s390_delegitimize_address (rtx orig_x)
e93986bb 6867{
3b6b647c 6868 rtx x, y;
e93986bb 6869
3b6b647c 6870 orig_x = delegitimize_mem_from_attrs (orig_x);
6871 x = orig_x;
4796d433 6872
6873 /* Extract the symbol ref from:
6874 (plus:SI (reg:SI 12 %r12)
6875 (const:SI (unspec:SI [(symbol_ref/f:SI ("*.LC0"))]
2b2b857a 6876 UNSPEC_GOTOFF/PLTOFF)))
6877 and
6878 (plus:SI (reg:SI 12 %r12)
6879 (const:SI (plus:SI (unspec:SI [(symbol_ref:SI ("L"))]
6880 UNSPEC_GOTOFF/PLTOFF)
6881 (const_int 4 [0x4])))) */
4796d433 6882 if (GET_CODE (x) == PLUS
6883 && REG_P (XEXP (x, 0))
6884 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM
6885 && GET_CODE (XEXP (x, 1)) == CONST)
6886 {
b6382e93 6887 HOST_WIDE_INT offset = 0;
6888
4796d433 6889 /* The const operand. */
6890 y = XEXP (XEXP (x, 1), 0);
2b2b857a 6891
6892 if (GET_CODE (y) == PLUS
6893 && GET_CODE (XEXP (y, 1)) == CONST_INT)
b6382e93 6894 {
6895 offset = INTVAL (XEXP (y, 1));
6896 y = XEXP (y, 0);
6897 }
2b2b857a 6898
4796d433 6899 if (GET_CODE (y) == UNSPEC
2b2b857a 6900 && (XINT (y, 1) == UNSPEC_GOTOFF
6901 || XINT (y, 1) == UNSPEC_PLTOFF))
29c05e22 6902 return plus_constant (Pmode, XVECEXP (y, 0, 0), offset);
4796d433 6903 }
6904
e93986bb 6905 if (GET_CODE (x) != MEM)
6906 return orig_x;
6907
6908 x = XEXP (x, 0);
6909 if (GET_CODE (x) == PLUS
6910 && GET_CODE (XEXP (x, 1)) == CONST
6911 && GET_CODE (XEXP (x, 0)) == REG
6912 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
6913 {
6914 y = XEXP (XEXP (x, 1), 0);
6915 if (GET_CODE (y) == UNSPEC
12ef3745 6916 && XINT (y, 1) == UNSPEC_GOT)
54cb44a3 6917 y = XVECEXP (y, 0, 0);
6918 else
6919 return orig_x;
e93986bb 6920 }
54cb44a3 6921 else if (GET_CODE (x) == CONST)
e93986bb 6922 {
2b2b857a 6923 /* Extract the symbol ref from:
6924 (mem:QI (const:DI (unspec:DI [(symbol_ref:DI ("foo"))]
6925 UNSPEC_PLT/GOTENT))) */
6926
e93986bb 6927 y = XEXP (x, 0);
6928 if (GET_CODE (y) == UNSPEC
2b2b857a 6929 && (XINT (y, 1) == UNSPEC_GOTENT
6930 || XINT (y, 1) == UNSPEC_PLT))
54cb44a3 6931 y = XVECEXP (y, 0, 0);
6932 else
6933 return orig_x;
e93986bb 6934 }
54cb44a3 6935 else
6936 return orig_x;
e93986bb 6937
54cb44a3 6938 if (GET_MODE (orig_x) != Pmode)
6939 {
2b03de53 6940 if (GET_MODE (orig_x) == BLKmode)
6941 return orig_x;
54cb44a3 6942 y = lowpart_subreg (GET_MODE (orig_x), y, Pmode);
6943 if (y == NULL_RTX)
6944 return orig_x;
6945 }
6946 return y;
e93986bb 6947}
2eb8fe23 6948
805a133b 6949/* Output operand OP to stdio stream FILE.
6950 OP is an address (register + offset) which is not used to address data;
6951 instead the rightmost bits are interpreted as the value. */
63ebd742 6952
6953static void
2be7449b 6954print_addrstyle_operand (FILE *file, rtx op)
63ebd742 6955{
6d6be381 6956 HOST_WIDE_INT offset;
6957 rtx base;
9a09ba70 6958
6d6be381 6959 /* Extract base register and offset. */
2be7449b 6960 if (!s390_decompose_addrstyle_without_index (op, &base, &offset))
6d6be381 6961 gcc_unreachable ();
63ebd742 6962
6963 /* Sanity check. */
6d6be381 6964 if (base)
32eda510 6965 {
6d6be381 6966 gcc_assert (GET_CODE (base) == REG);
6967 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
6968 gcc_assert (REGNO_REG_CLASS (REGNO (base)) == ADDR_REGS);
32eda510 6969 }
63ebd742 6970
805a133b 6971 /* Offsets are restricted to twelve bits. */
6972 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & ((1 << 12) - 1));
6d6be381 6973 if (base)
6974 fprintf (file, "(%s)", reg_names[REGNO (base)]);
63ebd742 6975}
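/* For example, OP == (plus:SI (reg:SI %r2) (const_int 4100)) prints as
   "4(%r2)", since 4100 & 0xfff == 4 and %r2 is the base register.  */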
6976
06877232 6977/* Assigns the number of NOP halfwords to be emitted before and after the
6978 function label to *HW_BEFORE and *HW_AFTER. Neither pointer may be NULL.
6979 If hotpatching is disabled for the function, the values are set to
6980 zero. */
77bc9912 6981
06877232 6982static void
11762b83 6983s390_function_num_hotpatch_hw (tree decl,
6984 int *hw_before,
6985 int *hw_after)
77bc9912 6986{
6987 tree attr;
6988
11762b83 6989 attr = lookup_attribute ("hotpatch", DECL_ATTRIBUTES (decl));
6990
6991 /* Handle the arguments of the hotpatch attribute. The values
6992 specified via attribute might override the cmdline argument
6993 values. */
6994 if (attr)
77bc9912 6995 {
11762b83 6996 tree args = TREE_VALUE (attr);
6997
6998 *hw_before = TREE_INT_CST_LOW (TREE_VALUE (args));
6999 *hw_after = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args)));
77bc9912 7000 }
11762b83 7001 else
77bc9912 7002 {
11762b83 7003 /* Use the values specified by the cmdline arguments. */
7004 *hw_before = s390_hotpatch_hw_before_label;
7005 *hw_after = s390_hotpatch_hw_after_label;
77bc9912 7006 }
77bc9912 7007}
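/* The per-function values come from the hotpatch attribute, e.g.

       void __attribute__ ((hotpatch (1, 2))) foo (void);

   requests one halfword of NOPs before and two halfwords after the
   function label, overriding any -mhotpatch= command line setting.  */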
7008
7a0cee35 7009/* Write the current .machine and .machinemode specification to the assembler
7010 file. */
7011
14d7e7e6 7012#ifdef HAVE_AS_MACHINE_MACHINEMODE
7a0cee35 7013static void
7014s390_asm_output_machine_for_arch (FILE *asm_out_file)
7015{
7016 fprintf (asm_out_file, "\t.machinemode %s\n",
7017 (TARGET_ZARCH) ? "zarch" : "esa");
7018 fprintf (asm_out_file, "\t.machine \"%s", processor_table[s390_arch].name);
7019 if (S390_USE_ARCHITECTURE_MODIFIERS)
7020 {
7021 int cpu_flags;
7022
7023 cpu_flags = processor_flags_table[(int) s390_arch];
7024 if (TARGET_HTM && !(cpu_flags & PF_TX))
7025 fprintf (asm_out_file, "+htm");
7026 else if (!TARGET_HTM && (cpu_flags & PF_TX))
7027 fprintf (asm_out_file, "+nohtm");
7028 if (TARGET_VX && !(cpu_flags & PF_VX))
7029 fprintf (asm_out_file, "+vx");
7030 else if (!TARGET_VX && (cpu_flags & PF_VX))
7031 fprintf (asm_out_file, "+novx");
7032 }
7033 fprintf (asm_out_file, "\"\n");
7034}
7035
7036/* Write an extra function header before the very start of the function. */
7037
7038void
7039s390_asm_output_function_prefix (FILE *asm_out_file,
7040 const char *fnname ATTRIBUTE_UNUSED)
7041{
7042 if (DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl) == NULL)
7043 return;
7044 /* Since only the function specific options are saved but not the indications
7045 which options are set, it's too much work here to figure out which options
7046 have actually changed. Thus, generate .machine and .machinemode whenever a
7047 function has the target attribute or pragma. */
7048 fprintf (asm_out_file, "\t.machinemode push\n");
7049 fprintf (asm_out_file, "\t.machine push\n");
7050 s390_asm_output_machine_for_arch (asm_out_file);
7051}
7052
7053/* Write an extra function footer after the very end of the function. */
7054
7055void
7056s390_asm_declare_function_size (FILE *asm_out_file,
0491d54f 7057 const char *fnname, tree decl)
7a0cee35 7058{
0491d54f 7059 if (!flag_inhibit_size_directive)
7060 ASM_OUTPUT_MEASURED_SIZE (asm_out_file, fnname);
7a0cee35 7061 if (DECL_FUNCTION_SPECIFIC_TARGET (decl) == NULL)
7062 return;
7063 fprintf (asm_out_file, "\t.machine pop\n");
7064 fprintf (asm_out_file, "\t.machinemode pop\n");
7065}
7066#endif
7067
77bc9912 7068/* Write the extra assembler code needed to declare a function properly. */
7069
7070void
7071s390_asm_output_function_label (FILE *asm_out_file, const char *fname,
7072 tree decl)
7073{
11762b83 7074 int hw_before, hw_after;
77bc9912 7075
06877232 7076 s390_function_num_hotpatch_hw (decl, &hw_before, &hw_after);
7077 if (hw_before > 0)
77bc9912 7078 {
f4252e72 7079 unsigned int function_alignment;
77bc9912 7080 int i;
7081
7082 /* Add a trampoline code area before the function label and initialize it
7083 with two-byte nop instructions. This area can be overwritten with code
7084 that jumps to a patched version of the function. */
06877232 7085 asm_fprintf (asm_out_file, "\tnopr\t%%r7"
7086 "\t# pre-label NOPs for hotpatch (%d halfwords)\n",
7087 hw_before);
7088 for (i = 1; i < hw_before; i++)
7089 fputs ("\tnopr\t%r7\n", asm_out_file);
7090
77bc9912 7091 /* Note: The function label must be aligned so that (a) the bytes of the
7092 following nop do not cross a cacheline boundary, and (b) a jump address
7093 (eight bytes for 64 bit targets, 4 bytes for 32 bit targets) can be
7094 stored directly before the label without crossing a cacheline
7095 boundary. All this is necessary to make sure the trampoline code can
06877232 7096 be changed atomically.
7097 This alignment is done automatically using the FUNCTION_BOUNDARY, but
7098 if there are NOPs before the function label, the alignment is placed
7099 before them. So it is necessary to duplicate the alignment after the
7100 NOPs. */
f4252e72 7101 function_alignment = MAX (8, DECL_ALIGN (decl) / BITS_PER_UNIT);
7102 if (! DECL_USER_ALIGN (decl))
7103 function_alignment = MAX (function_alignment,
7104 (unsigned int) align_functions);
06877232 7105 fputs ("\t# alignment for hotpatch\n", asm_out_file);
f4252e72 7106 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (function_alignment));
77bc9912 7107 }
7108
7a0cee35 7109 if (S390_USE_TARGET_ATTRIBUTE && TARGET_DEBUG_ARG)
7110 {
7111 asm_fprintf (asm_out_file, "\t# fn:%s ar%d\n", fname, s390_arch);
7112 asm_fprintf (asm_out_file, "\t# fn:%s tu%d\n", fname, s390_tune);
7113 asm_fprintf (asm_out_file, "\t# fn:%s sg%d\n", fname, s390_stack_guard);
7114 asm_fprintf (asm_out_file, "\t# fn:%s ss%d\n", fname, s390_stack_size);
7115 asm_fprintf (asm_out_file, "\t# fn:%s bc%d\n", fname, s390_branch_cost);
7116 asm_fprintf (asm_out_file, "\t# fn:%s wf%d\n", fname,
7117 s390_warn_framesize);
7118 asm_fprintf (asm_out_file, "\t# fn:%s ba%d\n", fname, TARGET_BACKCHAIN);
7119 asm_fprintf (asm_out_file, "\t# fn:%s hd%d\n", fname, TARGET_HARD_DFP);
7120 asm_fprintf (asm_out_file, "\t# fn:%s hf%d\n", fname, !TARGET_SOFT_FLOAT);
7121 asm_fprintf (asm_out_file, "\t# fn:%s ht%d\n", fname, TARGET_OPT_HTM);
7122 asm_fprintf (asm_out_file, "\t# fn:%s vx%d\n", fname, TARGET_OPT_VX);
7123 asm_fprintf (asm_out_file, "\t# fn:%s ps%d\n", fname,
7124 TARGET_PACKED_STACK);
7125 asm_fprintf (asm_out_file, "\t# fn:%s se%d\n", fname, TARGET_SMALL_EXEC);
7126 asm_fprintf (asm_out_file, "\t# fn:%s mv%d\n", fname, TARGET_MVCLE);
7127 asm_fprintf (asm_out_file, "\t# fn:%s zv%d\n", fname, TARGET_ZVECTOR);
7128 asm_fprintf (asm_out_file, "\t# fn:%s wd%d\n", fname,
7129 s390_warn_dynamicstack_p);
7130 }
77bc9912 7131 ASM_OUTPUT_LABEL (asm_out_file, fname);
06877232 7132 if (hw_after > 0)
7133 asm_fprintf (asm_out_file,
7134 "\t# post-label NOPs for hotpatch (%d halfwords)\n",
7135 hw_after);
77bc9912 7136}
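/* With hw_before == 2 and hw_after == 1 the emitted assembly looks
   roughly like this (only the annotating comment is printed for the
   post-label halfwords here):

       nopr    %r7     # pre-label NOPs for hotpatch (2 halfwords)
       nopr    %r7
       # alignment for hotpatch
       .align  8
   foo:
       # post-label NOPs for hotpatch (1 halfwords)  */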
7137
f588eb9f 7138/* Output machine-dependent UNSPECs occurring in address constant X
74d2529d 7139 in assembler syntax to stdio stream FILE. Returns true if the
7140 constant X could be recognized, false otherwise. */
4673c1a0 7141
1a561788 7142static bool
74d2529d 7143s390_output_addr_const_extra (FILE *file, rtx x)
4673c1a0 7144{
74d2529d 7145 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
7146 switch (XINT (x, 1))
7147 {
7148 case UNSPEC_GOTENT:
7149 output_addr_const (file, XVECEXP (x, 0, 0));
7150 fprintf (file, "@GOTENT");
7151 return true;
7152 case UNSPEC_GOT:
7153 output_addr_const (file, XVECEXP (x, 0, 0));
7154 fprintf (file, "@GOT");
7155 return true;
7156 case UNSPEC_GOTOFF:
7157 output_addr_const (file, XVECEXP (x, 0, 0));
7158 fprintf (file, "@GOTOFF");
7159 return true;
7160 case UNSPEC_PLT:
7161 output_addr_const (file, XVECEXP (x, 0, 0));
7162 fprintf (file, "@PLT");
7163 return true;
7164 case UNSPEC_PLTOFF:
7165 output_addr_const (file, XVECEXP (x, 0, 0));
7166 fprintf (file, "@PLTOFF");
7167 return true;
7168 case UNSPEC_TLSGD:
7169 output_addr_const (file, XVECEXP (x, 0, 0));
7170 fprintf (file, "@TLSGD");
7171 return true;
7172 case UNSPEC_TLSLDM:
7173 assemble_name (file, get_some_local_dynamic_name ());
7174 fprintf (file, "@TLSLDM");
7175 return true;
7176 case UNSPEC_DTPOFF:
7177 output_addr_const (file, XVECEXP (x, 0, 0));
7178 fprintf (file, "@DTPOFF");
7179 return true;
7180 case UNSPEC_NTPOFF:
7181 output_addr_const (file, XVECEXP (x, 0, 0));
7182 fprintf (file, "@NTPOFF");
7183 return true;
7184 case UNSPEC_GOTNTPOFF:
7185 output_addr_const (file, XVECEXP (x, 0, 0));
7186 fprintf (file, "@GOTNTPOFF");
7187 return true;
7188 case UNSPEC_INDNTPOFF:
7189 output_addr_const (file, XVECEXP (x, 0, 0));
7190 fprintf (file, "@INDNTPOFF");
7191 return true;
7192 }
4673c1a0 7193
1ed7a160 7194 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 2)
7195 switch (XINT (x, 1))
7196 {
7197 case UNSPEC_POOL_OFFSET:
7198 x = gen_rtx_MINUS (GET_MODE (x), XVECEXP (x, 0, 0), XVECEXP (x, 0, 1));
7199 output_addr_const (file, x);
7200 return true;
7201 }
74d2529d 7202 return false;
4673c1a0 7203}
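/* For example, X == (unspec [(symbol_ref "foo")] UNSPEC_GOTENT) is output
   as "foo@GOTENT", and X == (unspec [(symbol_ref "bar")] UNSPEC_PLT) as
   "bar@PLT".  */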
7204
f81e845f 7205/* Output address operand ADDR in assembler syntax to
56769981 7206 stdio stream FILE. */
4673c1a0 7207
7208void
b40da9a7 7209print_operand_address (FILE *file, rtx addr)
4673c1a0 7210{
7211 struct s390_address ad;
7212
2a672556 7213 if (s390_loadrelative_operand_p (addr, NULL, NULL))
e68d6a13 7214 {
53b9033c 7215 if (!TARGET_Z10)
7216 {
902602ef 7217 output_operand_lossage ("symbolic memory references are "
7218 "only supported on z10 or later");
53b9033c 7219 return;
7220 }
e68d6a13 7221 output_addr_const (file, addr);
7222 return;
7223 }
7224
8ba34dcd 7225 if (!s390_decompose_address (addr, &ad)
1e280623 7226 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
7227 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
3284a242 7228 output_operand_lossage ("cannot decompose address");
f81e845f 7229
4673c1a0 7230 if (ad.disp)
74d2529d 7231 output_addr_const (file, ad.disp);
4673c1a0 7232 else
7233 fprintf (file, "0");
7234
7235 if (ad.base && ad.indx)
7236 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
7237 reg_names[REGNO (ad.base)]);
7238 else if (ad.base)
7239 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
7240}
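/* For example, ADDR == (plus (reg %r3) (const_int 16)) prints as
   "16(%r3)"; if an index register is present, the output takes the
   form "disp(index,base)".  */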
7241
f81e845f 7242/* Output operand X in assembler syntax to stdio stream FILE.
7243 CODE specified the format flag. The following format flags
56769981 7244 are recognized:
7245
7246 'C': print opcode suffix for branch condition.
7247 'D': print opcode suffix for inverse branch condition.
f1443d23 7248 'E': print opcode suffix for branch on index instruction.
cc87d0c5 7249 'G': print the size of the operand in bytes.
0d46035f 7250 'J': print tls_load/tls_gdcall/tls_ldcall suffix
7251 'M': print the second word of a TImode operand.
7252 'N': print the second word of a DImode operand.
76a4c804 7253 'O': print only the displacement of a memory reference or address.
7254 'R': print only the base register of a memory reference or address.
0574acbe 7255 'S': print S-type memory reference (base+displacement).
2be7449b 7256 'Y': print address style operand without index (e.g. shift count or setmem
7257 operand).
56769981 7258
45981c0a 7259 'b': print integer X as if it's an unsigned byte.
e68d6a13 7260 'c': print integer X as if it's a signed byte.
76a4c804 7261 'e': "end" contiguous bitmask X in either DImode or vector inner mode.
7262 'f': "end" contiguous bitmask X in SImode.
b9059d39 7263 'h': print integer X as if it's a signed halfword.
64a1078f 7264 'i': print the first nonzero HImode part of X.
b9059d39 7265 'j': print the first HImode part unequal to -1 of X.
7266 'k': print the first nonzero SImode part of X.
7267 'm': print the first SImode part unequal to -1 of X.
0d46035f 7268 'o': print integer X as if it's an unsigned 32-bit word.
76a4c804 7269 's': "start" of contiguous bitmask X in either DImode or vector inner mode.
7270 't': CONST_INT: "start" of contiguous bitmask X in SImode.
7271 CONST_VECTOR: Generate a bitmask for vgbm instruction.
0d46035f 7272 'x': print integer X as if it's an unsigned halfword.
76a4c804 7273 'v': print register number as vector register (v1 instead of f1).
0d46035f 7274*/
4673c1a0 7275
7276void
b40da9a7 7277print_operand (FILE *file, rtx x, int code)
4673c1a0 7278{
0d46035f 7279 HOST_WIDE_INT ival;
7280
4673c1a0 7281 switch (code)
7282 {
7283 case 'C':
2eb8fe23 7284 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
4673c1a0 7285 return;
7286
7287 case 'D':
2eb8fe23 7288 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
4673c1a0 7289 return;
7290
f1443d23 7291 case 'E':
7292 if (GET_CODE (x) == LE)
7293 fprintf (file, "l");
7294 else if (GET_CODE (x) == GT)
7295 fprintf (file, "h");
7296 else
902602ef 7297 output_operand_lossage ("invalid comparison operator "
7298 "for 'E' output modifier");
f1443d23 7299 return;
7300
be00aaa8 7301 case 'J':
7302 if (GET_CODE (x) == SYMBOL_REF)
7303 {
7304 fprintf (file, "%s", ":tls_load:");
7305 output_addr_const (file, x);
7306 }
7307 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
7308 {
7309 fprintf (file, "%s", ":tls_gdcall:");
7310 output_addr_const (file, XVECEXP (x, 0, 0));
7311 }
7312 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
7313 {
7314 fprintf (file, "%s", ":tls_ldcall:");
3677652f 7315 const char *name = get_some_local_dynamic_name ();
7316 gcc_assert (name);
7317 assemble_name (file, name);
be00aaa8 7318 }
7319 else
902602ef 7320 output_operand_lossage ("invalid reference for 'J' output modifier");
be00aaa8 7321 return;
7322
cc87d0c5 7323 case 'G':
7324 fprintf (file, "%u", GET_MODE_SIZE (GET_MODE (x)));
7325 return;
7326
4673c1a0 7327 case 'O':
7328 {
7329 struct s390_address ad;
32eda510 7330 int ret;
4673c1a0 7331
76a4c804 7332 ret = s390_decompose_address (MEM_P (x) ? XEXP (x, 0) : x, &ad);
53b9033c 7333
7334 if (!ret
7335 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
7336 || ad.indx)
7337 {
902602ef 7338 output_operand_lossage ("invalid address for 'O' output modifier");
53b9033c 7339 return;
7340 }
4673c1a0 7341
7342 if (ad.disp)
74d2529d 7343 output_addr_const (file, ad.disp);
4673c1a0 7344 else
7345 fprintf (file, "0");
7346 }
7347 return;
7348
7349 case 'R':
7350 {
7351 struct s390_address ad;
32eda510 7352 int ret;
4673c1a0 7353
76a4c804 7354 ret = s390_decompose_address (MEM_P (x) ? XEXP (x, 0) : x, &ad);
53b9033c 7355
7356 if (!ret
7357 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
7358 || ad.indx)
7359 {
902602ef 7360 output_operand_lossage ("invalid address for 'R' output modifier");
53b9033c 7361 return;
7362 }
4673c1a0 7363
7364 if (ad.base)
7365 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
7366 else
7367 fprintf (file, "0");
7368 }
7369 return;
7370
0574acbe 7371 case 'S':
7372 {
7373 struct s390_address ad;
32eda510 7374 int ret;
0574acbe 7375
53b9033c 7376 if (!MEM_P (x))
7377 {
902602ef 7378 output_operand_lossage ("memory reference expected for "
7379 "'S' output modifier");
53b9033c 7380 return;
7381 }
32eda510 7382 ret = s390_decompose_address (XEXP (x, 0), &ad);
53b9033c 7383
7384 if (!ret
7385 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
7386 || ad.indx)
7387 {
902602ef 7388 output_operand_lossage ("invalid address for 'S' output modifier");
53b9033c 7389 return;
7390 }
0574acbe 7391
7392 if (ad.disp)
7393 output_addr_const (file, ad.disp);
7394 else
7395 fprintf (file, "0");
7396
7397 if (ad.base)
7398 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
7399 }
7400 return;
7401
4673c1a0 7402 case 'N':
7403 if (GET_CODE (x) == REG)
7404 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
7405 else if (GET_CODE (x) == MEM)
29c05e22 7406 x = change_address (x, VOIDmode,
7407 plus_constant (Pmode, XEXP (x, 0), 4));
4673c1a0 7408 else
902602ef 7409 output_operand_lossage ("register or memory expression expected "
7410 "for 'N' output modifier");
4673c1a0 7411 break;
7412
7413 case 'M':
7414 if (GET_CODE (x) == REG)
7415 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
7416 else if (GET_CODE (x) == MEM)
29c05e22 7417 x = change_address (x, VOIDmode,
7418 plus_constant (Pmode, XEXP (x, 0), 8));
4673c1a0 7419 else
902602ef 7420 output_operand_lossage ("register or memory expression expected "
7421 "for 'M' output modifier");
4673c1a0 7422 break;
63ebd742 7423
7424 case 'Y':
2be7449b 7425 print_addrstyle_operand (file, x);
63ebd742 7426 return;
4673c1a0 7427 }
7428
7429 switch (GET_CODE (x))
7430 {
7431 case REG:
76a4c804 7432 /* Print FP regs as fx instead of vx when they are accessed
7433 through non-vector mode. */
7434 if (code == 'v'
7435 || VECTOR_NOFP_REG_P (x)
7436 || (FP_REG_P (x) && VECTOR_MODE_P (GET_MODE (x)))
7437 || (VECTOR_REG_P (x)
7438 && (GET_MODE_SIZE (GET_MODE (x)) /
7439 s390_class_max_nregs (FP_REGS, GET_MODE (x))) > 8))
7440 fprintf (file, "%%v%s", reg_names[REGNO (x)] + 2);
7441 else
7442 fprintf (file, "%s", reg_names[REGNO (x)]);
4673c1a0 7443 break;
7444
7445 case MEM:
3c047fe9 7446 output_address (GET_MODE (x), XEXP (x, 0));
4673c1a0 7447 break;
7448
7449 case CONST:
7450 case CODE_LABEL:
7451 case LABEL_REF:
7452 case SYMBOL_REF:
74d2529d 7453 output_addr_const (file, x);
4673c1a0 7454 break;
7455
7456 case CONST_INT:
0d46035f 7457 ival = INTVAL (x);
7458 switch (code)
7459 {
7460 case 0:
7461 break;
7462 case 'b':
7463 ival &= 0xff;
7464 break;
7465 case 'c':
7466 ival = ((ival & 0xff) ^ 0x80) - 0x80;
7467 break;
7468 case 'x':
7469 ival &= 0xffff;
7470 break;
7471 case 'h':
7472 ival = ((ival & 0xffff) ^ 0x8000) - 0x8000;
7473 break;
7474 case 'i':
7475 ival = s390_extract_part (x, HImode, 0);
7476 break;
7477 case 'j':
7478 ival = s390_extract_part (x, HImode, -1);
7479 break;
7480 case 'k':
7481 ival = s390_extract_part (x, SImode, 0);
7482 break;
7483 case 'm':
7484 ival = s390_extract_part (x, SImode, -1);
7485 break;
7486 case 'o':
7487 ival &= 0xffffffff;
7488 break;
7489 case 'e': case 'f':
7490 case 's': case 't':
7491 {
e64f5133 7492 int start, end;
7493 int len;
0d46035f 7494 bool ok;
7495
7496 len = (code == 's' || code == 'e' ? 64 : 32);
e64f5133 7497 ok = s390_contiguous_bitmask_p (ival, true, len, &start, &end);
0d46035f 7498 gcc_assert (ok);
7499 if (code == 's' || code == 't')
e64f5133 7500 ival = start;
0d46035f 7501 else
e64f5133 7502 ival = end;
0d46035f 7503 }
7504 break;
7505 default:
7506 output_operand_lossage ("invalid constant for output modifier '%c'", code);
7507 }
7508 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
8b4a4127 7509 break;
7510
ba0e61d6 7511 case CONST_WIDE_INT:
8b4a4127 7512 if (code == 'b')
ba0e61d6 7513 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7514 CONST_WIDE_INT_ELT (x, 0) & 0xff);
4673c1a0 7515 else if (code == 'x')
ba0e61d6 7516 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7517 CONST_WIDE_INT_ELT (x, 0) & 0xffff);
4673c1a0 7518 else if (code == 'h')
902602ef 7519 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
ba0e61d6 7520 ((CONST_WIDE_INT_ELT (x, 0) & 0xffff) ^ 0x8000) - 0x8000);
4673c1a0 7521 else
53b9033c 7522 {
7523 if (code == 0)
902602ef 7524 output_operand_lossage ("invalid constant - try using "
7525 "an output modifier");
53b9033c 7526 else
902602ef 7527 output_operand_lossage ("invalid constant for output modifier '%c'",
7528 code);
53b9033c 7529 }
4673c1a0 7530 break;
76a4c804 7531 case CONST_VECTOR:
7532 switch (code)
7533 {
80fc7f56 7534 case 'h':
7535 gcc_assert (const_vec_duplicate_p (x));
7536 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7537 ((INTVAL (XVECEXP (x, 0, 0)) & 0xffff) ^ 0x8000) - 0x8000);
7538 break;
76a4c804 7539 case 'e':
7540 case 's':
7541 {
e64f5133 7542 int start, end;
76a4c804 7543 bool ok;
7544
e64f5133 7545 ok = s390_contiguous_bitmask_vector_p (x, &start, &end);
76a4c804 7546 gcc_assert (ok);
e64f5133 7547 ival = (code == 's') ? start : end;
76a4c804 7548 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
7549 }
7550 break;
7551 case 't':
7552 {
7553 unsigned mask;
7554 bool ok = s390_bytemask_vector_p (x, &mask);
7555 gcc_assert (ok);
7556 fprintf (file, "%u", mask);
7557 }
7558 break;
7559
7560 default:
7561 output_operand_lossage ("invalid constant vector for output "
7562 "modifier '%c'", code);
7563 }
7564 break;
4673c1a0 7565
7566 default:
53b9033c 7567 if (code == 0)
902602ef 7568 output_operand_lossage ("invalid expression - try using "
7569 "an output modifier");
53b9033c 7570 else
902602ef 7571 output_operand_lossage ("invalid expression for output "
7572 "modifier '%c'", code);
4673c1a0 7573 break;
7574 }
7575}
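/* A few CONST_INT examples: for X == (const_int 0x12345), code 'x' masks
   to the low 16 bits and prints 9029 (== 0x2345); code 'h' sign-extends
   the low 16 bits, so (const_int 0x1ffff) prints as -1; code 'b' masks
   to the low 8 bits, so (const_int 0x1ff) prints as 255.  */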
7576
58356836 7577/* Target hook for assembling integer objects. We need to define it
7578 here to work a round a bug in some versions of GAS, which couldn't
7579 handle values smaller than INT_MIN when printed in decimal. */
7580
7581static bool
b40da9a7 7582s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
58356836 7583{
7584 if (size == 8 && aligned_p
7585 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
7586 {
4840a03a 7587 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
7588 INTVAL (x));
58356836 7589 return true;
7590 }
7591 return default_assemble_integer (x, size, aligned_p);
7592}
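/* For example, the DImode constant -2147483649 (one less than INT_MIN)
   is emitted as

       .quad   0xffffffff7fffffff

   instead of being printed in decimal.  */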
7593
f81e845f 7594/* Returns true if register REGNO is used for forming
56769981 7595 a memory address in expression X. */
4673c1a0 7596
e5537457 7597static bool
b40da9a7 7598reg_used_in_mem_p (int regno, rtx x)
4673c1a0 7599{
7600 enum rtx_code code = GET_CODE (x);
7601 int i, j;
7602 const char *fmt;
f81e845f 7603
4673c1a0 7604 if (code == MEM)
7605 {
2ec77a7c 7606 if (refers_to_regno_p (regno, XEXP (x, 0)))
e5537457 7607 return true;
4673c1a0 7608 }
f81e845f 7609 else if (code == SET
8b4a4127 7610 && GET_CODE (SET_DEST (x)) == PC)
7611 {
2ec77a7c 7612 if (refers_to_regno_p (regno, SET_SRC (x)))
e5537457 7613 return true;
8b4a4127 7614 }
4673c1a0 7615
7616 fmt = GET_RTX_FORMAT (code);
7617 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7618 {
7619 if (fmt[i] == 'e'
7620 && reg_used_in_mem_p (regno, XEXP (x, i)))
e5537457 7621 return true;
f81e845f 7622
4673c1a0 7623 else if (fmt[i] == 'E')
7624 for (j = 0; j < XVECLEN (x, i); j++)
7625 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
e5537457 7626 return true;
4673c1a0 7627 }
e5537457 7628 return false;
4673c1a0 7629}
7630
0c034860 7631/* Returns true if expression DEP_RTX sets an address register
56769981 7632 used by instruction INSN to address memory. */
4673c1a0 7633
e5537457 7634static bool
ed3e6e5d 7635addr_generation_dependency_p (rtx dep_rtx, rtx_insn *insn)
4673c1a0 7636{
8b4a4127 7637 rtx target, pat;
4673c1a0 7638
aa90bb35 7639 if (NONJUMP_INSN_P (dep_rtx))
77985f1a 7640 dep_rtx = PATTERN (dep_rtx);
71343e6b 7641
4673c1a0 7642 if (GET_CODE (dep_rtx) == SET)
7643 {
7644 target = SET_DEST (dep_rtx);
147b6a2d 7645 if (GET_CODE (target) == STRICT_LOW_PART)
7646 target = XEXP (target, 0);
7647 while (GET_CODE (target) == SUBREG)
7648 target = SUBREG_REG (target);
7649
4673c1a0 7650 if (GET_CODE (target) == REG)
7651 {
7652 int regno = REGNO (target);
7653
71343e6b 7654 if (s390_safe_attr_type (insn) == TYPE_LA)
8b4a4127 7655 {
7656 pat = PATTERN (insn);
7657 if (GET_CODE (pat) == PARALLEL)
7658 {
32eda510 7659 gcc_assert (XVECLEN (pat, 0) == 2);
8b4a4127 7660 pat = XVECEXP (pat, 0, 0);
7661 }
32eda510 7662 gcc_assert (GET_CODE (pat) == SET);
2ec77a7c 7663 return refers_to_regno_p (regno, SET_SRC (pat));
8b4a4127 7664 }
71343e6b 7665 else if (get_attr_atype (insn) == ATYPE_AGEN)
8b4a4127 7666 return reg_used_in_mem_p (regno, PATTERN (insn));
7667 }
4673c1a0 7668 }
e5537457 7669 return false;
4673c1a0 7670}
7671
71343e6b 7672/* Return 1 if dep_insn sets a register used by insn in the agen unit. */
7673
f81e845f 7674int
ed3e6e5d 7675s390_agen_dep_p (rtx_insn *dep_insn, rtx_insn *insn)
f81e845f 7676{
71343e6b 7677 rtx dep_rtx = PATTERN (dep_insn);
7678 int i;
f81e845f 7679
7680 if (GET_CODE (dep_rtx) == SET
71343e6b 7681 && addr_generation_dependency_p (dep_rtx, insn))
7682 return 1;
7683 else if (GET_CODE (dep_rtx) == PARALLEL)
7684 {
7685 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
7686 {
7687 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
7688 return 1;
7689 }
7690 }
7691 return 0;
7692}
7693
510c2327 7694
e51ae8ff 7695/* A C statement (sans semicolon) to update the integer scheduling priority
7696 INSN_PRIORITY (INSN). Increase the priority to execute INSN earlier;
7697 reduce the priority to execute INSN later. Do not define this macro if
f81e845f 7698 you do not need to adjust the scheduling priorities of insns.
e51ae8ff 7699
f81e845f 7700 A STD instruction should be scheduled earlier,
e51ae8ff 7701 in order to use the bypass. */
e51ae8ff 7702static int
18282db0 7703s390_adjust_priority (rtx_insn *insn, int priority)
e51ae8ff 7704{
7705 if (! INSN_P (insn))
7706 return priority;
7707
9aae2901 7708 if (s390_tune <= PROCESSOR_2064_Z900)
e51ae8ff 7709 return priority;
7710
7711 switch (s390_safe_attr_type (insn))
7712 {
11f88fec 7713 case TYPE_FSTOREDF:
7714 case TYPE_FSTORESF:
e51ae8ff 7715 priority = priority << 3;
7716 break;
7717 case TYPE_STORE:
76dbb8df 7718 case TYPE_STM:
e51ae8ff 7719 priority = priority << 1;
7720 break;
7721 default:
7722 break;
7723 }
7724 return priority;
7725}
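/* The shifts scale priorities multiplicatively: an FP store with
   priority 4 is boosted to 32 (<< 3), while a STORE/STM with priority 4
   becomes 8 (<< 1).  */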
369293ed 7726
b0eacf26 7727
71343e6b 7728/* The number of instructions that can be issued per cycle. */
369293ed 7729
71343e6b 7730static int
b40da9a7 7731s390_issue_rate (void)
71343e6b 7732{
a850370e 7733 switch (s390_tune)
7734 {
7735 case PROCESSOR_2084_Z990:
7736 case PROCESSOR_2094_Z9_109:
9aae2901 7737 case PROCESSOR_2094_Z9_EC:
33d033da 7738 case PROCESSOR_2817_Z196:
a850370e 7739 return 3;
7740 case PROCESSOR_2097_Z10:
7741 return 2;
117d67d0 7742 case PROCESSOR_9672_G5:
7743 case PROCESSOR_9672_G6:
7744 case PROCESSOR_2064_Z900:
5ed1f72b 7745 /* Starting with EC12 we use the sched_reorder hook to take care
7746 of instruction dispatch constraints. The algorithm only
7747 picks the best instruction and assumes only a single
7748 instruction gets issued per cycle. */
7749 case PROCESSOR_2827_ZEC12:
117d67d0 7750 case PROCESSOR_2964_Z13:
a850370e 7751 default:
7752 return 1;
7753 }
71343e6b 7754}
369293ed 7755
e51ae8ff 7756static int
b40da9a7 7757s390_first_cycle_multipass_dfa_lookahead (void)
e51ae8ff 7758{
a65ea517 7759 return 4;
e51ae8ff 7760}
7761
20074f87 7762/* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
7763 Fix up MEMs as required. */
7764
7765static void
7766annotate_constant_pool_refs (rtx *x)
7767{
7768 int i, j;
7769 const char *fmt;
7770
32eda510 7771 gcc_assert (GET_CODE (*x) != SYMBOL_REF
7772 || !CONSTANT_POOL_ADDRESS_P (*x));
20074f87 7773
7774 /* Literal pool references can only occur inside a MEM ... */
7775 if (GET_CODE (*x) == MEM)
7776 {
7777 rtx memref = XEXP (*x, 0);
7778
7779 if (GET_CODE (memref) == SYMBOL_REF
7780 && CONSTANT_POOL_ADDRESS_P (memref))
7781 {
7782 rtx base = cfun->machine->base_reg;
7783 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
7784 UNSPEC_LTREF);
7785
7786 *x = replace_equiv_address (*x, addr);
7787 return;
7788 }
7789
7790 if (GET_CODE (memref) == CONST
7791 && GET_CODE (XEXP (memref, 0)) == PLUS
7792 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
7793 && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
7794 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
7795 {
7796 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
7797 rtx sym = XEXP (XEXP (memref, 0), 0);
7798 rtx base = cfun->machine->base_reg;
7799 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
7800 UNSPEC_LTREF);
7801
29c05e22 7802 *x = replace_equiv_address (*x, plus_constant (Pmode, addr, off));
20074f87 7803 return;
7804 }
7805 }
7806
7807 /* ... or a load-address type pattern. */
7808 if (GET_CODE (*x) == SET)
7809 {
7810 rtx addrref = SET_SRC (*x);
7811
7812 if (GET_CODE (addrref) == SYMBOL_REF
7813 && CONSTANT_POOL_ADDRESS_P (addrref))
7814 {
7815 rtx base = cfun->machine->base_reg;
7816 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
7817 UNSPEC_LTREF);
7818
7819 SET_SRC (*x) = addr;
7820 return;
7821 }
7822
7823 if (GET_CODE (addrref) == CONST
7824 && GET_CODE (XEXP (addrref, 0)) == PLUS
7825 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
7826 && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
7827 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
7828 {
7829 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
7830 rtx sym = XEXP (XEXP (addrref, 0), 0);
7831 rtx base = cfun->machine->base_reg;
7832 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
7833 UNSPEC_LTREF);
7834
29c05e22 7835 SET_SRC (*x) = plus_constant (Pmode, addr, off);
20074f87 7836 return;
7837 }
7838 }
7839
7840 /* Annotate LTREL_BASE as well. */
7841 if (GET_CODE (*x) == UNSPEC
7842 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
7843 {
7844 rtx base = cfun->machine->base_reg;
7845 *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
7846 UNSPEC_LTREL_BASE);
7847 return;
7848 }
7849
7850 fmt = GET_RTX_FORMAT (GET_CODE (*x));
7851 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
7852 {
7853 if (fmt[i] == 'e')
7854 {
7855 annotate_constant_pool_refs (&XEXP (*x, i));
7856 }
7857 else if (fmt[i] == 'E')
7858 {
7859 for (j = 0; j < XVECLEN (*x, i); j++)
7860 annotate_constant_pool_refs (&XVECEXP (*x, i, j));
7861 }
7862 }
7863}
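/* For example, a literal pool reference

       (mem (symbol_ref ".LC0"))

   is rewritten to

       (mem (unspec [(symbol_ref ".LC0") (reg base)] UNSPEC_LTREF))

   where BASE is cfun->machine->base_reg.  */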
7864
875862bf 7865/* Split all branches that exceed the maximum distance.
7866 Returns true if this created a new literal pool entry. */
7867
7868static int
7869s390_split_branches (void)
7870{
7871 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
32eda510 7872 int new_literal = 0, ret;
93e0956b 7873 rtx_insn *insn;
ed7591be 7874 rtx pat, target;
875862bf 7875 rtx *label;
7876
7877 /* We need correct insn addresses. */
7878
7879 shorten_branches (get_insns ());
7880
7881 /* Find all branches that exceed 64KB, and split them. */
7882
7883 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7884 {
245402e7 7885 if (! JUMP_P (insn) || tablejump_p (insn, NULL, NULL))
875862bf 7886 continue;
7887
7888 pat = PATTERN (insn);
245402e7 7889 if (GET_CODE (pat) == PARALLEL)
875862bf 7890 pat = XVECEXP (pat, 0, 0);
7891 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
7892 continue;
7893
7894 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
7895 {
7896 label = &SET_SRC (pat);
7897 }
7898 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
7899 {
7900 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
7901 label = &XEXP (SET_SRC (pat), 1);
7902 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
7903 label = &XEXP (SET_SRC (pat), 2);
7904 else
7905 continue;
7906 }
7907 else
7908 continue;
7909
7910 if (get_attr_length (insn) <= 4)
7911 continue;
7912
77beec48 7913 /* We are going to use the return register as a scratch register;
7914 make sure it will be saved/restored by the prologue/epilogue. */
7915 cfun_frame_layout.save_return_addr_p = 1;
7916
875862bf 7917 if (!flag_pic)
7918 {
7919 new_literal = 1;
ed7591be 7920 rtx mem = force_const_mem (Pmode, *label);
d1f9b275 7921 rtx_insn *set_insn = emit_insn_before (gen_rtx_SET (temp_reg, mem),
7922 insn);
ed7591be 7923 INSN_ADDRESSES_NEW (set_insn, -1);
7924 annotate_constant_pool_refs (&PATTERN (set_insn));
875862bf 7925
7926 target = temp_reg;
7927 }
7928 else
7929 {
7930 new_literal = 1;
7931 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
7932 UNSPEC_LTREL_OFFSET);
7933 target = gen_rtx_CONST (Pmode, target);
7934 target = force_const_mem (Pmode, target);
d1f9b275 7935 rtx_insn *set_insn = emit_insn_before (gen_rtx_SET (temp_reg, target),
7936 insn);
ed7591be 7937 INSN_ADDRESSES_NEW (set_insn, -1);
7938 annotate_constant_pool_refs (&PATTERN (set_insn));
875862bf 7939
7940 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
7941 cfun->machine->base_reg),
7942 UNSPEC_LTREL_BASE);
7943 target = gen_rtx_PLUS (Pmode, temp_reg, target);
7944 }
7945
32eda510 7946 ret = validate_change (insn, label, target, 0);
7947 gcc_assert (ret);
875862bf 7948 }
7949
7950 return new_literal;
7951}
7952
0756cebb 7953
ffead1ca 7954/* Find an annotated literal pool symbol referenced in RTX X,
7955 and store it at REF. Will abort if X contains references to
20074f87 7956 more than one such pool symbol; multiple references to the same
7957 symbol are allowed, however.
0756cebb 7958
f81e845f 7959 The rtx pointed to by REF must be initialized to NULL_RTX
0756cebb 7960 by the caller before calling this routine. */
7961
7962static void
b40da9a7 7963find_constant_pool_ref (rtx x, rtx *ref)
0756cebb 7964{
7965 int i, j;
7966 const char *fmt;
7967
12ef3745 7968 /* Ignore LTREL_BASE references. */
7969 if (GET_CODE (x) == UNSPEC
7970 && XINT (x, 1) == UNSPEC_LTREL_BASE)
7971 return;
c2c1332a 7972 /* Likewise POOL_ENTRY insns. */
7973 if (GET_CODE (x) == UNSPEC_VOLATILE
7974 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
7975 return;
12ef3745 7976
32eda510 7977 gcc_assert (GET_CODE (x) != SYMBOL_REF
7978 || !CONSTANT_POOL_ADDRESS_P (x));
20074f87 7979
7980 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
0756cebb 7981 {
20074f87 7982 rtx sym = XVECEXP (x, 0, 0);
32eda510 7983 gcc_assert (GET_CODE (sym) == SYMBOL_REF
7984 && CONSTANT_POOL_ADDRESS_P (sym));
20074f87 7985
0756cebb 7986 if (*ref == NULL_RTX)
20074f87 7987 *ref = sym;
ffead1ca 7988 else
32eda510 7989 gcc_assert (*ref == sym);
20074f87 7990
7991 return;
0756cebb 7992 }
7993
7994 fmt = GET_RTX_FORMAT (GET_CODE (x));
7995 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
7996 {
7997 if (fmt[i] == 'e')
7998 {
7999 find_constant_pool_ref (XEXP (x, i), ref);
8000 }
8001 else if (fmt[i] == 'E')
8002 {
8003 for (j = 0; j < XVECLEN (x, i); j++)
8004 find_constant_pool_ref (XVECEXP (x, i, j), ref);
8005 }
8006 }
8007}
8008
ffead1ca 8009/* Replace every reference to the annotated literal pool
20074f87 8010 symbol REF in X by its base plus OFFSET. */
0756cebb 8011
8012static void
20074f87 8013replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
0756cebb 8014{
8015 int i, j;
8016 const char *fmt;
8017
32eda510 8018 gcc_assert (*x != ref);
0756cebb 8019
20074f87 8020 if (GET_CODE (*x) == UNSPEC
8021 && XINT (*x, 1) == UNSPEC_LTREF
8022 && XVECEXP (*x, 0, 0) == ref)
0756cebb 8023 {
20074f87 8024 *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
8025 return;
0756cebb 8026 }
8027
20074f87 8028 if (GET_CODE (*x) == PLUS
8029 && GET_CODE (XEXP (*x, 1)) == CONST_INT
8030 && GET_CODE (XEXP (*x, 0)) == UNSPEC
8031 && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
8032 && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
0756cebb 8033 {
20074f87 8034 rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
29c05e22 8035 *x = plus_constant (Pmode, addr, INTVAL (XEXP (*x, 1)));
20074f87 8036 return;
0756cebb 8037 }
8038
8039 fmt = GET_RTX_FORMAT (GET_CODE (*x));
8040 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
8041 {
8042 if (fmt[i] == 'e')
8043 {
20074f87 8044 replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
0756cebb 8045 }
8046 else if (fmt[i] == 'E')
8047 {
8048 for (j = 0; j < XVECLEN (*x, i); j++)
20074f87 8049 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
0756cebb 8050 }
8051 }
8052}
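/* For example, an annotated reference

       (unspec [(symbol_ref ".LC0") (reg base)] UNSPEC_LTREF)

   becomes

       (plus (reg base) offset)

   with OFFSET being the pool-relative position as computed by
   s390_find_constant.  */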
8053
f81e845f 8054/* Check whether X contains an UNSPEC_LTREL_BASE.
12ef3745 8055 Return its constant pool symbol if found, NULL_RTX otherwise. */
96be3ab6 8056
12ef3745 8057static rtx
b40da9a7 8058find_ltrel_base (rtx x)
96be3ab6 8059{
96be3ab6 8060 int i, j;
8061 const char *fmt;
8062
12ef3745 8063 if (GET_CODE (x) == UNSPEC
8064 && XINT (x, 1) == UNSPEC_LTREL_BASE)
8065 return XVECEXP (x, 0, 0);
96be3ab6 8066
8067 fmt = GET_RTX_FORMAT (GET_CODE (x));
8068 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
8069 {
8070 if (fmt[i] == 'e')
8071 {
12ef3745 8072 rtx fnd = find_ltrel_base (XEXP (x, i));
8073 if (fnd)
8074 return fnd;
96be3ab6 8075 }
8076 else if (fmt[i] == 'E')
8077 {
8078 for (j = 0; j < XVECLEN (x, i); j++)
12ef3745 8079 {
8080 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
8081 if (fnd)
8082 return fnd;
8083 }
96be3ab6 8084 }
8085 }
8086
12ef3745 8087 return NULL_RTX;
96be3ab6 8088}
8089
20074f87 8090/* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
96be3ab6 8091
8092static void
20074f87 8093replace_ltrel_base (rtx *x)
96be3ab6 8094{
12ef3745 8095 int i, j;
96be3ab6 8096 const char *fmt;
8097
12ef3745 8098 if (GET_CODE (*x) == UNSPEC
8099 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
96be3ab6 8100 {
20074f87 8101 *x = XVECEXP (*x, 0, 1);
12ef3745 8102 return;
96be3ab6 8103 }
8104
8105 fmt = GET_RTX_FORMAT (GET_CODE (*x));
8106 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
8107 {
8108 if (fmt[i] == 'e')
8109 {
20074f87 8110 replace_ltrel_base (&XEXP (*x, i));
96be3ab6 8111 }
8112 else if (fmt[i] == 'E')
8113 {
8114 for (j = 0; j < XVECLEN (*x, i); j++)
20074f87 8115 replace_ltrel_base (&XVECEXP (*x, i, j));
96be3ab6 8116 }
8117 }
8118}
8119
8120
12ef3745 8121/* We keep a list of constants which we have to add to internal
0756cebb 8122 constant tables in the middle of large functions. */
8123
02b901ef 8124#define NR_C_MODES 32
3754d046 8125machine_mode constant_modes[NR_C_MODES] =
0756cebb 8126{
36868490 8127 TFmode, TImode, TDmode,
02b901ef 8128 V16QImode, V8HImode, V4SImode, V2DImode, V1TImode,
8129 V4SFmode, V2DFmode, V1TFmode,
36868490 8130 DFmode, DImode, DDmode,
76a4c804 8131 V8QImode, V4HImode, V2SImode, V1DImode, V2SFmode, V1DFmode,
36868490 8132 SFmode, SImode, SDmode,
76a4c804 8133 V4QImode, V2HImode, V1SImode, V1SFmode,
0756cebb 8134 HImode,
76a4c804 8135 V2QImode, V1HImode,
8136 QImode,
8137 V1QImode
0756cebb 8138};
8139
0756cebb 8140struct constant
8141{
8142 struct constant *next;
8143 rtx value;
93e0956b 8144 rtx_code_label *label;
0756cebb 8145};
8146
8147struct constant_pool
8148{
8149 struct constant_pool *next;
93e0956b 8150 rtx_insn *first_insn;
8151 rtx_insn *pool_insn;
96be3ab6 8152 bitmap insns;
93e0956b 8153 rtx_insn *emit_pool_after;
0756cebb 8154
8155 struct constant *constants[NR_C_MODES];
d345b493 8156 struct constant *execute;
93e0956b 8157 rtx_code_label *label;
0756cebb 8158 int size;
8159};
8160
875862bf 8161/* Allocate new constant_pool structure. */
8162
8163static struct constant_pool *
8164s390_alloc_pool (void)
8165{
8166 struct constant_pool *pool;
8167 int i;
8168
8169 pool = (struct constant_pool *) xmalloc (sizeof *pool);
8170 pool->next = NULL;
8171 for (i = 0; i < NR_C_MODES; i++)
8172 pool->constants[i] = NULL;
8173
8174 pool->execute = NULL;
8175 pool->label = gen_label_rtx ();
93e0956b 8176 pool->first_insn = NULL;
8177 pool->pool_insn = NULL;
875862bf 8178 pool->insns = BITMAP_ALLOC (NULL);
8179 pool->size = 0;
93e0956b 8180 pool->emit_pool_after = NULL;
875862bf 8181
8182 return pool;
8183}
0756cebb 8184
8185/* Create new constant pool covering instructions starting at INSN
8186 and chain it to the end of POOL_LIST. */
8187
8188static struct constant_pool *
93e0956b 8189s390_start_pool (struct constant_pool **pool_list, rtx_insn *insn)
0756cebb 8190{
8191 struct constant_pool *pool, **prev;
0756cebb 8192
c2c1332a 8193 pool = s390_alloc_pool ();
0756cebb 8194 pool->first_insn = insn;
96be3ab6 8195
0756cebb 8196 for (prev = pool_list; *prev; prev = &(*prev)->next)
8197 ;
8198 *prev = pool;
8199
8200 return pool;
8201}
8202
96be3ab6 8203/* End range of instructions covered by POOL at INSN and emit
8204 placeholder insn representing the pool. */
0756cebb 8205
8206static void
93e0956b 8207s390_end_pool (struct constant_pool *pool, rtx_insn *insn)
0756cebb 8208{
96be3ab6 8209 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
8210
8211 if (!insn)
8212 insn = get_last_insn ();
8213
8214 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
8215 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
8216}
8217
8218/* Add INSN to the list of insns covered by POOL. */
8219
8220static void
b40da9a7 8221s390_add_pool_insn (struct constant_pool *pool, rtx insn)
96be3ab6 8222{
8223 bitmap_set_bit (pool->insns, INSN_UID (insn));
0756cebb 8224}
8225
8226/* Return pool out of POOL_LIST that covers INSN. */
8227
8228static struct constant_pool *
b40da9a7 8229s390_find_pool (struct constant_pool *pool_list, rtx insn)
0756cebb 8230{
0756cebb 8231 struct constant_pool *pool;
8232
0756cebb 8233 for (pool = pool_list; pool; pool = pool->next)
96be3ab6 8234 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
0756cebb 8235 break;
8236
8237 return pool;
8238}
8239
96be3ab6 8240/* Add constant VAL of mode MODE to the constant pool POOL. */
0756cebb 8241
96be3ab6 8242static void
3754d046 8243s390_add_constant (struct constant_pool *pool, rtx val, machine_mode mode)
0756cebb 8244{
8245 struct constant *c;
0756cebb 8246 int i;
8247
8248 for (i = 0; i < NR_C_MODES; i++)
8249 if (constant_modes[i] == mode)
8250 break;
32eda510 8251 gcc_assert (i != NR_C_MODES);
0756cebb 8252
8253 for (c = pool->constants[i]; c != NULL; c = c->next)
8254 if (rtx_equal_p (val, c->value))
8255 break;
8256
8257 if (c == NULL)
8258 {
8259 c = (struct constant *) xmalloc (sizeof *c);
8260 c->value = val;
8261 c->label = gen_label_rtx ();
8262 c->next = pool->constants[i];
8263 pool->constants[i] = c;
8264 pool->size += GET_MODE_SIZE (mode);
8265 }
96be3ab6 8266}
0756cebb 8267
1ed7a160 8268/* Return an rtx that represents the offset of X from the start of
8269 pool POOL. */
8270
8271static rtx
8272s390_pool_offset (struct constant_pool *pool, rtx x)
8273{
8274 rtx label;
8275
8276 label = gen_rtx_LABEL_REF (GET_MODE (x), pool->label);
8277 x = gen_rtx_UNSPEC (GET_MODE (x), gen_rtvec (2, x, label),
8278 UNSPEC_POOL_OFFSET);
8279 return gen_rtx_CONST (GET_MODE (x), x);
8280}
8281
96be3ab6 8282/* Find constant VAL of mode MODE in the constant pool POOL.
8283 Return an RTX describing the distance from the start of
8284 the pool to the location of the new constant. */
f81e845f 8285
96be3ab6 8286static rtx
b40da9a7 8287s390_find_constant (struct constant_pool *pool, rtx val,
3754d046 8288 machine_mode mode)
96be3ab6 8289{
8290 struct constant *c;
96be3ab6 8291 int i;
f81e845f 8292
96be3ab6 8293 for (i = 0; i < NR_C_MODES; i++)
8294 if (constant_modes[i] == mode)
8295 break;
32eda510 8296 gcc_assert (i != NR_C_MODES);
f81e845f 8297
96be3ab6 8298 for (c = pool->constants[i]; c != NULL; c = c->next)
8299 if (rtx_equal_p (val, c->value))
8300 break;
f81e845f 8301
32eda510 8302 gcc_assert (c);
f81e845f 8303
1ed7a160 8304 return s390_pool_offset (pool, gen_rtx_LABEL_REF (Pmode, c->label));
0756cebb 8305}
8306
875862bf 8307/* Check whether INSN is an execute. Return the label_ref to its
8308 execute target template if so, NULL_RTX otherwise. */
8309
8310static rtx
8311s390_execute_label (rtx insn)
8312{
aa90bb35 8313 if (NONJUMP_INSN_P (insn)
875862bf 8314 && GET_CODE (PATTERN (insn)) == PARALLEL
8315 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
8316 && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
8317 return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);
8318
8319 return NULL_RTX;
8320}
8321
d345b493 8322/* Add execute target for INSN to the constant pool POOL. */
8323
8324static void
8325s390_add_execute (struct constant_pool *pool, rtx insn)
8326{
8327 struct constant *c;
8328
8329 for (c = pool->execute; c != NULL; c = c->next)
8330 if (INSN_UID (insn) == INSN_UID (c->value))
8331 break;
8332
8333 if (c == NULL)
8334 {
d345b493 8335 c = (struct constant *) xmalloc (sizeof *c);
8336 c->value = insn;
babfdedf 8337 c->label = gen_label_rtx ();
d345b493 8338 c->next = pool->execute;
8339 pool->execute = c;
babfdedf 8340 pool->size += 6;
d345b493 8341 }
8342}
8343
8344/* Find execute target for INSN in the constant pool POOL.
8345 Return an RTX describing the distance from the start of
8346 the pool to the location of the execute target. */
8347
8348static rtx
8349s390_find_execute (struct constant_pool *pool, rtx insn)
8350{
8351 struct constant *c;
d345b493 8352
8353 for (c = pool->execute; c != NULL; c = c->next)
8354 if (INSN_UID (insn) == INSN_UID (c->value))
8355 break;
8356
32eda510 8357 gcc_assert (c);
d345b493 8358
1ed7a160 8359 return s390_pool_offset (pool, gen_rtx_LABEL_REF (Pmode, c->label));
d345b493 8360}
8361
875862bf 8362/* For an execute INSN, extract the execute target template. */
d345b493 8363
8364static rtx
875862bf 8365s390_execute_target (rtx insn)
d345b493 8366{
875862bf 8367 rtx pattern = PATTERN (insn);
8368 gcc_assert (s390_execute_label (insn));
d345b493 8369
8370 if (XVECLEN (pattern, 0) == 2)
8371 {
8372 pattern = copy_rtx (XVECEXP (pattern, 0, 1));
8373 }
8374 else
8375 {
8376 rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
8377 int i;
8378
8379 for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
8380 RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));
8381
8382 pattern = gen_rtx_PARALLEL (VOIDmode, vec);
8383 }
8384
8385 return pattern;
8386}
8387
8388/* Indicate that INSN cannot be duplicated. This is the case for
8389 execute insns that carry a unique label. */
8390
8391static bool
18282db0 8392s390_cannot_copy_insn_p (rtx_insn *insn)
d345b493 8393{
8394 rtx label = s390_execute_label (insn);
8395 return label && label != const0_rtx;
8396}
8397
c2c1332a 8398/* Dump out the constants in POOL. If REMOTE_LABEL is true,
8399 do not emit the pool base label. */
0756cebb 8400
d345b493 8401static void
c2c1332a 8402s390_dump_pool (struct constant_pool *pool, bool remote_label)
0756cebb 8403{
8404 struct constant *c;
93e0956b 8405 rtx_insn *insn = pool->pool_insn;
0756cebb 8406 int i;
8407
d345b493 8408 /* Switch to rodata section. */
8409 if (TARGET_CPU_ZARCH)
8410 {
8411 insn = emit_insn_after (gen_pool_section_start (), insn);
8412 INSN_ADDRESSES_NEW (insn, -1);
8413 }
8414
8415 /* Ensure minimum pool alignment. */
dafc8d45 8416 if (TARGET_CPU_ZARCH)
d345b493 8417 insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
0756cebb 8418 else
d345b493 8419 insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
0756cebb 8420 INSN_ADDRESSES_NEW (insn, -1);
8421
d345b493 8422 /* Emit pool base label. */
c2c1332a 8423 if (!remote_label)
8424 {
8425 insn = emit_label_after (pool->label, insn);
8426 INSN_ADDRESSES_NEW (insn, -1);
8427 }
0756cebb 8428
8429 /* Dump constants in descending alignment requirement order,
8430 ensuring proper alignment for every constant. */
8431 for (i = 0; i < NR_C_MODES; i++)
8432 for (c = pool->constants[i]; c; c = c->next)
8433 {
12ef3745 8434 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
b2ed6df1 8435 rtx value = copy_rtx (c->value);
96be3ab6 8436 if (GET_CODE (value) == CONST
8437 && GET_CODE (XEXP (value, 0)) == UNSPEC
12ef3745 8438 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
96be3ab6 8439 && XVECLEN (XEXP (value, 0), 0) == 1)
1ed7a160 8440 value = s390_pool_offset (pool, XVECEXP (XEXP (value, 0), 0, 0));
96be3ab6 8441
0756cebb 8442 insn = emit_label_after (c->label, insn);
8443 INSN_ADDRESSES_NEW (insn, -1);
df82fb76 8444
f588eb9f 8445 value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
df82fb76 8446 gen_rtvec (1, value),
8447 UNSPECV_POOL_ENTRY);
8448 insn = emit_insn_after (value, insn);
0756cebb 8449 INSN_ADDRESSES_NEW (insn, -1);
8450 }
8451
d345b493 8452 /* Ensure minimum alignment for instructions. */
8453 insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
0756cebb 8454 INSN_ADDRESSES_NEW (insn, -1);
8455
d345b493 8456 /* Output in-pool execute template insns. */
8457 for (c = pool->execute; c; c = c->next)
8458 {
d345b493 8459 insn = emit_label_after (c->label, insn);
8460 INSN_ADDRESSES_NEW (insn, -1);
8461
8462 insn = emit_insn_after (s390_execute_target (c->value), insn);
8463 INSN_ADDRESSES_NEW (insn, -1);
8464 }
8465
8466 /* Switch back to previous section. */
8467 if (TARGET_CPU_ZARCH)
8468 {
8469 insn = emit_insn_after (gen_pool_section_end (), insn);
8470 INSN_ADDRESSES_NEW (insn, -1);
8471 }
8472
0756cebb 8473 insn = emit_barrier_after (insn);
8474 INSN_ADDRESSES_NEW (insn, -1);
8475
96be3ab6 8476 /* Remove placeholder insn. */
8477 remove_insn (pool->pool_insn);
d345b493 8478}
8479
0756cebb 8480/* Free all memory used by POOL. */
8481
8482static void
b40da9a7 8483s390_free_pool (struct constant_pool *pool)
0756cebb 8484{
d345b493 8485 struct constant *c, *next;
0756cebb 8486 int i;
8487
8488 for (i = 0; i < NR_C_MODES; i++)
d345b493 8489 for (c = pool->constants[i]; c; c = next)
8490 {
8491 next = c->next;
8492 free (c);
8493 }
8494
8495 for (c = pool->execute; c; c = next)
0756cebb 8496 {
d345b493 8497 next = c->next;
8498 free (c);
0756cebb 8499 }
8500
4d6e8511 8501 BITMAP_FREE (pool->insns);
0756cebb 8502 free (pool);
f81e845f 8503}
0756cebb 8504
0756cebb 8505
c2c1332a 8506/* Collect main literal pool. Return NULL on overflow. */
8507
8508static struct constant_pool *
8509s390_mainpool_start (void)
8510{
8511 struct constant_pool *pool;
93e0956b 8512 rtx_insn *insn;
c2c1332a 8513
8514 pool = s390_alloc_pool ();
8515
8516 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8517 {
aa90bb35 8518 if (NONJUMP_INSN_P (insn)
20074f87 8519 && GET_CODE (PATTERN (insn)) == SET
8520 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
8521 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
c2c1332a 8522 {
7a64c761 8523 /* There might be two main_pool instructions if base_reg
8524 is call-clobbered; one for shrink-wrapped code and one
8525 for the rest. We want to keep the first. */
8526 if (pool->pool_insn)
8527 {
8528 insn = PREV_INSN (insn);
8529 delete_insn (NEXT_INSN (insn));
8530 continue;
8531 }
c2c1332a 8532 pool->pool_insn = insn;
8533 }
8534
babfdedf 8535 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
d345b493 8536 {
8537 s390_add_execute (pool, insn);
8538 }
aa90bb35 8539 else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
c2c1332a 8540 {
8541 rtx pool_ref = NULL_RTX;
8542 find_constant_pool_ref (PATTERN (insn), &pool_ref);
8543 if (pool_ref)
8544 {
8545 rtx constant = get_pool_constant (pool_ref);
3754d046 8546 machine_mode mode = get_pool_mode (pool_ref);
c2c1332a 8547 s390_add_constant (pool, constant, mode);
8548 }
8549 }
86428198 8550
8551 /* If hot/cold partitioning is enabled we have to make sure that
8552 the literal pool is emitted in the same section where the
8553 initialization of the literal pool base pointer takes place.
8554 emit_pool_after is only used in the non-overflow case on non-Z
8555 CPUs where we can emit the literal pool at the end of the
8556 function body within the text section. */
8557 if (NOTE_P (insn)
7338c728 8558 && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS
8559 && !pool->emit_pool_after)
8560 pool->emit_pool_after = PREV_INSN (insn);
c2c1332a 8561 }
8562
32eda510 8563 gcc_assert (pool->pool_insn || pool->size == 0);
c2c1332a 8564
8565 if (pool->size >= 4096)
8566 {
7de9f7aa 8567 /* We're going to chunkify the pool, so remove the main
8568 pool placeholder insn. */
8569 remove_insn (pool->pool_insn);
8570
c2c1332a 8571 s390_free_pool (pool);
8572 pool = NULL;
8573 }
8574
86428198 8575 /* If the function ends with the section where the literal pool
8576 should be emitted, set the marker to its end. */
7338c728 8577 if (pool && !pool->emit_pool_after)
86428198 8578 pool->emit_pool_after = get_last_insn ();
8579
c2c1332a 8580 return pool;
8581}
8582
8583/* POOL holds the main literal pool as collected by s390_mainpool_start.
8584 Modify the current function to output the pool constants as well as
20074f87 8585 the pool register setup instruction. */
c2c1332a 8586
8587static void
20074f87 8588s390_mainpool_finish (struct constant_pool *pool)
c2c1332a 8589{
4fed3f99 8590 rtx base_reg = cfun->machine->base_reg;
c2c1332a 8591
8592 /* If the pool is empty, we're done. */
8593 if (pool->size == 0)
8594 {
4fed3f99 8595 /* We don't actually need a base register after all. */
8596 cfun->machine->base_reg = NULL_RTX;
8597
8598 if (pool->pool_insn)
8599 remove_insn (pool->pool_insn);
c2c1332a 8600 s390_free_pool (pool);
8601 return;
8602 }
8603
8604 /* We need correct insn addresses. */
8605 shorten_branches (get_insns ());
8606
dafc8d45 8607 /* On zSeries, we use a LARL to load the pool register. The pool is
c2c1332a 8608 located in the .rodata section, so we emit it after the function. */
dafc8d45 8609 if (TARGET_CPU_ZARCH)
c2c1332a 8610 {
ed7591be 8611 rtx set = gen_main_base_64 (base_reg, pool->label);
8612 rtx_insn *insn = emit_insn_after (set, pool->pool_insn);
c2c1332a 8613 INSN_ADDRESSES_NEW (insn, -1);
8614 remove_insn (pool->pool_insn);
f588eb9f 8615
8616 insn = get_last_insn ();
c2c1332a 8617 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
8618 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
8619
8620 s390_dump_pool (pool, 0);
8621 }
8622
dafc8d45 8623 /* On S/390, if the total size of the function's code plus literal pool
c2c1332a 8624 does not exceed 4096 bytes, we use BASR to set up a function base
8625 pointer, and emit the literal pool at the end of the function. */
86428198 8626 else if (INSN_ADDRESSES (INSN_UID (pool->emit_pool_after))
c2c1332a 8627 + pool->size + 8 /* alignment slop */ < 4096)
8628 {
ed7591be 8629 rtx set = gen_main_base_31_small (base_reg, pool->label);
8630 rtx_insn *insn = emit_insn_after (set, pool->pool_insn);
c2c1332a 8631 INSN_ADDRESSES_NEW (insn, -1);
8632 remove_insn (pool->pool_insn);
8633
8634 insn = emit_label_after (pool->label, insn);
8635 INSN_ADDRESSES_NEW (insn, -1);
8636
86428198 8637 /* emit_pool_after will be set by s390_mainpool_start to the
8638 last insn of the section where the literal pool should be
8639 emitted. */
8640 insn = pool->emit_pool_after;
8641
c2c1332a 8642 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
8643 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
8644
8645 s390_dump_pool (pool, 1);
8646 }
8647
8648 /* Otherwise, we emit an inline literal pool and use BASR to branch
8649 over it, setting up the pool register at the same time. */
8650 else
8651 {
ed7591be 8652 rtx_code_label *pool_end = gen_label_rtx ();
c2c1332a 8653
ed7591be 8654 rtx pat = gen_main_base_31_large (base_reg, pool->label, pool_end);
8655 rtx_insn *insn = emit_jump_insn_after (pat, pool->pool_insn);
12f0f6d7 8656 JUMP_LABEL (insn) = pool_end;
c2c1332a 8657 INSN_ADDRESSES_NEW (insn, -1);
8658 remove_insn (pool->pool_insn);
8659
8660 insn = emit_label_after (pool->label, insn);
8661 INSN_ADDRESSES_NEW (insn, -1);
8662
8663 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
8664 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
8665
8666 insn = emit_label_after (pool_end, pool->pool_insn);
8667 INSN_ADDRESSES_NEW (insn, -1);
8668
8669 s390_dump_pool (pool, 1);
8670 }
8671
8672
8673 /* Replace all literal pool references. */
8674
91a55c11 8675 for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
c2c1332a 8676 {
8677 if (INSN_P (insn))
20074f87 8678 replace_ltrel_base (&PATTERN (insn));
c2c1332a 8679
aa90bb35 8680 if (NONJUMP_INSN_P (insn) || CALL_P (insn))
c2c1332a 8681 {
8682 rtx addr, pool_ref = NULL_RTX;
8683 find_constant_pool_ref (PATTERN (insn), &pool_ref);
8684 if (pool_ref)
8685 {
d345b493 8686 if (s390_execute_label (insn))
8687 addr = s390_find_execute (pool, insn);
8688 else
8689 addr = s390_find_constant (pool, get_pool_constant (pool_ref),
8690 get_pool_mode (pool_ref));
8691
c2c1332a 8692 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
8693 INSN_CODE (insn) = -1;
8694 }
8695 }
8696 }
8697
8698
8699 /* Free the pool. */
8700 s390_free_pool (pool);
8701}
8702
8703/* POOL holds the main literal pool as collected by s390_mainpool_start.
8704 We have decided we cannot use this pool, so revert all changes
8705 to the current function that were done by s390_mainpool_start. */
8706static void
8707s390_mainpool_cancel (struct constant_pool *pool)
8708{
8709 /* We didn't actually change the instruction stream, so simply
8710 free the pool memory. */
8711 s390_free_pool (pool);
8712}
8713
8714
20074f87 8715/* Chunkify the literal pool. */
4673c1a0 8716
0756cebb 8717#define S390_POOL_CHUNK_MIN 0xc00
8718#define S390_POOL_CHUNK_MAX 0xe00
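/* Note (added rationale, a sketch): base + displacement addressing has a
   12-bit unsigned displacement, i.e. it reaches at most 4096 bytes, so
   chunks are capped well below 0x1000, presumably to leave slack for
   alignment padding and for entries added after the size estimate.  */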
8719
f81e845f 8720static struct constant_pool *
20074f87 8721s390_chunkify_start (void)
4673c1a0 8722{
0756cebb 8723 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
8724 int extra_size = 0;
8725 bitmap far_labels;
12ef3745 8726 rtx pending_ltrel = NULL_RTX;
93e0956b 8727 rtx_insn *insn;
4673c1a0 8728
b40da9a7 8729 rtx (*gen_reload_base) (rtx, rtx) =
dafc8d45 8730    TARGET_CPU_ZARCH ? gen_reload_base_64 : gen_reload_base_31;
96be3ab6 8731
8732
9a2a66ae 8733 /* We need correct insn addresses. */
8734
8735 shorten_branches (get_insns ());
8736
12ef3745 8737 /* Scan all insns and move literals to pool chunks. */
479ca6e8 8738
479ca6e8 8739 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4673c1a0 8740 {
86428198 8741 bool section_switch_p = false;
8742
12ef3745 8743 /* Check for pending LTREL_BASE. */
8744 if (INSN_P (insn))
8745 {
8746 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
8747 if (ltrel_base)
8748 {
32eda510 8749 gcc_assert (ltrel_base == pending_ltrel);
8750 pending_ltrel = NULL_RTX;
12ef3745 8751 }
8752 }
8753
babfdedf 8754 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
d345b493 8755 {
8756 if (!curr_pool)
8757 curr_pool = s390_start_pool (&pool_list, insn);
8758
8759 s390_add_execute (curr_pool, insn);
8760 s390_add_pool_insn (curr_pool, insn);
8761 }
aa90bb35 8762 else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
0756cebb 8763 {
96be3ab6 8764 rtx pool_ref = NULL_RTX;
0756cebb 8765 find_constant_pool_ref (PATTERN (insn), &pool_ref);
8766 if (pool_ref)
8767 {
12ef3745 8768 rtx constant = get_pool_constant (pool_ref);
3754d046 8769 machine_mode mode = get_pool_mode (pool_ref);
12ef3745 8770
0756cebb 8771 if (!curr_pool)
8772 curr_pool = s390_start_pool (&pool_list, insn);
8773
12ef3745 8774 s390_add_constant (curr_pool, constant, mode);
96be3ab6 8775 s390_add_pool_insn (curr_pool, insn);
96be3ab6 8776
12ef3745 8777	      /* Don't split the pool chunk between an LTREL_OFFSET load
8778 and the corresponding LTREL_BASE. */
8779 if (GET_CODE (constant) == CONST
8780 && GET_CODE (XEXP (constant, 0)) == UNSPEC
8781 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
8782 {
32eda510 8783 gcc_assert (!pending_ltrel);
12ef3745 8784 pending_ltrel = pool_ref;
8785 }
0756cebb 8786 }
8787 }
8788
91f71fa3 8789 if (JUMP_P (insn) || JUMP_TABLE_DATA_P (insn) || LABEL_P (insn))
12ef3745 8790 {
8791 if (curr_pool)
8792 s390_add_pool_insn (curr_pool, insn);
8793 /* An LTREL_BASE must follow within the same basic block. */
32eda510 8794 gcc_assert (!pending_ltrel);
12ef3745 8795 }
96be3ab6 8796
414bc417 8797 if (NOTE_P (insn))
8798 switch (NOTE_KIND (insn))
8799 {
8800 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
8801 section_switch_p = true;
8802 break;
8803 case NOTE_INSN_VAR_LOCATION:
8804 case NOTE_INSN_CALL_ARG_LOCATION:
8805 continue;
8806 default:
8807 break;
8808 }
86428198 8809
f81e845f 8810 if (!curr_pool
0756cebb 8811 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
8812 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
4673c1a0 8813 continue;
479ca6e8 8814
dafc8d45 8815 if (TARGET_CPU_ZARCH)
4673c1a0 8816 {
0756cebb 8817 if (curr_pool->size < S390_POOL_CHUNK_MAX)
8818 continue;
479ca6e8 8819
93e0956b 8820 s390_end_pool (curr_pool, NULL);
0756cebb 8821 curr_pool = NULL;
8822 }
8823 else
4673c1a0 8824 {
0756cebb 8825 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
b40da9a7 8826 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
0756cebb 8827 + extra_size;
8828
8829 /* We will later have to insert base register reload insns.
8830 Those will have an effect on code size, which we need to
8831 consider here. This calculation makes rather pessimistic
8832 worst-case assumptions. */
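	  /* Rationale sketch: each label may later need one
	     base-register reload insn of up to 6 bytes, hence the
	     per-label increment below.  */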
aa90bb35 8833 if (LABEL_P (insn))
0756cebb 8834 extra_size += 6;
0756cebb 8835
8836 if (chunk_size < S390_POOL_CHUNK_MIN
86428198 8837 && curr_pool->size < S390_POOL_CHUNK_MIN
8838 && !section_switch_p)
0756cebb 8839 continue;
8840
8841 /* Pool chunks can only be inserted after BARRIERs ... */
aa90bb35 8842 if (BARRIER_P (insn))
0756cebb 8843 {
8844 s390_end_pool (curr_pool, insn);
8845 curr_pool = NULL;
8846 extra_size = 0;
8847 }
8848
8849 /* ... so if we don't find one in time, create one. */
86428198 8850 else if (chunk_size > S390_POOL_CHUNK_MAX
8851 || curr_pool->size > S390_POOL_CHUNK_MAX
8852 || section_switch_p)
0756cebb 8853 {
93e0956b 8854 rtx_insn *label, *jump, *barrier, *next, *prev;
0756cebb 8855
86428198 8856 if (!section_switch_p)
8857 {
8858 /* We can insert the barrier only after a 'real' insn. */
aa90bb35 8859 if (! NONJUMP_INSN_P (insn) && ! CALL_P (insn))
86428198 8860 continue;
8861 if (get_attr_length (insn) == 0)
8862 continue;
8863 /* Don't separate LTREL_BASE from the corresponding
414bc417 8864 LTREL_OFFSET load. */
86428198 8865 if (pending_ltrel)
8866 continue;
414bc417 8867 next = insn;
8868 do
8869 {
8870 insn = next;
8871 next = NEXT_INSN (insn);
8872 }
8873 while (next
8874 && NOTE_P (next)
8875 && (NOTE_KIND (next) == NOTE_INSN_VAR_LOCATION
8876 || NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION));
86428198 8877 }
8878 else
8879 {
8880 gcc_assert (!pending_ltrel);
8881
8882 /* The old pool has to end before the section switch
8883 note in order to make it part of the current
8884 section. */
8885 insn = PREV_INSN (insn);
8886 }
96be3ab6 8887
b40da9a7 8888 label = gen_label_rtx ();
414bc417 8889 prev = insn;
8890 if (prev && NOTE_P (prev))
8891 prev = prev_nonnote_insn (prev);
8892 if (prev)
8893 jump = emit_jump_insn_after_setloc (gen_jump (label), insn,
d53c050c 8894 INSN_LOCATION (prev));
414bc417 8895 else
8896 jump = emit_jump_insn_after_noloc (gen_jump (label), insn);
0756cebb 8897 barrier = emit_barrier_after (jump);
8898 insn = emit_label_after (label, barrier);
8899 JUMP_LABEL (jump) = label;
8900 LABEL_NUSES (label) = 1;
8901
96be3ab6 8902 INSN_ADDRESSES_NEW (jump, -1);
8903 INSN_ADDRESSES_NEW (barrier, -1);
0756cebb 8904 INSN_ADDRESSES_NEW (insn, -1);
8905
8906 s390_end_pool (curr_pool, barrier);
8907 curr_pool = NULL;
8908 extra_size = 0;
8909 }
479ca6e8 8910 }
4673c1a0 8911 }
9fa6d5d9 8912
96be3ab6 8913 if (curr_pool)
93e0956b 8914 s390_end_pool (curr_pool, NULL);
32eda510 8915 gcc_assert (!pending_ltrel);
0756cebb 8916
f81e845f 8917 /* Find all labels that are branched into
479ca6e8 8918 from an insn belonging to a different chunk. */
9fa6d5d9 8919
4d6e8511 8920 far_labels = BITMAP_ALLOC (NULL);
a8ef833a 8921
479ca6e8 8922 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4673c1a0 8923 {
c86d86ff 8924 rtx_jump_table_data *table;
245402e7 8925
0756cebb 8926      /* Labels marked with LABEL_PRESERVE_P can be the target
8927 of non-local jumps, so we have to mark them.
8928 The same holds for named labels.
8929
8930 Don't do that, however, if it is the label before
8931 a jump table. */
8932
aa90bb35 8933 if (LABEL_P (insn)
0756cebb 8934 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
8935 {
93e0956b 8936 rtx_insn *vec_insn = NEXT_INSN (insn);
77985f1a 8937 if (! vec_insn || ! JUMP_TABLE_DATA_P (vec_insn))
0756cebb 8938 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
8939 }
245402e7 8940 /* Check potential targets in a table jump (casesi_jump). */
8941 else if (tablejump_p (insn, NULL, &table))
8942 {
8943 rtx vec_pat = PATTERN (table);
8944 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
8945
8946 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
8947 {
8948 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
0756cebb 8949
245402e7 8950 if (s390_find_pool (pool_list, label)
8951 != s390_find_pool (pool_list, insn))
8952 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
8953 }
8954 }
8955 /* If we have a direct jump (conditional or unconditional),
8956 check all potential targets. */
aa90bb35 8957 else if (JUMP_P (insn))
479ca6e8 8958 {
245402e7 8959 rtx pat = PATTERN (insn);
0cd9a9a9 8960
245402e7 8961 if (GET_CODE (pat) == PARALLEL)
3c482144 8962 pat = XVECEXP (pat, 0, 0);
8963
245402e7 8964 if (GET_CODE (pat) == SET)
8965 {
96be3ab6 8966 rtx label = JUMP_LABEL (insn);
7a64c761 8967 if (label && !ANY_RETURN_P (label))
479ca6e8 8968 {
245402e7 8969 if (s390_find_pool (pool_list, label)
0756cebb 8970 != s390_find_pool (pool_list, insn))
8971 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
479ca6e8 8972 }
0756cebb 8973 }
245402e7 8974 }
4673c1a0 8975 }
9fa6d5d9 8976
0756cebb 8977 /* Insert base register reload insns before every pool. */
8978
8979 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
96be3ab6 8980 {
ffead1ca 8981 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
20074f87 8982 curr_pool->label);
93e0956b 8983 rtx_insn *insn = curr_pool->first_insn;
96be3ab6 8984 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
8985 }
0756cebb 8986
8987 /* Insert base register reload insns at every far label. */
479ca6e8 8988
479ca6e8 8989 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
aa90bb35 8990 if (LABEL_P (insn)
0756cebb 8991 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
8992 {
8993 struct constant_pool *pool = s390_find_pool (pool_list, insn);
8994 if (pool)
8995 {
ffead1ca 8996 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
20074f87 8997 pool->label);
96be3ab6 8998 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
0756cebb 8999 }
9000 }
9001
96be3ab6 9002
4d6e8511 9003 BITMAP_FREE (far_labels);
479ca6e8 9004
479ca6e8 9005
9006 /* Recompute insn addresses. */
9007
9008 init_insn_lengths ();
9009 shorten_branches (get_insns ());
4673c1a0 9010
96be3ab6 9011 return pool_list;
9012}
4673c1a0 9013
96be3ab6 9014/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
f81e845f 9015 After we have decided to use this list, finish implementing
20074f87 9016 all changes to the current function as required. */
f81e845f 9017
96be3ab6 9018static void
20074f87 9019s390_chunkify_finish (struct constant_pool *pool_list)
96be3ab6 9020{
96be3ab6 9021 struct constant_pool *curr_pool = NULL;
93e0956b 9022 rtx_insn *insn;
f81e845f 9023
9024
96be3ab6 9025 /* Replace all literal pool references. */
9026
f81e845f 9027 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
96be3ab6 9028 {
12ef3745 9029 if (INSN_P (insn))
20074f87 9030 replace_ltrel_base (&PATTERN (insn));
12ef3745 9031
96be3ab6 9032 curr_pool = s390_find_pool (pool_list, insn);
9033 if (!curr_pool)
9034 continue;
9035
aa90bb35 9036 if (NONJUMP_INSN_P (insn) || CALL_P (insn))
96be3ab6 9037 {
9038 rtx addr, pool_ref = NULL_RTX;
9039 find_constant_pool_ref (PATTERN (insn), &pool_ref);
9040 if (pool_ref)
9041 {
d345b493 9042 if (s390_execute_label (insn))
9043 addr = s390_find_execute (curr_pool, insn);
9044 else
9045 addr = s390_find_constant (curr_pool,
9046 get_pool_constant (pool_ref),
9047 get_pool_mode (pool_ref));
9048
96be3ab6 9049 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
9050 INSN_CODE (insn) = -1;
9051 }
96be3ab6 9052 }
9053 }
9054
9055 /* Dump out all literal pools. */
f81e845f 9056
96be3ab6 9057 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
c2c1332a 9058 s390_dump_pool (curr_pool, 0);
f81e845f 9059
96be3ab6 9060 /* Free pool list. */
9061
9062 while (pool_list)
9063 {
9064 struct constant_pool *next = pool_list->next;
9065 s390_free_pool (pool_list);
9066 pool_list = next;
9067 }
9068}
9069
9070/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
9071 We have decided we cannot use this list, so revert all changes
9072 to the current function that were done by s390_chunkify_start. */
f81e845f 9073
96be3ab6 9074static void
b40da9a7 9075s390_chunkify_cancel (struct constant_pool *pool_list)
96be3ab6 9076{
9077 struct constant_pool *curr_pool = NULL;
93e0956b 9078 rtx_insn *insn;
96be3ab6 9079
9080 /* Remove all pool placeholder insns. */
9081
9082 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
9083 {
9084 /* Did we insert an extra barrier? Remove it. */
93e0956b 9085 rtx_insn *barrier = PREV_INSN (curr_pool->pool_insn);
 9086      rtx_insn *jump = barrier ? PREV_INSN (barrier) : NULL;
9087 rtx_insn *label = NEXT_INSN (curr_pool->pool_insn);
96be3ab6 9088
aa90bb35 9089 if (jump && JUMP_P (jump)
9090 && barrier && BARRIER_P (barrier)
9091 && label && LABEL_P (label)
96be3ab6 9092 && GET_CODE (PATTERN (jump)) == SET
9093 && SET_DEST (PATTERN (jump)) == pc_rtx
9094 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
9095 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
9096 {
9097 remove_insn (jump);
9098 remove_insn (barrier);
9099 remove_insn (label);
0756cebb 9100 }
4673c1a0 9101
96be3ab6 9102 remove_insn (curr_pool->pool_insn);
9103 }
9104
12ef3745 9105 /* Remove all base register reload insns. */
96be3ab6 9106
9107 for (insn = get_insns (); insn; )
9108 {
93e0956b 9109 rtx_insn *next_insn = NEXT_INSN (insn);
96be3ab6 9110
aa90bb35 9111 if (NONJUMP_INSN_P (insn)
96be3ab6 9112 && GET_CODE (PATTERN (insn)) == SET
9113 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
12ef3745 9114 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
96be3ab6 9115 remove_insn (insn);
4673c1a0 9116
96be3ab6 9117 insn = next_insn;
9118 }
9119
9120 /* Free pool list. */
4673c1a0 9121
0756cebb 9122 while (pool_list)
4673c1a0 9123 {
0756cebb 9124 struct constant_pool *next = pool_list->next;
9125 s390_free_pool (pool_list);
9126 pool_list = next;
4673c1a0 9127 }
4673c1a0 9128}
9129
74d2529d 9130/* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
df82fb76 9131
9132void
3754d046 9133s390_output_pool_entry (rtx exp, machine_mode mode, unsigned int align)
df82fb76 9134{
df82fb76 9135 switch (GET_MODE_CLASS (mode))
9136 {
9137 case MODE_FLOAT:
36868490 9138 case MODE_DECIMAL_FLOAT:
32eda510 9139 gcc_assert (GET_CODE (exp) == CONST_DOUBLE);
df82fb76 9140
945f7b03 9141 assemble_real (*CONST_DOUBLE_REAL_VALUE (exp), mode, align);
df82fb76 9142 break;
9143
9144 case MODE_INT:
74d2529d 9145 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
af2a449c 9146 mark_symbol_refs_as_used (exp);
df82fb76 9147 break;
9148
76a4c804 9149 case MODE_VECTOR_INT:
9150 case MODE_VECTOR_FLOAT:
9151 {
9152 int i;
9153 machine_mode inner_mode;
9154 gcc_assert (GET_CODE (exp) == CONST_VECTOR);
9155
9156 inner_mode = GET_MODE_INNER (GET_MODE (exp));
9157 for (i = 0; i < XVECLEN (exp, 0); i++)
9158 s390_output_pool_entry (XVECEXP (exp, 0, i),
9159 inner_mode,
9160 i == 0
9161 ? align
9162 : GET_MODE_BITSIZE (inner_mode));
9163 }
9164 break;
9165
df82fb76 9166 default:
32eda510 9167 gcc_unreachable ();
df82fb76 9168 }
9169}
9170
9171
875862bf 9172/* Return an RTL expression representing the value of the return address
9173 for the frame COUNT steps up from the current frame. FRAME is the
9174 frame pointer of that frame. */
0756cebb 9175
875862bf 9176rtx
9177s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
0756cebb 9178{
875862bf 9179 int offset;
9180 rtx addr;
96be3ab6 9181
875862bf 9182 /* Without backchain, we fail for all but the current frame. */
9a2a66ae 9183
875862bf 9184 if (!TARGET_BACKCHAIN && count > 0)
9185 return NULL_RTX;
9a2a66ae 9186
875862bf 9187 /* For the current frame, we need to make sure the initial
9188 value of RETURN_REGNUM is actually saved. */
9a2a66ae 9189
875862bf 9190 if (count == 0)
9a2a66ae 9191 {
1e639cb0 9192 /* On non-z architectures branch splitting could overwrite r14. */
9193 if (TARGET_CPU_ZARCH)
9194 return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
9195 else
9196 {
9197 cfun_frame_layout.save_return_addr_p = true;
9198 return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
9199 }
875862bf 9200 }
9a2a66ae 9201
875862bf 9202 if (TARGET_PACKED_STACK)
b5fdc416 9203 offset = -2 * UNITS_PER_LONG;
875862bf 9204 else
b5fdc416 9205 offset = RETURN_REGNUM * UNITS_PER_LONG;
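  /* Worked example (usual 64-bit values): UNITS_PER_LONG == 8 and
     RETURN_REGNUM == 14 give offset 112, the r14 slot within the
     register save area of the frame.  */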
9a2a66ae 9206
29c05e22 9207 addr = plus_constant (Pmode, frame, offset);
875862bf 9208 addr = memory_address (Pmode, addr);
9209 return gen_rtx_MEM (Pmode, addr);
9210}
9a2a66ae 9211
875862bf 9212/* Return an RTL expression representing the back chain stored in
9213 the current stack frame. */
5fe74ca1 9214
875862bf 9215rtx
9216s390_back_chain_rtx (void)
9217{
9218 rtx chain;
5fe74ca1 9219
875862bf 9220 gcc_assert (TARGET_BACKCHAIN);
5fe74ca1 9221
875862bf 9222 if (TARGET_PACKED_STACK)
29c05e22 9223 chain = plus_constant (Pmode, stack_pointer_rtx,
b5fdc416 9224 STACK_POINTER_OFFSET - UNITS_PER_LONG);
875862bf 9225 else
9226 chain = stack_pointer_rtx;
5fe74ca1 9227
875862bf 9228 chain = gen_rtx_MEM (Pmode, chain);
9229 return chain;
9230}
9a2a66ae 9231
875862bf 9232/* Find the first call-clobbered register unused in the current
 9233   function.  This could be used as a base register in a leaf function
 9234   or for holding the return address before the epilogue.  */
9a2a66ae 9235
875862bf 9236static int
9237find_unused_clobbered_reg (void)
9238{
9239 int i;
9240 for (i = 0; i < 6; i++)
3072d30e 9241 if (!df_regs_ever_live_p (i))
875862bf 9242 return i;
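  /* All of the call-clobbered GPRs r0..r5 are live; fall back to r0.  */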
9243 return 0;
9244}
9a2a66ae 9245
1e639cb0 9246
ffead1ca 9247/* Helper function for s390_regs_ever_clobbered. Sets the fields in DATA for all
1e639cb0 9248 clobbered hard regs in SETREG. */
9249
9250static void
81a410b1 9251s390_reg_clobbered_rtx (rtx setreg, const_rtx set_insn ATTRIBUTE_UNUSED, void *data)
1e639cb0 9252{
ff4ce128 9253 char *regs_ever_clobbered = (char *)data;
1e639cb0 9254 unsigned int i, regno;
3754d046 9255 machine_mode mode = GET_MODE (setreg);
1e639cb0 9256
9257 if (GET_CODE (setreg) == SUBREG)
9258 {
9259 rtx inner = SUBREG_REG (setreg);
5ada7a14 9260 if (!GENERAL_REG_P (inner) && !FP_REG_P (inner))
1e639cb0 9261 return;
9262 regno = subreg_regno (setreg);
9263 }
5ada7a14 9264 else if (GENERAL_REG_P (setreg) || FP_REG_P (setreg))
1e639cb0 9265 regno = REGNO (setreg);
9266 else
9267 return;
9268
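  /* A single value can span several hard registers (e.g. a 64-bit
     value in a 31-bit GPR pair); mark every register it covers.  */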
9269 for (i = regno;
9270 i < regno + HARD_REGNO_NREGS (regno, mode);
9271 i++)
9272 regs_ever_clobbered[i] = 1;
9273}
9274
9275/* Walks through all basic blocks of the current function looking
9276 for clobbered hard regs using s390_reg_clobbered_rtx. The fields
 9277   of the passed char array REGS_EVER_CLOBBERED are set to one for
9278 each of those regs. */
9279
9280static void
ff4ce128 9281s390_regs_ever_clobbered (char regs_ever_clobbered[])
1e639cb0 9282{
9283 basic_block cur_bb;
93e0956b 9284 rtx_insn *cur_insn;
1e639cb0 9285 unsigned int i;
9286
ff4ce128 9287 memset (regs_ever_clobbered, 0, 32);
1e639cb0 9288
9289 /* For non-leaf functions we have to consider all call clobbered regs to be
9290 clobbered. */
d5bf7b64 9291 if (!crtl->is_leaf)
1e639cb0 9292 {
5ada7a14 9293 for (i = 0; i < 32; i++)
1e639cb0 9294 regs_ever_clobbered[i] = call_really_used_regs[i];
9295 }
9296
9297 /* Make the "magic" eh_return registers live if necessary. For regs_ever_live
9298 this work is done by liveness analysis (mark_regs_live_at_end).
9299 Special care is needed for functions containing landing pads. Landing pads
9300 may use the eh registers, but the code which sets these registers is not
9301 contained in that function. Hence s390_regs_ever_clobbered is not able to
9302 deal with this automatically. */
18d50ae6 9303 if (crtl->calls_eh_return || cfun->machine->has_landing_pad_p)
1e639cb0 9304 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++)
ffead1ca 9305 if (crtl->calls_eh_return
9306 || (cfun->machine->has_landing_pad_p
3072d30e 9307 && df_regs_ever_live_p (EH_RETURN_DATA_REGNO (i))))
220be973 9308 regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1;
1e639cb0 9309
9310 /* For nonlocal gotos all call-saved registers have to be saved.
9311 This flag is also set for the unwinding code in libgcc.
9312 See expand_builtin_unwind_init. For regs_ever_live this is done by
9313 reload. */
ff4ce128 9314 if (crtl->saves_all_registers)
5ada7a14 9315 for (i = 0; i < 32; i++)
1e639cb0 9316 if (!call_really_used_regs[i])
9317 regs_ever_clobbered[i] = 1;
9318
fc00614f 9319 FOR_EACH_BB_FN (cur_bb, cfun)
1e639cb0 9320 {
9321 FOR_BB_INSNS (cur_bb, cur_insn)
9322 {
ff4ce128 9323 rtx pat;
9324
9325 if (!INSN_P (cur_insn))
9326 continue;
9327
9328 pat = PATTERN (cur_insn);
9329
9330 /* Ignore GPR restore insns. */
9331 if (epilogue_completed && RTX_FRAME_RELATED_P (cur_insn))
9332 {
9333 if (GET_CODE (pat) == SET
9334 && GENERAL_REG_P (SET_DEST (pat)))
9335 {
9336 /* lgdr */
9337 if (GET_MODE (SET_SRC (pat)) == DImode
9338 && FP_REG_P (SET_SRC (pat)))
9339 continue;
9340
9341 /* l / lg */
9342 if (GET_CODE (SET_SRC (pat)) == MEM)
9343 continue;
9344 }
9345
9346 /* lm / lmg */
9347 if (GET_CODE (pat) == PARALLEL
9348 && load_multiple_operation (pat, VOIDmode))
9349 continue;
9350 }
9351
9352 note_stores (pat,
9353 s390_reg_clobbered_rtx,
9354 regs_ever_clobbered);
1e639cb0 9355 }
9356 }
9357}
9358
ffead1ca 9359/* Determine the frame area which actually has to be accessed
9360 in the function epilogue. The values are stored at the
875862bf 9361 given pointers AREA_BOTTOM (address of the lowest used stack
ffead1ca 9362 address) and AREA_TOP (address of the first item which does
875862bf 9363 not belong to the stack frame). */
5fe74ca1 9364
875862bf 9365static void
9366s390_frame_area (int *area_bottom, int *area_top)
9367{
9368 int b, t;
5fe74ca1 9369
875862bf 9370 b = INT_MAX;
9371 t = INT_MIN;
67928721 9372
9373 if (cfun_frame_layout.first_restore_gpr != -1)
9374 {
9375 b = (cfun_frame_layout.gprs_offset
b5fdc416 9376 + cfun_frame_layout.first_restore_gpr * UNITS_PER_LONG);
67928721 9377 t = b + (cfun_frame_layout.last_restore_gpr
b5fdc416 9378 - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_LONG;
67928721 9379 }
9380
9381 if (TARGET_64BIT && cfun_save_high_fprs_p)
9382 {
9383 b = MIN (b, cfun_frame_layout.f8_offset);
9384 t = MAX (t, (cfun_frame_layout.f8_offset
9385 + cfun_frame_layout.high_fprs * 8));
9386 }
9387
9388 if (!TARGET_64BIT)
29439367 9389 {
6a2469fe 9390 if (cfun_fpr_save_p (FPR4_REGNUM))
67928721 9391 {
29439367 9392 b = MIN (b, cfun_frame_layout.f4_offset);
9393 t = MAX (t, cfun_frame_layout.f4_offset + 8);
67928721 9394 }
6a2469fe 9395 if (cfun_fpr_save_p (FPR6_REGNUM))
29439367 9396 {
9397 b = MIN (b, cfun_frame_layout.f4_offset + 8);
9398 t = MAX (t, cfun_frame_layout.f4_offset + 16);
9399 }
9400 }
67928721 9401 *area_bottom = b;
9402 *area_top = t;
9403}
ff4ce128 9404/* Update gpr_save_slots in the frame layout trying to make use of
9405 FPRs as GPR save slots.
9406 This is a helper routine of s390_register_info. */
8b4a4127 9407
9408static void
ff4ce128 9409s390_register_info_gprtofpr ()
8b4a4127 9410{
ff4ce128 9411 int save_reg_slot = FPR0_REGNUM;
8b4a4127 9412 int i, j;
8b4a4127 9413
ff4ce128 9414 if (!TARGET_Z10 || !TARGET_HARD_FLOAT || !crtl->is_leaf)
9415 return;
1e639cb0 9416
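  /* Idea (sketch): z10 provides lgdr/ldgr to copy a 64-bit GPR
     to/from an FPR, so in a leaf function an otherwise unused
     call-clobbered FPR can serve as a GPR save slot, avoiding the
     stm/lm memory accesses entirely.  */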
ff4ce128 9417 for (i = 15; i >= 6; i--)
5ada7a14 9418 {
1d3cea74 9419 if (cfun_gpr_save_slot (i) == SAVE_SLOT_NONE)
ff4ce128 9420 continue;
68bc0408 9421
ff4ce128 9422 /* Advance to the next FP register which can be used as a
9423 GPR save slot. */
9424 while ((!call_really_used_regs[save_reg_slot]
9425 || df_regs_ever_live_p (save_reg_slot)
9426 || cfun_fpr_save_p (save_reg_slot))
9427 && FP_REGNO_P (save_reg_slot))
9428 save_reg_slot++;
9429 if (!FP_REGNO_P (save_reg_slot))
9430 {
9431 /* We only want to use ldgr/lgdr if we can get rid of
9432 stm/lm entirely. So undo the gpr slot allocation in
9433 case we ran out of FPR save slots. */
9434 for (j = 6; j <= 15; j++)
9435 if (FP_REGNO_P (cfun_gpr_save_slot (j)))
1d3cea74 9436 cfun_gpr_save_slot (j) = SAVE_SLOT_STACK;
ff4ce128 9437 break;
68bc0408 9438 }
ff4ce128 9439 cfun_gpr_save_slot (i) = save_reg_slot++;
5ada7a14 9440 }
ff4ce128 9441}
5ada7a14 9442
ff4ce128 9443/* Set the bits in fpr_bitmap for FPRs which need to be saved due to
9444 stdarg.
9445 This is a helper routine for s390_register_info. */
1e639cb0 9446
ff4ce128 9447static void
9448s390_register_info_stdarg_fpr ()
9449{
9450 int i;
9451 int min_fpr;
9452 int max_fpr;
9453
9454 /* Save the FP argument regs for stdarg. f0, f2 for 31 bit and
9455 f0-f4 for 64 bit. */
9456 if (!cfun->stdarg
9457 || !TARGET_HARD_FLOAT
9458 || !cfun->va_list_fpr_size
9459 || crtl->args.info.fprs >= FP_ARG_NUM_REG)
9460 return;
9461
9462 min_fpr = crtl->args.info.fprs;
1d3cea74 9463 max_fpr = min_fpr + cfun->va_list_fpr_size - 1;
9464 if (max_fpr >= FP_ARG_NUM_REG)
9465 max_fpr = FP_ARG_NUM_REG - 1;
ff4ce128 9466
1d3cea74 9467 /* FPR argument regs start at f0. */
9468 min_fpr += FPR0_REGNUM;
9469 max_fpr += FPR0_REGNUM;
9470
9471 for (i = min_fpr; i <= max_fpr; i++)
9472 cfun_set_fpr_save (i);
ff4ce128 9473}
9474
9475/* Reserve the GPR save slots for GPRs which need to be saved due to
9476 stdarg.
9477 This is a helper routine for s390_register_info. */
9478
9479static void
9480s390_register_info_stdarg_gpr ()
9481{
9482 int i;
9483 int min_gpr;
9484 int max_gpr;
9485
9486 if (!cfun->stdarg
9487 || !cfun->va_list_gpr_size
9488 || crtl->args.info.gprs >= GP_ARG_NUM_REG)
9489 return;
9490
9491 min_gpr = crtl->args.info.gprs;
1d3cea74 9492 max_gpr = min_gpr + cfun->va_list_gpr_size - 1;
9493 if (max_gpr >= GP_ARG_NUM_REG)
9494 max_gpr = GP_ARG_NUM_REG - 1;
9495
9496 /* GPR argument regs start at r2. */
9497 min_gpr += GPR2_REGNUM;
9498 max_gpr += GPR2_REGNUM;
9499
9500 /* If r6 was supposed to be saved into an FPR and now needs to go to
9501 the stack for vararg we have to adjust the restore range to make
9502 sure that the restore is done from stack as well. */
9503 if (FP_REGNO_P (cfun_gpr_save_slot (GPR6_REGNUM))
9504 && min_gpr <= GPR6_REGNUM
9505 && max_gpr >= GPR6_REGNUM)
9506 {
9507 if (cfun_frame_layout.first_restore_gpr == -1
9508 || cfun_frame_layout.first_restore_gpr > GPR6_REGNUM)
9509 cfun_frame_layout.first_restore_gpr = GPR6_REGNUM;
9510 if (cfun_frame_layout.last_restore_gpr == -1
9511 || cfun_frame_layout.last_restore_gpr < GPR6_REGNUM)
9512 cfun_frame_layout.last_restore_gpr = GPR6_REGNUM;
9513 }
9514
9515 if (cfun_frame_layout.first_save_gpr == -1
9516 || cfun_frame_layout.first_save_gpr > min_gpr)
9517 cfun_frame_layout.first_save_gpr = min_gpr;
9518
9519 if (cfun_frame_layout.last_save_gpr == -1
9520 || cfun_frame_layout.last_save_gpr < max_gpr)
9521 cfun_frame_layout.last_save_gpr = max_gpr;
9522
9523 for (i = min_gpr; i <= max_gpr; i++)
9524 cfun_gpr_save_slot (i) = SAVE_SLOT_STACK;
9525}
9526
9527/* Calculate the save and restore ranges for stm(g) and lm(g) in the
9528 prologue and epilogue. */
ff4ce128 9529
1d3cea74 9530static void
9531s390_register_info_set_ranges ()
9532{
9533 int i, j;
9534
9535 /* Find the first and the last save slot supposed to use the stack
9536 to set the restore range.
9537 Vararg regs might be marked as save to stack but only the
9538 call-saved regs really need restoring (i.e. r6). This code
9539 assumes that the vararg regs have not yet been recorded in
9540 cfun_gpr_save_slot. */
9541 for (i = 0; i < 16 && cfun_gpr_save_slot (i) != SAVE_SLOT_STACK; i++);
9542 for (j = 15; j > i && cfun_gpr_save_slot (j) != SAVE_SLOT_STACK; j--);
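  /* If the first loop ran up to 16, no GPR uses a stack save slot and
     all four ranges below collapse to -1.  */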
9543 cfun_frame_layout.first_restore_gpr = (i == 16) ? -1 : i;
9544 cfun_frame_layout.last_restore_gpr = (i == 16) ? -1 : j;
1d3cea74 9545 cfun_frame_layout.first_save_gpr = (i == 16) ? -1 : i;
9546 cfun_frame_layout.last_save_gpr = (i == 16) ? -1 : j;
ff4ce128 9547}
9548
9549/* The GPR and FPR save slots in cfun->machine->frame_layout are set
9550 for registers which need to be saved in function prologue.
9551 This function can be used until the insns emitted for save/restore
9552 of the regs are visible in the RTL stream. */
9553
9554static void
9555s390_register_info ()
9556{
1d3cea74 9557 int i;
ff4ce128 9558 char clobbered_regs[32];
9559
9560 gcc_assert (!epilogue_completed);
9561
9562 if (reload_completed)
9563 /* After reload we rely on our own routine to determine which
9564 registers need saving. */
9565 s390_regs_ever_clobbered (clobbered_regs);
9566 else
9567 /* During reload we use regs_ever_live as a base since reload
9568 does changes in there which we otherwise would not be aware
9569 of. */
9570 for (i = 0; i < 32; i++)
9571 clobbered_regs[i] = df_regs_ever_live_p (i);
9572
9573 for (i = 0; i < 32; i++)
9574 clobbered_regs[i] = clobbered_regs[i] && !global_regs[i];
9575
9576 /* Mark the call-saved FPRs which need to be saved.
9577 This needs to be done before checking the special GPRs since the
9578 stack pointer usage depends on whether high FPRs have to be saved
9579 or not. */
9580 cfun_frame_layout.fpr_bitmap = 0;
9581 cfun_frame_layout.high_fprs = 0;
9582 for (i = FPR0_REGNUM; i <= FPR15_REGNUM; i++)
9583 if (clobbered_regs[i] && !call_really_used_regs[i])
9584 {
9585 cfun_set_fpr_save (i);
9586 if (i >= FPR8_REGNUM)
9587 cfun_frame_layout.high_fprs++;
9588 }
9a2a66ae 9589
c6d481f7 9590  /* Register 12 is used for the GOT address, but also as a temp in the prologue
9591 for split-stack stdarg functions (unless r14 is available). */
9592 clobbered_regs[12]
9593 |= ((flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
9594 || (flag_split_stack && cfun->stdarg
9595 && (crtl->is_leaf || TARGET_TPF_PROFILING
9596 || has_hard_reg_initial_val (Pmode, RETURN_REGNUM))));
4fed3f99 9597
ffead1ca 9598 clobbered_regs[BASE_REGNUM]
77beec48 9599 |= (cfun->machine->base_reg
ff4ce128 9600 && REGNO (cfun->machine->base_reg) == BASE_REGNUM);
4fed3f99 9601
ff4ce128 9602 clobbered_regs[HARD_FRAME_POINTER_REGNUM]
9603 |= !!frame_pointer_needed;
9604
 9605      /* On pre-z900 machines this might not be decided until the
 9606         machine-dependent reorg pass.
9607 save_return_addr_p will only be set on non-zarch machines so
9608 there is no risk that r14 goes into an FPR instead of a stack
9609 slot. */
1e639cb0 9610 clobbered_regs[RETURN_REGNUM]
d5bf7b64 9611 |= (!crtl->is_leaf
9bee2845 9612 || TARGET_TPF_PROFILING
77beec48 9613 || cfun->machine->split_branches_pending_p
9614 || cfun_frame_layout.save_return_addr_p
ff4ce128 9615 || crtl->calls_eh_return);
4fed3f99 9616
1e639cb0 9617 clobbered_regs[STACK_POINTER_REGNUM]
d5bf7b64 9618 |= (!crtl->is_leaf
77beec48 9619 || TARGET_TPF_PROFILING
9620 || cfun_save_high_fprs_p
9621 || get_frame_size () > 0
68bc0408 9622 || (reload_completed && cfun_frame_layout.frame_size > 0)
ff4ce128 9623 || cfun->calls_alloca);
9624
1d3cea74 9625 memset (cfun_frame_layout.gpr_save_slots, SAVE_SLOT_NONE, 16);
1e639cb0 9626
beee1f75 9627 for (i = 6; i < 16; i++)
ff4ce128 9628 if (clobbered_regs[i])
1d3cea74 9629 cfun_gpr_save_slot (i) = SAVE_SLOT_STACK;
9a2a66ae 9630
ff4ce128 9631 s390_register_info_stdarg_fpr ();
9632 s390_register_info_gprtofpr ();
1d3cea74 9633 s390_register_info_set_ranges ();
ff4ce128 9634 /* stdarg functions might need to save GPRs 2 to 6. This might
1d3cea74 9635 override the GPR->FPR save decision made by
9636 s390_register_info_gprtofpr for r6 since vararg regs must go to
9637 the stack. */
ff4ce128 9638 s390_register_info_stdarg_gpr ();
ff4ce128 9639}
9a2a66ae 9640
ff4ce128 9641/* This function is called by s390_optimize_prologue in order to get
9642 rid of unnecessary GPR save/restore instructions. The register info
9643 for the GPRs is re-computed and the ranges are re-calculated. */
6902d973 9644
ff4ce128 9645static void
9646s390_optimize_register_info ()
9647{
9648 char clobbered_regs[32];
1d3cea74 9649 int i;
6902d973 9650
ff4ce128 9651 gcc_assert (epilogue_completed);
9652 gcc_assert (!cfun->machine->split_branches_pending_p);
beee1f75 9653
ff4ce128 9654 s390_regs_ever_clobbered (clobbered_regs);
6902d973 9655
ff4ce128 9656 for (i = 0; i < 32; i++)
9657 clobbered_regs[i] = clobbered_regs[i] && !global_regs[i];
6902d973 9658
ff4ce128 9659 /* There is still special treatment needed for cases invisible to
9660 s390_regs_ever_clobbered. */
9661 clobbered_regs[RETURN_REGNUM]
9662 |= (TARGET_TPF_PROFILING
9663 /* When expanding builtin_return_addr in ESA mode we do not
9664 know whether r14 will later be needed as scratch reg when
9665 doing branch splitting. So the builtin always accesses the
9666 r14 save slot and we need to stick to the save/restore
9667 decision for r14 even if it turns out that it didn't get
9668 clobbered. */
9669 || cfun_frame_layout.save_return_addr_p
9670 || crtl->calls_eh_return);
9671
1d3cea74 9672 memset (cfun_frame_layout.gpr_save_slots, SAVE_SLOT_NONE, 6);
ff4ce128 9673
9674 for (i = 6; i < 16; i++)
9675 if (!clobbered_regs[i])
1d3cea74 9676 cfun_gpr_save_slot (i) = SAVE_SLOT_NONE;
ff4ce128 9677
1d3cea74 9678 s390_register_info_set_ranges ();
ff4ce128 9679 s390_register_info_stdarg_gpr ();
67928721 9680}
9681
4fed3f99 9682/* Fill cfun->machine with info about frame of current function. */
67928721 9683
9684static void
4fed3f99 9685s390_frame_info (void)
67928721 9686{
62eb9236 9687 HOST_WIDE_INT lowest_offset;
67928721 9688
ff4ce128 9689 cfun_frame_layout.first_save_gpr_slot = cfun_frame_layout.first_save_gpr;
9690 cfun_frame_layout.last_save_gpr_slot = cfun_frame_layout.last_save_gpr;
9691
9692 /* The va_arg builtin uses a constant distance of 16 *
9693 UNITS_PER_LONG (r0-r15) to reach the FPRs from the reg_save_area
9694 pointer. So even if we are going to save the stack pointer in an
9695 FPR we need the stack space in order to keep the offsets
9696 correct. */
9697 if (cfun->stdarg && cfun_save_arg_fprs_p)
9698 {
9699 cfun_frame_layout.last_save_gpr_slot = STACK_POINTER_REGNUM;
9700
9701 if (cfun_frame_layout.first_save_gpr_slot == -1)
9702 cfun_frame_layout.first_save_gpr_slot = STACK_POINTER_REGNUM;
9703 }
9704
67928721 9705 cfun_frame_layout.frame_size = get_frame_size ();
67928721 9706 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
c05be867 9707 fatal_error (input_location,
9708 "total size of local variables exceeds architecture limit");
ffead1ca 9709
646a946e 9710 if (!TARGET_PACKED_STACK)
67928721 9711 {
62eb9236 9712 /* Fixed stack layout. */
67928721 9713 cfun_frame_layout.backchain_offset = 0;
b5fdc416 9714 cfun_frame_layout.f0_offset = 16 * UNITS_PER_LONG;
67928721 9715 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
9716 cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
5214e6ae 9717 cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr_slot
b5fdc416 9718 * UNITS_PER_LONG);
67928721 9719 }
62eb9236 9720 else if (TARGET_BACKCHAIN)
67928721 9721 {
62eb9236 9722 /* Kernel stack layout - packed stack, backchain, no float */
9723 gcc_assert (TARGET_SOFT_FLOAT);
67928721 9724 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
b5fdc416 9725 - UNITS_PER_LONG);
62eb9236 9726
9727 /* The distance between the backchain and the return address
9728 save slot must not change. So we always need a slot for the
9729 stack pointer which resides in between. */
9730 cfun_frame_layout.last_save_gpr_slot = STACK_POINTER_REGNUM;
9731
ffead1ca 9732 cfun_frame_layout.gprs_offset
62eb9236 9733 = cfun_frame_layout.backchain_offset - cfun_gprs_save_area_size;
ffead1ca 9734
62eb9236 9735 /* FPRs will not be saved. Nevertheless pick sane values to
9736 keep area calculations valid. */
9737 cfun_frame_layout.f0_offset =
9738 cfun_frame_layout.f4_offset =
9739 cfun_frame_layout.f8_offset = cfun_frame_layout.gprs_offset;
67928721 9740 }
62eb9236 9741 else
67928721 9742 {
031bdf83 9743 int num_fprs;
9744
62eb9236 9745 /* Packed stack layout without backchain. */
ffead1ca 9746
031bdf83 9747 /* With stdarg FPRs need their dedicated slots. */
9748 num_fprs = (TARGET_64BIT && cfun->stdarg ? 2
9749 : (cfun_fpr_save_p (FPR4_REGNUM) +
9750 cfun_fpr_save_p (FPR6_REGNUM)));
9751 cfun_frame_layout.f4_offset = STACK_POINTER_OFFSET - 8 * num_fprs;
9752
9753 num_fprs = (cfun->stdarg ? 2
9754 : (cfun_fpr_save_p (FPR0_REGNUM)
9755 + cfun_fpr_save_p (FPR2_REGNUM)));
9756 cfun_frame_layout.f0_offset = cfun_frame_layout.f4_offset - 8 * num_fprs;
ffead1ca 9757
9758 cfun_frame_layout.gprs_offset
67928721 9759 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
62eb9236 9760
9761 cfun_frame_layout.f8_offset = (cfun_frame_layout.gprs_offset
9762 - cfun_frame_layout.high_fprs * 8);
67928721 9763 }
9764
62eb9236 9765 if (cfun_save_high_fprs_p)
9766 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;
9767
9768 if (!crtl->is_leaf)
9769 cfun_frame_layout.frame_size += crtl->outgoing_args_size;
9770
9771 /* In the following cases we have to allocate a STACK_POINTER_OFFSET
9772 sized area at the bottom of the stack. This is required also for
9773 leaf functions. When GCC generates a local stack reference it
9774 will always add STACK_POINTER_OFFSET to all these references. */
d5bf7b64 9775 if (crtl->is_leaf
67928721 9776 && !TARGET_TPF_PROFILING
9777 && cfun_frame_layout.frame_size == 0
ff4ce128 9778 && !cfun->calls_alloca)
67928721 9779 return;
9780
62eb9236 9781 /* Calculate the number of bytes we have used in our own register
9782 save area. With the packed stack layout we can re-use the
9783 remaining bytes for normal stack elements. */
67928721 9784
62eb9236 9785 if (TARGET_PACKED_STACK)
9786 lowest_offset = MIN (MIN (cfun_frame_layout.f0_offset,
9787 cfun_frame_layout.f4_offset),
9788 cfun_frame_layout.gprs_offset);
9789 else
9790 lowest_offset = 0;
ffead1ca 9791
62eb9236 9792 if (TARGET_BACKCHAIN)
9793 lowest_offset = MIN (lowest_offset, cfun_frame_layout.backchain_offset);
ffead1ca 9794
62eb9236 9795 cfun_frame_layout.frame_size += STACK_POINTER_OFFSET - lowest_offset;
67928721 9796
62eb9236 9797  /* If, under 31 bit, an odd number of GPRs has to be saved, we have to
 9798     adjust the frame size to maintain the 8-byte alignment of stack
 9799     frames.  */
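  /* With STACK_BOUNDARY == 64 the rounding below is the familiar
     (frame_size + 7) & ~7.  */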
9800 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
9801 STACK_BOUNDARY / BITS_PER_UNIT - 1)
9802 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));
8b4a4127 9803}
9804
4fed3f99 9805/* Generate frame layout. Fills in register and frame data for the current
9806 function in cfun->machine. This routine can be called multiple times;
9807 it will re-do the complete frame layout every time. */
8b4a4127 9808
4fed3f99 9809static void
9810s390_init_frame_layout (void)
4673c1a0 9811{
4fed3f99 9812 HOST_WIDE_INT frame_size;
9813 int base_used;
ff4ce128 9814
b85ca4c8 9815 /* After LRA the frame layout is supposed to be read-only and should
9816 not be re-computed. */
9817 if (reload_completed)
9818 return;
beee1f75 9819
4fed3f99 9820 /* On S/390 machines, we may need to perform branch splitting, which
9821 will require both base and return address register. We have no
9822 choice but to assume we're going to need them until right at the
9823 end of the machine dependent reorg phase. */
9824 if (!TARGET_CPU_ZARCH)
9825 cfun->machine->split_branches_pending_p = true;
9826
9827 do
9828 {
9829 frame_size = cfun_frame_layout.frame_size;
9830
9831 /* Try to predict whether we'll need the base register. */
9832 base_used = cfun->machine->split_branches_pending_p
18d50ae6 9833 || crtl->uses_const_pool
3ea2a559 9834 || (!DISP_IN_RANGE (frame_size)
9835 && !CONST_OK_FOR_K (frame_size));
4fed3f99 9836
9837 /* Decide which register to use as literal pool base. In small
9838 leaf functions, try to use an unused call-clobbered register
9839 as base register to avoid save/restore overhead. */
9840 if (!base_used)
9841 cfun->machine->base_reg = NULL_RTX;
4fed3f99 9842 else
fee9fc9f 9843 {
9844 int br = 0;
9845
9846 if (crtl->is_leaf)
9847 /* Prefer r5 (most likely to be free). */
9848 for (br = 5; br >= 2 && df_regs_ever_live_p (br); br--)
9849 ;
9850 cfun->machine->base_reg =
009c4697 9851 gen_rtx_REG (Pmode, (br >= 2) ? br : BASE_REGNUM);
fee9fc9f 9852 }
67928721 9853
ff4ce128 9854 s390_register_info ();
4fed3f99 9855 s390_frame_info ();
9856 }
9857 while (frame_size != cfun_frame_layout.frame_size);
4673c1a0 9858}
9859
5ada7a14 9860/* Remove the FPR clobbers from a tbegin insn if it can be proven that
9861 the TX is nonescaping. A transaction is considered escaping if
9862 there is at least one path from tbegin returning CC0 to the
 9863   function exit block without a tend.
9864
9865 The check so far has some limitations:
9866 - only single tbegin/tend BBs are supported
9867 - the first cond jump after tbegin must separate the CC0 path from ~CC0
 9868   - performing the CC0 check on a GPR copy of CC is not supported
9870*/
9871
9872static void
9873s390_optimize_nonescaping_tx (void)
9874{
9875 const unsigned int CC0 = 1 << 3;
9876 basic_block tbegin_bb = NULL;
9877 basic_block tend_bb = NULL;
9878 basic_block bb;
93e0956b 9879 rtx_insn *insn;
5ada7a14 9880 bool result = true;
9881 int bb_index;
93e0956b 9882 rtx_insn *tbegin_insn = NULL;
5ada7a14 9883
9884 if (!cfun->machine->tbegin_p)
9885 return;
9886
a28770e1 9887 for (bb_index = 0; bb_index < n_basic_blocks_for_fn (cfun); bb_index++)
5ada7a14 9888 {
f5a6b05f 9889 bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
5ada7a14 9890
91dfd73e 9891 if (!bb)
9892 continue;
9893
5ada7a14 9894 FOR_BB_INSNS (bb, insn)
9895 {
9896 rtx ite, cc, pat, target;
9897 unsigned HOST_WIDE_INT mask;
9898
9899 if (!INSN_P (insn) || INSN_CODE (insn) <= 0)
9900 continue;
9901
9902 pat = PATTERN (insn);
9903
9904 if (GET_CODE (pat) == PARALLEL)
9905 pat = XVECEXP (pat, 0, 0);
9906
9907 if (GET_CODE (pat) != SET
9908 || GET_CODE (SET_SRC (pat)) != UNSPEC_VOLATILE)
9909 continue;
9910
9911 if (XINT (SET_SRC (pat), 1) == UNSPECV_TBEGIN)
9912 {
91a55c11 9913 rtx_insn *tmp;
5ada7a14 9914
9915 tbegin_insn = insn;
9916
9917 /* Just return if the tbegin doesn't have clobbers. */
9918 if (GET_CODE (PATTERN (insn)) != PARALLEL)
9919 return;
9920
9921 if (tbegin_bb != NULL)
9922 return;
9923
9924 /* Find the next conditional jump. */
9925 for (tmp = NEXT_INSN (insn);
9926 tmp != NULL_RTX;
9927 tmp = NEXT_INSN (tmp))
9928 {
9929 if (reg_set_p (gen_rtx_REG (CCmode, CC_REGNUM), tmp))
9930 return;
9931 if (!JUMP_P (tmp))
9932 continue;
9933
9934 ite = SET_SRC (PATTERN (tmp));
9935 if (GET_CODE (ite) != IF_THEN_ELSE)
9936 continue;
9937
9938 cc = XEXP (XEXP (ite, 0), 0);
9939 if (!REG_P (cc) || !CC_REGNO_P (REGNO (cc))
9940 || GET_MODE (cc) != CCRAWmode
9941 || GET_CODE (XEXP (XEXP (ite, 0), 1)) != CONST_INT)
9942 return;
9943
9944 if (bb->succs->length () != 2)
9945 return;
9946
9947 mask = INTVAL (XEXP (XEXP (ite, 0), 1));
9948 if (GET_CODE (XEXP (ite, 0)) == NE)
9949 mask ^= 0xf;
9950
9951 if (mask == CC0)
9952 target = XEXP (ite, 1);
9953 else if (mask == (CC0 ^ 0xf))
9954 target = XEXP (ite, 2);
9955 else
9956 return;
9957
9958 {
9959 edge_iterator ei;
9960 edge e1, e2;
9961
9962 ei = ei_start (bb->succs);
9963 e1 = ei_safe_edge (ei);
9964 ei_next (&ei);
9965 e2 = ei_safe_edge (ei);
9966
9967 if (e2->flags & EDGE_FALLTHRU)
9968 {
9969 e2 = e1;
9970 e1 = ei_safe_edge (ei);
9971 }
9972
9973 if (!(e1->flags & EDGE_FALLTHRU))
9974 return;
9975
9976 tbegin_bb = (target == pc_rtx) ? e1->dest : e2->dest;
9977 }
9978 if (tmp == BB_END (bb))
9979 break;
9980 }
9981 }
9982
9983 if (XINT (SET_SRC (pat), 1) == UNSPECV_TEND)
9984 {
9985 if (tend_bb != NULL)
9986 return;
9987 tend_bb = bb;
9988 }
9989 }
9990 }
9991
9992 /* Either we successfully remove the FPR clobbers here or we are not
 9993     able to do anything for this TX.  Neither case qualifies for
 9994     another look.  */
9995 cfun->machine->tbegin_p = false;
9996
9997 if (tbegin_bb == NULL || tend_bb == NULL)
9998 return;
9999
10000 calculate_dominance_info (CDI_POST_DOMINATORS);
10001 result = dominated_by_p (CDI_POST_DOMINATORS, tbegin_bb, tend_bb);
10002 free_dominance_info (CDI_POST_DOMINATORS);
10003
10004 if (!result)
10005 return;
10006
91dfd73e 10007 PATTERN (tbegin_insn) = gen_rtx_PARALLEL (VOIDmode,
10008 gen_rtvec (2,
10009 XVECEXP (PATTERN (tbegin_insn), 0, 0),
10010 XVECEXP (PATTERN (tbegin_insn), 0, 1)));
5ada7a14 10011 INSN_CODE (tbegin_insn) = -1;
10012 df_insn_rescan (tbegin_insn);
10013
10014 return;
10015}
10016
8f1128bb 10017/* Return true if it is legal to put a value with MODE into REGNO. */
10018
10019bool
3754d046 10020s390_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
8f1128bb 10021{
76a4c804 10022 if (!TARGET_VX && VECTOR_NOFP_REGNO_P (regno))
10023 return false;
10024
8f1128bb 10025 switch (REGNO_REG_CLASS (regno))
10026 {
76a4c804 10027 case VEC_REGS:
10028 return ((GET_MODE_CLASS (mode) == MODE_INT
10029 && s390_class_max_nregs (VEC_REGS, mode) == 1)
10030 || mode == DFmode
10031 || s390_vector_mode_supported_p (mode));
10032 break;
8f1128bb 10033 case FP_REGS:
76a4c804 10034 if (TARGET_VX
10035 && ((GET_MODE_CLASS (mode) == MODE_INT
10036 && s390_class_max_nregs (FP_REGS, mode) == 1)
10037 || mode == DFmode
10038 || s390_vector_mode_supported_p (mode)))
10039 return true;
10040
8f1128bb 10041 if (REGNO_PAIR_OK (regno, mode))
10042 {
10043 if (mode == SImode || mode == DImode)
10044 return true;
10045
10046 if (FLOAT_MODE_P (mode) && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
10047 return true;
10048 }
10049 break;
10050 case ADDR_REGS:
10051 if (FRAME_REGNO_P (regno) && mode == Pmode)
10052 return true;
10053
10054 /* fallthrough */
10055 case GENERAL_REGS:
10056 if (REGNO_PAIR_OK (regno, mode))
10057 {
b5fdc416 10058 if (TARGET_ZARCH
36868490 10059 || (mode != TFmode && mode != TCmode && mode != TDmode))
8f1128bb 10060 return true;
ffead1ca 10061 }
8f1128bb 10062 break;
10063 case CC_REGS:
10064 if (GET_MODE_CLASS (mode) == MODE_CC)
10065 return true;
10066 break;
10067 case ACCESS_REGS:
10068 if (REGNO_PAIR_OK (regno, mode))
10069 {
10070 if (mode == SImode || mode == Pmode)
10071 return true;
10072 }
10073 break;
10074 default:
10075 return false;
10076 }
ffead1ca 10077
8f1128bb 10078 return false;
10079}
10080
d1a5573e 10081/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
10082
10083bool
10084s390_hard_regno_rename_ok (unsigned int old_reg, unsigned int new_reg)
10085{
10086 /* Once we've decided upon a register to use as base register, it must
10087 no longer be used for any other purpose. */
10088 if (cfun->machine->base_reg)
10089 if (REGNO (cfun->machine->base_reg) == old_reg
10090 || REGNO (cfun->machine->base_reg) == new_reg)
10091 return false;
10092
ff4ce128 10093 /* Prevent regrename from using call-saved regs which haven't
10094 actually been saved. This is necessary since regrename assumes
10095 the backend save/restore decisions are based on
10096 df_regs_ever_live. Since we have our own routine we have to tell
10097 regrename manually about it. */
10098 if (GENERAL_REGNO_P (new_reg)
10099 && !call_really_used_regs[new_reg]
1d3cea74 10100 && cfun_gpr_save_slot (new_reg) == SAVE_SLOT_NONE)
ff4ce128 10101 return false;
10102
10103 return true;
10104}
10105
10106/* Return nonzero if register REGNO can be used as a scratch register
10107 in peephole2. */
10108
10109static bool
10110s390_hard_regno_scratch_ok (unsigned int regno)
10111{
10112 /* See s390_hard_regno_rename_ok. */
10113 if (GENERAL_REGNO_P (regno)
10114 && !call_really_used_regs[regno]
1d3cea74 10115 && cfun_gpr_save_slot (regno) == SAVE_SLOT_NONE)
ff4ce128 10116 return false;
10117
d1a5573e 10118 return true;
10119}
10120
8f1128bb 10121/* Maximum number of registers to represent a value of mode MODE
8deb3959 10122 in a register of class RCLASS. */
8f1128bb 10123
6c2d82ab 10124int
3754d046 10125s390_class_max_nregs (enum reg_class rclass, machine_mode mode)
8f1128bb 10126{
76a4c804 10127 int reg_size;
10128 bool reg_pair_required_p = false;
10129
8deb3959 10130 switch (rclass)
8f1128bb 10131 {
10132 case FP_REGS:
76a4c804 10133 case VEC_REGS:
10134 reg_size = TARGET_VX ? 16 : 8;
10135
10136 /* TF and TD modes would fit into a VR but we put them into a
10137 register pair since we do not have 128bit FP instructions on
10138 full VRs. */
10139 if (TARGET_VX
10140 && SCALAR_FLOAT_MODE_P (mode)
10141 && GET_MODE_SIZE (mode) >= 16)
10142 reg_pair_required_p = true;
10143
10144 /* Even if complex types would fit into a single FPR/VR we force
10145 them into a register pair to deal with the parts more easily.
10146 (FIXME: What about complex ints?) */
8f1128bb 10147 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
76a4c804 10148 reg_pair_required_p = true;
10149 break;
8f1128bb 10150 case ACCESS_REGS:
76a4c804 10151 reg_size = 4;
10152 break;
8f1128bb 10153 default:
76a4c804 10154 reg_size = UNITS_PER_WORD;
8f1128bb 10155 break;
10156 }
76a4c804 10157
10158 if (reg_pair_required_p)
10159 return 2 * ((GET_MODE_SIZE (mode) / 2 + reg_size - 1) / reg_size);
10160
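  /* Example (no vector facility): TFmode, 16 bytes, in FP_REGS with
     reg_size == 8 yields (16 + 7) / 8 == 2, i.e. an FPR pair.  */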
10161 return (GET_MODE_SIZE (mode) + reg_size - 1) / reg_size;
10162}
10163
10164/* Return TRUE if changing mode from FROM to TO should not be allowed
10165 for register class CLASS. */
10166
10167int
10168s390_cannot_change_mode_class (machine_mode from_mode,
10169 machine_mode to_mode,
10170 enum reg_class rclass)
10171{
10172 machine_mode small_mode;
10173 machine_mode big_mode;
10174
10175 if (GET_MODE_SIZE (from_mode) == GET_MODE_SIZE (to_mode))
10176 return 0;
10177
10178 if (GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
10179 {
10180 small_mode = from_mode;
10181 big_mode = to_mode;
10182 }
10183 else
10184 {
10185 small_mode = to_mode;
10186 big_mode = from_mode;
10187 }
10188
10189 /* Values residing in VRs are little-endian style. All modes are
 10190     placed left-aligned in a VR.  This means that we cannot allow
10191 switching between modes with differing sizes. Also if the vector
10192 facility is available we still place TFmode values in VR register
10193 pairs, since the only instructions we have operating on TFmodes
10194 only deal with register pairs. Therefore we have to allow DFmode
10195 subregs of TFmodes to enable the TFmode splitters. */
10196 if (reg_classes_intersect_p (VEC_REGS, rclass)
10197 && (GET_MODE_SIZE (small_mode) < 8
10198 || s390_class_max_nregs (VEC_REGS, big_mode) == 1))
10199 return 1;
10200
10201 /* Likewise for access registers, since they have only half the
10202 word size on 64-bit. */
10203 if (reg_classes_intersect_p (ACCESS_REGS, rclass))
10204 return 1;
10205
10206 return 0;
8f1128bb 10207}
10208
7b1bda1c 10209/* Return true if we use LRA instead of reload pass. */
10210static bool
10211s390_lra_p (void)
10212{
10213 return s390_lra_flag;
10214}
10215
4fed3f99 10216/* Return true if register FROM can be eliminated via register TO. */
10217
cd90919d 10218static bool
10219s390_can_eliminate (const int from, const int to)
4fed3f99 10220{
d1a5573e 10221 /* On zSeries machines, we have not marked the base register as fixed.
10222 Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM.
10223 If a function requires the base register, we say here that this
10224 elimination cannot be performed. This will cause reload to free
10225 up the base register (as if it were fixed). On the other hand,
10226 if the current function does *not* require the base register, we
10227 say here the elimination succeeds, which in turn allows reload
10228 to allocate the base register for any other purpose. */
10229 if (from == BASE_REGNUM && to == BASE_REGNUM)
10230 {
10231 if (TARGET_CPU_ZARCH)
10232 {
10233 s390_init_frame_layout ();
10234 return cfun->machine->base_reg == NULL_RTX;
10235 }
10236
10237 return false;
10238 }
10239
10240 /* Everything else must point into the stack frame. */
4fed3f99 10241 gcc_assert (to == STACK_POINTER_REGNUM
10242 || to == HARD_FRAME_POINTER_REGNUM);
10243
10244 gcc_assert (from == FRAME_POINTER_REGNUM
10245 || from == ARG_POINTER_REGNUM
10246 || from == RETURN_ADDRESS_POINTER_REGNUM);
10247
10248 /* Make sure we actually saved the return address. */
10249 if (from == RETURN_ADDRESS_POINTER_REGNUM)
18d50ae6 10250 if (!crtl->calls_eh_return
10251 && !cfun->stdarg
4fed3f99 10252 && !cfun_frame_layout.save_return_addr_p)
10253 return false;
10254
10255 return true;
10256}
10257
10258/* Return offset between register FROM and TO initially after prolog. */
7cbfc974 10259
10260HOST_WIDE_INT
4fed3f99 10261s390_initial_elimination_offset (int from, int to)
7cbfc974 10262{
4fed3f99 10263 HOST_WIDE_INT offset;
7cbfc974 10264
4fed3f99 10265 /* ??? Why are we called for non-eliminable pairs? */
10266 if (!s390_can_eliminate (from, to))
10267 return 0;
10268
10269 switch (from)
10270 {
10271 case FRAME_POINTER_REGNUM:
ffead1ca 10272 offset = (get_frame_size()
119114cb 10273 + STACK_POINTER_OFFSET
abe32cce 10274 + crtl->outgoing_args_size);
4fed3f99 10275 break;
67928721 10276
4fed3f99 10277 case ARG_POINTER_REGNUM:
10278 s390_init_frame_layout ();
10279 offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
10280 break;
10281
10282 case RETURN_ADDRESS_POINTER_REGNUM:
10283 s390_init_frame_layout ();
ff4ce128 10284
10285 if (cfun_frame_layout.first_save_gpr_slot == -1)
10286 {
10287 /* If it turns out that for stdarg nothing went into the reg
10288 save area we also do not need the return address
10289 pointer. */
10290 if (cfun->stdarg && !cfun_save_arg_fprs_p)
10291 return 0;
10292
10293 gcc_unreachable ();
10294 }
10295
10296 /* In order to make the following work it is not necessary for
10297 r14 to have a save slot. It is sufficient if one other GPR
10298 got one. Since the GPRs are always stored without gaps we
10299 are able to calculate where the r14 save slot would
10300 reside. */
10301 offset = (cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset +
10302 (RETURN_REGNUM - cfun_frame_layout.first_save_gpr_slot) *
10303 UNITS_PER_LONG);
4fed3f99 10304 break;
10305
d1a5573e 10306 case BASE_REGNUM:
10307 offset = 0;
10308 break;
10309
4fed3f99 10310 default:
10311 gcc_unreachable ();
10312 }
10313
10314 return offset;
7cbfc974 10315}
10316
8b4a4127 10317/* Emit insn to save fpr REGNUM at offset OFFSET relative
f81e845f 10318 to register BASE. Return generated insn. */
56769981 10319
4673c1a0 10320static rtx
b40da9a7 10321save_fpr (rtx base, int offset, int regnum)
4673c1a0 10322{
8b4a4127 10323 rtx addr;
29c05e22 10324 addr = gen_rtx_MEM (DFmode, plus_constant (Pmode, base, offset));
ce1d5a67 10325
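  /* FPR0 is hard register 16 in this backend, so the range below
     covers the FP argument registers; their save slots may be read
     back through va_arg, hence the varargs alias set.  */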
10326 if (regnum >= 16 && regnum <= (16 + FP_ARG_NUM_REG))
10327 set_mem_alias_set (addr, get_varargs_alias_set ());
10328 else
10329 set_mem_alias_set (addr, get_frame_alias_set ());
4673c1a0 10330
8b4a4127 10331 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
10332}
4673c1a0 10333
8b4a4127 10334/* Emit insn to restore fpr REGNUM from offset OFFSET relative
f81e845f 10335 to register BASE. Return generated insn. */
4673c1a0 10336
8b4a4127 10337static rtx
b40da9a7 10338restore_fpr (rtx base, int offset, int regnum)
8b4a4127 10339{
10340 rtx addr;
29c05e22 10341 addr = gen_rtx_MEM (DFmode, plus_constant (Pmode, base, offset));
ce1d5a67 10342 set_mem_alias_set (addr, get_frame_alias_set ());
4673c1a0 10343
8b4a4127 10344 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
4673c1a0 10345}
10346
a3cd0f6a 10347/* Return true if REGNO is a global register, but not one
 10348   of the special ones that need to be saved/restored anyway.  */
10349
10350static inline bool
10351global_not_special_regno_p (int regno)
10352{
10353 return (global_regs[regno]
10354 /* These registers are special and need to be
10355 restored in any case. */
10356 && !(regno == STACK_POINTER_REGNUM
10357 || regno == RETURN_REGNUM
10358 || regno == BASE_REGNUM
10359 || (flag_pic && regno == (int)PIC_OFFSET_TABLE_REGNUM)));
10360}
10361
9a2a66ae 10362/* Generate insn to save registers FIRST to LAST into
f81e845f 10363 the register save area located at offset OFFSET
9a2a66ae 10364 relative to register BASE. */
4673c1a0 10365
9a2a66ae 10366static rtx
b40da9a7 10367save_gprs (rtx base, int offset, int first, int last)
4673c1a0 10368{
9a2a66ae 10369 rtx addr, insn, note;
10370 int i;
10371
29c05e22 10372 addr = plus_constant (Pmode, base, offset);
9a2a66ae 10373 addr = gen_rtx_MEM (Pmode, addr);
ce1d5a67 10374
10375 set_mem_alias_set (addr, get_frame_alias_set ());
9a2a66ae 10376
10377 /* Special-case single register. */
10378 if (first == last)
10379 {
10380 if (TARGET_64BIT)
10381 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
10382 else
10383 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
10384
a3cd0f6a 10385 if (!global_not_special_regno_p (first))
10386 RTX_FRAME_RELATED_P (insn) = 1;
9a2a66ae 10387 return insn;
10388 }
10389
10390
10391 insn = gen_store_multiple (addr,
10392 gen_rtx_REG (Pmode, first),
10393 GEN_INT (last - first + 1));
10394
18d50ae6 10395 if (first <= 6 && cfun->stdarg)
ce1d5a67 10396 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
10397 {
10398 rtx mem = XEXP (XVECEXP (PATTERN (insn), 0, i), 0);
ffead1ca 10399
ce1d5a67 10400 if (first + i <= 6)
10401 set_mem_alias_set (mem, get_varargs_alias_set ());
10402 }
9a2a66ae 10403
10404 /* We need to set the FRAME_RELATED flag on all SETs
10405 inside the store-multiple pattern.
10406
10407 However, we must not emit DWARF records for registers 2..5
f81e845f 10408 if they are stored for use by variable arguments ...
9a2a66ae 10409
3ce7ff97 10410 ??? Unfortunately, it is not enough to simply not set the
9a2a66ae 10411 FRAME_RELATED flags for those SETs, because the first SET
10412 of the PARALLEL is always treated as if it had the flag
10413 set, even if it does not. Therefore we emit a new pattern
10414 without those registers as REG_FRAME_RELATED_EXPR note. */
10415
a3cd0f6a 10416 if (first >= 6 && !global_not_special_regno_p (first))
9a2a66ae 10417 {
10418 rtx pat = PATTERN (insn);
10419
10420 for (i = 0; i < XVECLEN (pat, 0); i++)
a3cd0f6a 10421 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
10422 && !global_not_special_regno_p (REGNO (SET_SRC (XVECEXP (pat,
10423 0, i)))))
9a2a66ae 10424 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
10425
10426 RTX_FRAME_RELATED_P (insn) = 1;
10427 }
10428 else if (last >= 6)
10429 {
a3cd0f6a 10430 int start;
10431
10432 for (start = first >= 6 ? first : 6; start <= last; start++)
10433 if (!global_not_special_regno_p (start))
10434 break;
10435
10436 if (start > last)
10437 return insn;
10438
29c05e22 10439 addr = plus_constant (Pmode, base,
10440 offset + (start - first) * UNITS_PER_LONG);
ff4ce128 10441
10442 if (start == last)
10443 {
10444 if (TARGET_64BIT)
10445 note = gen_movdi (gen_rtx_MEM (Pmode, addr),
10446 gen_rtx_REG (Pmode, start));
10447 else
10448 note = gen_movsi (gen_rtx_MEM (Pmode, addr),
10449 gen_rtx_REG (Pmode, start));
10450 note = PATTERN (note);
10451
10452 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note);
10453 RTX_FRAME_RELATED_P (insn) = 1;
10454
10455 return insn;
10456 }
10457
f81e845f 10458 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
a3cd0f6a 10459 gen_rtx_REG (Pmode, start),
10460 GEN_INT (last - start + 1));
9a2a66ae 10461 note = PATTERN (note);
10462
b9c74b4d 10463 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note);
9a2a66ae 10464
10465 for (i = 0; i < XVECLEN (note, 0); i++)
a3cd0f6a 10466 if (GET_CODE (XVECEXP (note, 0, i)) == SET
10467 && !global_not_special_regno_p (REGNO (SET_SRC (XVECEXP (note,
10468 0, i)))))
9a2a66ae 10469 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
10470
10471 RTX_FRAME_RELATED_P (insn) = 1;
10472 }
10473
10474 return insn;
8b4a4127 10475}
4673c1a0 10476
9a2a66ae 10477/* Generate insn to restore registers FIRST to LAST from
f81e845f 10478 the register save area located at offset OFFSET
9a2a66ae 10479 relative to register BASE. */
4673c1a0 10480
9a2a66ae 10481static rtx
b40da9a7 10482restore_gprs (rtx base, int offset, int first, int last)
8b4a4127 10483{
9a2a66ae 10484 rtx addr, insn;
10485
29c05e22 10486 addr = plus_constant (Pmode, base, offset);
9a2a66ae 10487 addr = gen_rtx_MEM (Pmode, addr);
ce1d5a67 10488 set_mem_alias_set (addr, get_frame_alias_set ());
9a2a66ae 10489
10490 /* Special-case single register. */
10491 if (first == last)
10492 {
10493 if (TARGET_64BIT)
10494 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
10495 else
10496 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
10497
ff4ce128 10498 RTX_FRAME_RELATED_P (insn) = 1;
9a2a66ae 10499 return insn;
10500 }
10501
10502 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
10503 addr,
10504 GEN_INT (last - first + 1));
ff4ce128 10505 RTX_FRAME_RELATED_P (insn) = 1;
9a2a66ae 10506 return insn;
8b4a4127 10507}
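
/* For illustration, assuming a 64-bit target and the standard frame
   layout: save_gprs (stack_pointer_rtx, 48, 6, 15) collapses into a
   single "stmg %r6,%r15,48(%r15)" and the corresponding restore_gprs
   call into "lmg %r6,%r15,48(%r15)"; 31-bit targets use the stm/lm
   variants instead.  */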
4673c1a0 10508
20074f87 10509/* Return insn sequence to load the GOT register. */
12ef3745 10510
10511static GTY(()) rtx got_symbol;
93e0956b 10512rtx_insn *
20074f87 10513s390_load_got (void)
12ef3745 10514{
93e0956b 10515 rtx_insn *insns;
20074f87 10516
c60a7572 10517 /* We cannot use pic_offset_table_rtx here since this function is
10518 also used for non-PIC code if __tls_get_offset is called, and in
10519 that case neither PIC_OFFSET_TABLE_REGNUM nor pic_offset_table_rtx
10520 is usable. */
10521 rtx got_rtx = gen_rtx_REG (Pmode, 12);
10522
12ef3745 10523 if (!got_symbol)
10524 {
10525 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
10526 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
10527 }
10528
20074f87 10529 start_sequence ();
10530
dafc8d45 10531 if (TARGET_CPU_ZARCH)
12ef3745 10532 {
c60a7572 10533 emit_move_insn (got_rtx, got_symbol);
12ef3745 10534 }
10535 else
10536 {
20074f87 10537 rtx offset;
12ef3745 10538
f81e845f 10539 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
12ef3745 10540 UNSPEC_LTREL_OFFSET);
10541 offset = gen_rtx_CONST (Pmode, offset);
10542 offset = force_const_mem (Pmode, offset);
10543
c60a7572 10544 emit_move_insn (got_rtx, offset);
12ef3745 10545
f81e845f 10546 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
12ef3745 10547 UNSPEC_LTREL_BASE);
c60a7572 10548 offset = gen_rtx_PLUS (Pmode, got_rtx, offset);
12ef3745 10549
c60a7572 10550 emit_move_insn (got_rtx, offset);
12ef3745 10551 }
20074f87 10552
10553 insns = get_insns ();
10554 end_sequence ();
10555 return insns;
12ef3745 10556}
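
/* For illustration: on z/Architecture CPUs the emit_move_insn of
   got_symbol above is typically implemented as a single
   "larl %r12,_GLOBAL_OFFSET_TABLE_", while the pre-z900 path goes
   through a literal pool entry (UNSPEC_LTREL_OFFSET) plus an
   UNSPEC_LTREL_BASE addition.  */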
10557
062c49fd 10558/* This ties together stack memory (MEM with an alias set of frame_alias_set)
10559 and the change to the stack pointer. */
10560
10561static void
10562s390_emit_stack_tie (void)
10563{
10564 rtx mem = gen_frame_mem (BLKmode,
10565 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10566
10567 emit_insn (gen_stack_tie (mem));
10568}
10569
ff4ce128 10570/* Copy GPRs into FPR save slots. */
10571
10572static void
10573s390_save_gprs_to_fprs (void)
10574{
10575 int i;
10576
10577 if (!TARGET_Z10 || !TARGET_HARD_FLOAT || !crtl->is_leaf)
10578 return;
10579
10580 for (i = 6; i < 16; i++)
10581 {
10582 if (FP_REGNO_P (cfun_gpr_save_slot (i)))
10583 {
93e0956b 10584 rtx_insn *insn =
ff4ce128 10585 emit_move_insn (gen_rtx_REG (DImode, cfun_gpr_save_slot (i)),
10586 gen_rtx_REG (DImode, i));
10587 RTX_FRAME_RELATED_P (insn) = 1;
c5dad799 10588	  /* This prevents dwarf2cfi from interpreting the set.  Otherwise
10589	     it might emit def_cfa_register notes making an FPR the
10590	     new CFA. */
10591 add_reg_note (insn, REG_CFA_REGISTER, PATTERN (insn));
ff4ce128 10592 }
10593 }
10594}
10595
10596/* Restore GPRs from FPR save slots. */
10597
10598static void
10599s390_restore_gprs_from_fprs (void)
10600{
10601 int i;
10602
10603 if (!TARGET_Z10 || !TARGET_HARD_FLOAT || !crtl->is_leaf)
10604 return;
10605
10606 for (i = 6; i < 16; i++)
10607 {
54530437 10608 rtx_insn *insn;
10609
10610 if (!FP_REGNO_P (cfun_gpr_save_slot (i)))
10611 continue;
10612
10613 rtx fpr = gen_rtx_REG (DImode, cfun_gpr_save_slot (i));
10614
10615 if (i == STACK_POINTER_REGNUM)
10616 insn = emit_insn (gen_stack_restore_from_fpr (fpr));
10617 else
10618 insn = emit_move_insn (gen_rtx_REG (DImode, i), fpr);
10619
10620 df_set_regs_ever_live (i, true);
10621 add_reg_note (insn, REG_CFA_RESTORE, gen_rtx_REG (DImode, i));
10622 if (i == STACK_POINTER_REGNUM)
10623 add_reg_note (insn, REG_CFA_DEF_CFA,
10624 plus_constant (Pmode, stack_pointer_rtx,
10625 STACK_POINTER_OFFSET));
10626 RTX_FRAME_RELATED_P (insn) = 1;
ff4ce128 10627 }
10628}
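
/* For illustration: on z10 and later the DImode GPR<->FPR moves
   emitted by the two functions above typically map to the ldgr/lgdr
   instructions, which lets a leaf function keep call-saved GPRs in
   call-clobbered FPRs without touching the stack at all.  */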
10629
4673c1a0 10630
0b8be04c 10631/* A pass run immediately before shrink-wrapping and prologue and epilogue
10632 generation. */
10633
0b8be04c 10634namespace {
10635
10636const pass_data pass_data_s390_early_mach =
10637{
10638 RTL_PASS, /* type */
10639 "early_mach", /* name */
10640 OPTGROUP_NONE, /* optinfo_flags */
0b8be04c 10641 TV_MACH_DEP, /* tv_id */
10642 0, /* properties_required */
10643 0, /* properties_provided */
10644 0, /* properties_destroyed */
10645 0, /* todo_flags_start */
8b88439e 10646 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
0b8be04c 10647};
20074f87 10648
0b8be04c 10649class pass_s390_early_mach : public rtl_opt_pass
10650{
10651public:
10652 pass_s390_early_mach (gcc::context *ctxt)
10653 : rtl_opt_pass (pass_data_s390_early_mach, ctxt)
10654 {}
10655
10656 /* opt_pass methods: */
65b0537f 10657 virtual unsigned int execute (function *);
0b8be04c 10658
10659}; // class pass_s390_early_mach
10660
65b0537f 10661unsigned int
10662pass_s390_early_mach::execute (function *fun)
10663{
93e0956b 10664 rtx_insn *insn;
65b0537f 10665
10666 /* Try to get rid of the FPR clobbers. */
10667 s390_optimize_nonescaping_tx ();
10668
10669 /* Re-compute register info. */
10670 s390_register_info ();
10671
10672 /* If we're using a base register, ensure that it is always valid for
10673 the first non-prologue instruction. */
10674 if (fun->machine->base_reg)
10675 emit_insn_at_entry (gen_main_pool (fun->machine->base_reg));
10676
10677 /* Annotate all constant pool references to let the scheduler know
10678 they implicitly use the base register. */
10679 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10680 if (INSN_P (insn))
10681 {
10682 annotate_constant_pool_refs (&PATTERN (insn));
10683 df_insn_rescan (insn);
10684 }
10685 return 0;
10686}
10687
0b8be04c 10688} // anon namespace
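
/* A sketch of how such a pass is typically hooked in (the actual
   registration is done from the option override code):

     opt_pass *p = new pass_s390_early_mach (g);
     struct register_pass_info info
       = { p, "pro_and_epilogue", 1, PASS_POS_INSERT_BEFORE };
     register_pass (&info);  */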
10689
10690/* Expand the prologue into a bunch of separate insns. */
10691
10692void
10693s390_emit_prologue (void)
10694{
10695 rtx insn, addr;
10696 rtx temp_reg;
10697 int i;
10698 int offset;
10699 int next_fpr = 0;
20074f87 10700
f81e845f 10701 /* Choose the best register to use as a temporary within the prologue.
c6d481f7 10702 TPF with profiling must avoid register 14 - the tracing function
10703 needs the original contents of r14 to be preserved. */
f81e845f 10704
ffead1ca 10705 if (!has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
d5bf7b64 10706 && !crtl->is_leaf
1e639cb0 10707 && !TARGET_TPF_PROFILING)
8b4a4127 10708 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
c6d481f7 10709 else if (flag_split_stack && cfun->stdarg)
10710 temp_reg = gen_rtx_REG (Pmode, 12);
4673c1a0 10711 else
8b4a4127 10712 temp_reg = gen_rtx_REG (Pmode, 1);
4673c1a0 10713
ff4ce128 10714 s390_save_gprs_to_fprs ();
10715
8b4a4127 10716 /* Save call saved gprs. */
67928721 10717 if (cfun_frame_layout.first_save_gpr != -1)
4ac7fd98 10718 {
ffead1ca 10719 insn = save_gprs (stack_pointer_rtx,
10720 cfun_frame_layout.gprs_offset +
b5fdc416 10721 UNITS_PER_LONG * (cfun_frame_layout.first_save_gpr
5214e6ae 10722 - cfun_frame_layout.first_save_gpr_slot),
ffead1ca 10723 cfun_frame_layout.first_save_gpr,
4ac7fd98 10724 cfun_frame_layout.last_save_gpr);
10725 emit_insn (insn);
10726 }
8b4a4127 10727
c2c1332a 10728 /* Dummy insn to mark literal pool slot. */
f81e845f 10729
4fed3f99 10730 if (cfun->machine->base_reg)
10731 emit_insn (gen_main_pool (cfun->machine->base_reg));
f81e845f 10732
67928721 10733 offset = cfun_frame_layout.f0_offset;
8b4a4127 10734
67928721 10735 /* Save f0 and f2. */
6a2469fe 10736 for (i = FPR0_REGNUM; i <= FPR0_REGNUM + 1; i++)
67928721 10737 {
29439367 10738 if (cfun_fpr_save_p (i))
67928721 10739 {
29439367 10740 save_fpr (stack_pointer_rtx, offset, i);
67928721 10741 offset += 8;
10742 }
031bdf83 10743 else if (!TARGET_PACKED_STACK || cfun->stdarg)
10744 offset += 8;
67928721 10745 }
4673c1a0 10746
67928721 10747 /* Save f4 and f6. */
10748 offset = cfun_frame_layout.f4_offset;
6a2469fe 10749 for (i = FPR4_REGNUM; i <= FPR4_REGNUM + 1; i++)
67928721 10750 {
29439367 10751 if (cfun_fpr_save_p (i))
8b4a4127 10752 {
29439367 10753 insn = save_fpr (stack_pointer_rtx, offset, i);
67928721 10754 offset += 8;
10755
031bdf83 10756 /* If f4 and f6 are call clobbered, they are saved due to
10757 stdarg and therefore are not frame related. */
29439367 10758 if (!call_really_used_regs[i])
67928721 10759 RTX_FRAME_RELATED_P (insn) = 1;
8b4a4127 10760 }
031bdf83 10761 else if (!TARGET_PACKED_STACK || call_really_used_regs[i])
67928721 10762 offset += 8;
10763 }
10764
646a946e 10765 if (TARGET_PACKED_STACK
67928721 10766 && cfun_save_high_fprs_p
10767 && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
10768 {
10769 offset = (cfun_frame_layout.f8_offset
10770 + (cfun_frame_layout.high_fprs - 1) * 8);
10771
6a2469fe 10772 for (i = FPR15_REGNUM; i >= FPR8_REGNUM && offset >= 0; i--)
29439367 10773 if (cfun_fpr_save_p (i))
67928721 10774 {
29439367 10775 insn = save_fpr (stack_pointer_rtx, offset, i);
ffead1ca 10776
67928721 10777 RTX_FRAME_RELATED_P (insn) = 1;
10778 offset -= 8;
10779 }
10780 if (offset >= cfun_frame_layout.f8_offset)
29439367 10781 next_fpr = i;
67928721 10782 }
ffead1ca 10783
646a946e 10784 if (!TARGET_PACKED_STACK)
6a2469fe 10785 next_fpr = cfun_save_high_fprs_p ? FPR15_REGNUM : 0;
4673c1a0 10786
8c0dd614 10787 if (flag_stack_usage_info)
7810b579 10788 current_function_static_stack_size = cfun_frame_layout.frame_size;
10789
8b4a4127 10790 /* Decrement stack pointer. */
4673c1a0 10791
67928721 10792 if (cfun_frame_layout.frame_size > 0)
8b4a4127 10793 {
67928721 10794 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
b9c74b4d 10795 rtx real_frame_off;
4673c1a0 10796
cbb300e8 10797 if (s390_stack_size)
10798 {
00d233e6 10799 HOST_WIDE_INT stack_guard;
cbb300e8 10800
00d233e6 10801 if (s390_stack_guard)
10802 stack_guard = s390_stack_guard;
cbb300e8 10803 else
00d233e6 10804 {
10805 /* If no value for the stack guard is provided, the smallest power of 2
10806 larger than the current frame size is chosen. */
10807 stack_guard = 1;
10808 while (stack_guard < cfun_frame_layout.frame_size)
10809 stack_guard <<= 1;
10810 }
cbb300e8 10811
00d233e6 10812 if (cfun_frame_layout.frame_size >= s390_stack_size)
10813 {
8ad6fff9 10814 warning (0, "frame size of function %qs is %wd"
00d233e6 10815 " bytes exceeding user provided stack limit of "
8ad6fff9 10816 "%d bytes. "
00d233e6 10817 "An unconditional trap is added.",
10818 current_function_name(), cfun_frame_layout.frame_size,
10819 s390_stack_size);
10820 emit_insn (gen_trap ());
482869e7 10821 emit_barrier ();
00d233e6 10822 }
10823 else
10824 {
b437383e 10825 /* stack_guard has to be smaller than s390_stack_size.
10826 Otherwise we would emit an AND with zero which would
10827 not match the test under mask pattern. */
10828 if (stack_guard >= s390_stack_size)
10829 {
7fe62d25 10830 warning (0, "frame size of function %qs is %wd"
b437383e 10831 " bytes which is more than half the stack size. "
10832 "The dynamic check would not be reliable. "
10833 "No check emitted for this function.",
10834 current_function_name(),
10835 cfun_frame_layout.frame_size);
10836 }
00d233e6 10837 else
b437383e 10838 {
10839 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
10840 & ~(stack_guard - 1));
10841
10842 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
10843 GEN_INT (stack_check_mask));
10844 if (TARGET_64BIT)
10845 emit_insn (gen_ctrapdi4 (gen_rtx_EQ (VOIDmode,
10846 t, const0_rtx),
10847 t, const0_rtx, const0_rtx));
10848 else
10849 emit_insn (gen_ctrapsi4 (gen_rtx_EQ (VOIDmode,
10850 t, const0_rtx),
10851 t, const0_rtx, const0_rtx));
10852 }
00d233e6 10853 }
cbb300e8 10854 }
10855
ffead1ca 10856 if (s390_warn_framesize > 0
cbb300e8 10857 && cfun_frame_layout.frame_size >= s390_warn_framesize)
7fe62d25 10858 warning (0, "frame size of %qs is %wd bytes",
cbb300e8 10859 current_function_name (), cfun_frame_layout.frame_size);
10860
10861 if (s390_warn_dynamicstack_p && cfun->calls_alloca)
c3ceba8e 10862 warning (0, "%qs uses dynamic stack allocation", current_function_name ());
cbb300e8 10863
8b4a4127 10864 /* Save incoming stack pointer into temp reg. */
e5c64bfc 10865 if (TARGET_BACKCHAIN || next_fpr)
67928721 10866 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
f81e845f 10867
1fc184ee 10868 /* Subtract frame size from stack pointer. */
8b4a4127 10869
51aa1e9c 10870 if (DISP_IN_RANGE (INTVAL (frame_off)))
10871 {
d1f9b275 10872 insn = gen_rtx_SET (stack_pointer_rtx,
ffead1ca 10873 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
b40da9a7 10874 frame_off));
51aa1e9c 10875 insn = emit_insn (insn);
10876 }
10877 else
10878 {
cb888f33 10879 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
51aa1e9c 10880 frame_off = force_const_mem (Pmode, frame_off);
10881
10882 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
20074f87 10883 annotate_constant_pool_refs (&PATTERN (insn));
51aa1e9c 10884 }
8b4a4127 10885
8b4a4127 10886 RTX_FRAME_RELATED_P (insn) = 1;
b9c74b4d 10887 real_frame_off = GEN_INT (-cfun_frame_layout.frame_size);
10888 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
d1f9b275 10889 gen_rtx_SET (stack_pointer_rtx,
b9c74b4d 10890 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
10891 real_frame_off)));
8b4a4127 10892
10893 /* Set backchain. */
f81e845f 10894
e5c64bfc 10895 if (TARGET_BACKCHAIN)
4673c1a0 10896 {
67928721 10897 if (cfun_frame_layout.backchain_offset)
ffead1ca 10898 addr = gen_rtx_MEM (Pmode,
29c05e22 10899 plus_constant (Pmode, stack_pointer_rtx,
67928721 10900 cfun_frame_layout.backchain_offset));
10901 else
ffead1ca 10902 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
ce1d5a67 10903 set_mem_alias_set (addr, get_frame_alias_set ());
8b4a4127 10904 insn = emit_insn (gen_move_insn (addr, temp_reg));
4673c1a0 10905 }
90524d70 10906
cbeb677e 10907 /* If we support non-call exceptions (e.g. for Java),
90524d70 10908 we need to make sure the backchain pointer is set up
10909 before any possibly trapping memory access. */
cbeb677e 10910 if (TARGET_BACKCHAIN && cfun->can_throw_non_call_exceptions)
90524d70 10911 {
10912 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
18b42941 10913 emit_clobber (addr);
90524d70 10914 }
8b4a4127 10915 }
4673c1a0 10916
8b4a4127 10917 /* Save fprs 8 - 15 (64 bit ABI). */
f81e845f 10918
67928721 10919 if (cfun_save_high_fprs_p && next_fpr)
8b4a4127 10920 {
062c49fd 10921 /* If the stack might be accessed through a different register
10922 we have to make sure that the stack pointer decrement is not
10923 moved below the use of the stack slots. */
10924 s390_emit_stack_tie ();
10925
ffead1ca 10926 insn = emit_insn (gen_add2_insn (temp_reg,
67928721 10927 GEN_INT (cfun_frame_layout.f8_offset)));
10928
10929 offset = 0;
4673c1a0 10930
6a2469fe 10931 for (i = FPR8_REGNUM; i <= next_fpr; i++)
29439367 10932 if (cfun_fpr_save_p (i))
8b4a4127 10933 {
29c05e22 10934 rtx addr = plus_constant (Pmode, stack_pointer_rtx,
67928721 10935 cfun_frame_layout.frame_size
10936 + cfun_frame_layout.f8_offset
10937 + offset);
ffead1ca 10938
67928721 10939 insn = save_fpr (temp_reg, offset, i);
10940 offset += 8;
8b4a4127 10941 RTX_FRAME_RELATED_P (insn) = 1;
b9c74b4d 10942 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
d1f9b275 10943 gen_rtx_SET (gen_rtx_MEM (DFmode, addr),
b9c74b4d 10944 gen_rtx_REG (DFmode, i)));
8b4a4127 10945 }
10946 }
f81e845f 10947
8b4a4127 10948 /* Set frame pointer, if needed. */
f81e845f 10949
5a5e802f 10950 if (frame_pointer_needed)
8b4a4127 10951 {
10952 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
10953 RTX_FRAME_RELATED_P (insn) = 1;
10954 }
4673c1a0 10955
8b4a4127 10956 /* Set up got pointer, if needed. */
f81e845f 10957
3072d30e 10958 if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
20074f87 10959 {
93e0956b 10960 rtx_insn *insns = s390_load_got ();
20074f87 10961
91a55c11 10962 for (rtx_insn *insn = insns; insn; insn = NEXT_INSN (insn))
3072d30e 10963 annotate_constant_pool_refs (&PATTERN (insn));
20074f87 10964
10965 emit_insn (insns);
10966 }
f81e845f 10967
de253666 10968 if (TARGET_TPF_PROFILING)
f81e845f 10969 {
10970 /* Generate a BAS instruction to serve as a function
10971 entry intercept to facilitate the use of tracing
346fecd5 10972 algorithms located at the branch target. */
10973 emit_insn (gen_prologue_tpf ());
f81e845f 10974
10975 /* Emit a blockage here so that all code
10976 lies between the profiling mechanisms. */
10977 emit_insn (gen_blockage ());
10978 }
8b4a4127 10979}
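
/* Worked example for the stack guard logic above (illustrative
   numbers): with -mstack-size=65536, a frame size of 5000 and no
   explicit -mstack-guard, the guard is rounded up to the next power
   of 2, i.e. 8192.  The emitted check computes
     stack_check_mask = (65536 - 1) & ~(8192 - 1) = 0xe000
   and conditionally traps when %r15 & 0xe000 == 0, i.e. once the
   stack pointer has dropped into the lowest guard-sized chunk of the
   configured stack area.  */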
4673c1a0 10980
d2833c15 10981/* Expand the epilogue into a bunch of separate insns. */
4673c1a0 10982
8b4a4127 10983void
7346ca58 10984s390_emit_epilogue (bool sibcall)
8b4a4127 10985{
a3cd0f6a 10986 rtx frame_pointer, return_reg, cfa_restores = NULL_RTX;
abd8f04d 10987 int area_bottom, area_top, offset = 0;
67928721 10988 int next_offset;
8b4a4127 10989 rtvec p;
78c2b526 10990 int i;
4673c1a0 10991
de253666 10992 if (TARGET_TPF_PROFILING)
f81e845f 10993 {
10994
10995 /* Generate a BAS instruction to serve as a function
10996 exit intercept to facilitate the use of tracing
346fecd5 10997 algorithms located at the branch target. */
f81e845f 10998
f81e845f 10999 /* Emit a blockage here so that all code
11000 lies between the profiling mechanisms. */
11001 emit_insn (gen_blockage ());
11002
346fecd5 11003 emit_insn (gen_epilogue_tpf ());
f81e845f 11004 }
11005
8b4a4127 11006 /* Check whether to use frame or stack pointer for restore. */
4673c1a0 11007
ffead1ca 11008 frame_pointer = (frame_pointer_needed
67928721 11009 ? hard_frame_pointer_rtx : stack_pointer_rtx);
4673c1a0 11010
67928721 11011 s390_frame_area (&area_bottom, &area_top);
4673c1a0 11012
f81e845f 11013 /* Check whether we can access the register save area.
8b4a4127 11014 If not, increment the frame pointer as required. */
4673c1a0 11015
8b4a4127 11016 if (area_top <= area_bottom)
11017 {
11018 /* Nothing to restore. */
11019 }
67928721 11020 else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
11021 && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
8b4a4127 11022 {
11023 /* Area is in range. */
67928721 11024 offset = cfun_frame_layout.frame_size;
8b4a4127 11025 }
11026 else
11027 {
a3cd0f6a 11028 rtx insn, frame_off, cfa;
4673c1a0 11029
f81e845f 11030 offset = area_bottom < 0 ? -area_bottom : 0;
67928721 11031 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);
4673c1a0 11032
d1f9b275 11033 cfa = gen_rtx_SET (frame_pointer,
a3cd0f6a 11034 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
51aa1e9c 11035 if (DISP_IN_RANGE (INTVAL (frame_off)))
11036 {
d1f9b275 11037 insn = gen_rtx_SET (frame_pointer,
51aa1e9c 11038 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
11039 insn = emit_insn (insn);
11040 }
11041 else
11042 {
cb888f33 11043 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
51aa1e9c 11044 frame_off = force_const_mem (Pmode, frame_off);
4673c1a0 11045
51aa1e9c 11046 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
20074f87 11047 annotate_constant_pool_refs (&PATTERN (insn));
51aa1e9c 11048 }
a3cd0f6a 11049 add_reg_note (insn, REG_CFA_ADJUST_CFA, cfa);
11050 RTX_FRAME_RELATED_P (insn) = 1;
8b4a4127 11051 }
4673c1a0 11052
8b4a4127 11053 /* Restore call saved fprs. */
11054
11055 if (TARGET_64BIT)
4673c1a0 11056 {
67928721 11057 if (cfun_save_high_fprs_p)
11058 {
11059 next_offset = cfun_frame_layout.f8_offset;
6a2469fe 11060 for (i = FPR8_REGNUM; i <= FPR15_REGNUM; i++)
67928721 11061 {
29439367 11062 if (cfun_fpr_save_p (i))
67928721 11063 {
11064 restore_fpr (frame_pointer,
11065 offset + next_offset, i);
a3cd0f6a 11066 cfa_restores
11067 = alloc_reg_note (REG_CFA_RESTORE,
11068 gen_rtx_REG (DFmode, i), cfa_restores);
67928721 11069 next_offset += 8;
11070 }
11071 }
11072 }
ffead1ca 11073
4673c1a0 11074 }
11075 else
11076 {
67928721 11077 next_offset = cfun_frame_layout.f4_offset;
29439367 11078 /* f4, f6 */
6a2469fe 11079 for (i = FPR4_REGNUM; i <= FPR4_REGNUM + 1; i++)
67928721 11080 {
29439367 11081 if (cfun_fpr_save_p (i))
67928721 11082 {
11083 restore_fpr (frame_pointer,
11084 offset + next_offset, i);
a3cd0f6a 11085 cfa_restores
11086 = alloc_reg_note (REG_CFA_RESTORE,
11087 gen_rtx_REG (DFmode, i), cfa_restores);
67928721 11088 next_offset += 8;
11089 }
646a946e 11090 else if (!TARGET_PACKED_STACK)
67928721 11091 next_offset += 8;
11092 }
ffead1ca 11093
8b4a4127 11094 }
4673c1a0 11095
8b4a4127 11096 /* Return register. */
11097
f81e845f 11098 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
8b4a4127 11099
11100 /* Restore call saved gprs. */
11101
67928721 11102 if (cfun_frame_layout.first_restore_gpr != -1)
8b4a4127 11103 {
9a2a66ae 11104 rtx insn, addr;
43935856 11105 int i;
11106
f81e845f 11107 /* Check for global register and save them
43935856 11108 to stack location from where they get restored. */
11109
67928721 11110 for (i = cfun_frame_layout.first_restore_gpr;
11111 i <= cfun_frame_layout.last_restore_gpr;
43935856 11112 i++)
11113 {
a3cd0f6a 11114 if (global_not_special_regno_p (i))
43935856 11115 {
29c05e22 11116 addr = plus_constant (Pmode, frame_pointer,
ffead1ca 11117 offset + cfun_frame_layout.gprs_offset
5214e6ae 11118 + (i - cfun_frame_layout.first_save_gpr_slot)
b5fdc416 11119 * UNITS_PER_LONG);
43935856 11120 addr = gen_rtx_MEM (Pmode, addr);
ce1d5a67 11121 set_mem_alias_set (addr, get_frame_alias_set ());
43935856 11122 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
f81e845f 11123 }
a3cd0f6a 11124 else
11125 cfa_restores
11126 = alloc_reg_note (REG_CFA_RESTORE,
11127 gen_rtx_REG (Pmode, i), cfa_restores);
43935856 11128 }
8b4a4127 11129
7346ca58 11130 if (! sibcall)
4673c1a0 11131 {
7346ca58 11132	  /* Fetch return address from stack before load multiple;
d7c99e1a 11133	     this helps scheduling.
11134
11135 Only do this if we already decided that r14 needs to be
11136 saved to a stack slot. (And not just because r14 happens to
11137 be in between two GPRs which need saving.) Otherwise it
11138 would be difficult to take that decision back in
11139 s390_optimize_prologue. */
1d3cea74 11140 if (cfun_gpr_save_slot (RETURN_REGNUM) == SAVE_SLOT_STACK)
7346ca58 11141 {
11142 int return_regnum = find_unused_clobbered_reg();
11143 if (!return_regnum)
11144 return_regnum = 4;
11145 return_reg = gen_rtx_REG (Pmode, return_regnum);
f588eb9f 11146
29c05e22 11147 addr = plus_constant (Pmode, frame_pointer,
67928721 11148 offset + cfun_frame_layout.gprs_offset
ffead1ca 11149 + (RETURN_REGNUM
5214e6ae 11150 - cfun_frame_layout.first_save_gpr_slot)
b5fdc416 11151 * UNITS_PER_LONG);
7346ca58 11152 addr = gen_rtx_MEM (Pmode, addr);
ce1d5a67 11153 set_mem_alias_set (addr, get_frame_alias_set ());
7346ca58 11154 emit_move_insn (return_reg, addr);
d7c99e1a 11155
11156 /* Once we did that optimization we have to make sure
11157 s390_optimize_prologue does not try to remove the
11158 store of r14 since we will not be able to find the
11159 load issued here. */
11160 cfun_frame_layout.save_return_addr_p = true;
7346ca58 11161 }
4673c1a0 11162 }
8b4a4127 11163
67928721 11164 insn = restore_gprs (frame_pointer,
11165 offset + cfun_frame_layout.gprs_offset
ffead1ca 11166 + (cfun_frame_layout.first_restore_gpr
5214e6ae 11167 - cfun_frame_layout.first_save_gpr_slot)
b5fdc416 11168 * UNITS_PER_LONG,
67928721 11169 cfun_frame_layout.first_restore_gpr,
11170 cfun_frame_layout.last_restore_gpr);
a3cd0f6a 11171 insn = emit_insn (insn);
11172 REG_NOTES (insn) = cfa_restores;
11173 add_reg_note (insn, REG_CFA_DEF_CFA,
29c05e22 11174 plus_constant (Pmode, stack_pointer_rtx,
11175 STACK_POINTER_OFFSET));
a3cd0f6a 11176 RTX_FRAME_RELATED_P (insn) = 1;
8b4a4127 11177 }
4673c1a0 11178
ff4ce128 11179 s390_restore_gprs_from_fprs ();
11180
7346ca58 11181 if (! sibcall)
11182 {
f81e845f 11183
7346ca58 11184 /* Return to caller. */
f588eb9f 11185
7346ca58 11186 p = rtvec_alloc (2);
f588eb9f 11187
1a860023 11188 RTVEC_ELT (p, 0) = ret_rtx;
7346ca58 11189 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
11190 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
11191 }
4673c1a0 11192}
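
/* For illustration: in the non-sibcall case the PARALLEL built above,
   (parallel [(return) (use return_reg)]), normally assembles to a
   plain "br %r14" - or to a branch via the substitute register picked
   by find_unused_clobbered_reg when r14 had to be reloaded early.  */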
11193
7a64c761 11194/* Implement TARGET_SET_UP_BY_PROLOGUE. */
11195
11196static void
11197s300_set_up_by_prologue (hard_reg_set_container *regs)
11198{
11199 if (cfun->machine->base_reg
11200 && !call_really_used_regs[REGNO (cfun->machine->base_reg)])
11201 SET_HARD_REG_BIT (regs->set, REGNO (cfun->machine->base_reg));
11202}
11203
c6d481f7 11204/* -fsplit-stack support. */
11205
11206/* A SYMBOL_REF for __morestack. */
11207static GTY(()) rtx morestack_ref;
11208
11209/* When using -fsplit-stack, the allocation routines set a field in
11210 the TCB to the bottom of the stack plus this much space, measured
11211 in bytes. */
11212
11213#define SPLIT_STACK_AVAILABLE 1024
11214
11215/* Emit -fsplit-stack prologue, which goes before the regular function
11216 prologue. */
11217
11218void
11219s390_expand_split_stack_prologue (void)
11220{
11221 rtx r1, guard, cc = NULL;
11222 rtx_insn *insn;
11223 /* Offset from thread pointer to __private_ss. */
11224 int psso = TARGET_64BIT ? 0x38 : 0x20;
11226 /* Frame size and argument size - the two parameters to __morestack. */
11227 HOST_WIDE_INT frame_size = cfun_frame_layout.frame_size;
11228 /* Align argument size to 8 bytes - simplifies __morestack code. */
11229 HOST_WIDE_INT args_size = crtl->args.size >= 0
11230 ? ((crtl->args.size + 7) & ~7)
11231 : 0;
11232 /* Label to be called by __morestack. */
11233 rtx_code_label *call_done = NULL;
11234 rtx_code_label *parm_base = NULL;
11235 rtx tmp;
11236
11237 gcc_assert (flag_split_stack && reload_completed);
11238 if (!TARGET_CPU_ZARCH)
11239 {
11240 sorry ("CPUs older than z900 are not supported for -fsplit-stack");
11241 return;
11242 }
11243
11244 r1 = gen_rtx_REG (Pmode, 1);
11245
11246 /* If no stack frame will be allocated, don't do anything. */
11247 if (!frame_size)
11248 {
11249 if (cfun->machine->split_stack_varargs_pointer != NULL_RTX)
11250 {
11251 /* If va_start is used, just use r15. */
11252 emit_move_insn (r1,
11253 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
11254 GEN_INT (STACK_POINTER_OFFSET)));
11255
11256 }
11257 return;
11258 }
11259
11260 if (morestack_ref == NULL_RTX)
11261 {
11262 morestack_ref = gen_rtx_SYMBOL_REF (Pmode, "__morestack");
11263 SYMBOL_REF_FLAGS (morestack_ref) |= (SYMBOL_FLAG_LOCAL
11264 | SYMBOL_FLAG_FUNCTION);
11265 }
11266
11267 if (CONST_OK_FOR_K (frame_size) || CONST_OK_FOR_Op (frame_size))
11268 {
11269 /* If frame_size will fit in an add instruction, do a stack space
11270 check, and only call __morestack if there's not enough space. */
11271
11272 /* Get thread pointer. r1 is the only register we can always destroy - r0
11273 could contain a static chain (and cannot be used to address memory
11274 anyway), r2-r6 can contain parameters, and r6-r15 are callee-saved. */
11275 emit_move_insn (r1, gen_rtx_REG (Pmode, TP_REGNUM));
11276 /* Aim at __private_ss. */
11277 guard = gen_rtx_MEM (Pmode, plus_constant (Pmode, r1, psso));
11278
11279 /* If less than 1 KiB is used, skip the addition and compare directly with
11280 __private_ss. */
11281 if (frame_size > SPLIT_STACK_AVAILABLE)
11282 {
11283 emit_move_insn (r1, guard);
11284 if (TARGET_64BIT)
11285 emit_insn (gen_adddi3 (r1, r1, GEN_INT (frame_size)));
11286 else
11287 emit_insn (gen_addsi3 (r1, r1, GEN_INT (frame_size)));
11288 guard = r1;
11289 }
11290
11291 /* Compare the (maybe adjusted) guard with the stack pointer. */
11292 cc = s390_emit_compare (LT, stack_pointer_rtx, guard);
11293 }
11294
11295 call_done = gen_label_rtx ();
11296 parm_base = gen_label_rtx ();
11297
11298 /* Emit the parameter block. */
11299 tmp = gen_split_stack_data (parm_base, call_done,
11300 GEN_INT (frame_size),
11301 GEN_INT (args_size));
11302 insn = emit_insn (tmp);
11303 add_reg_note (insn, REG_LABEL_OPERAND, call_done);
11304 LABEL_NUSES (call_done)++;
11305 add_reg_note (insn, REG_LABEL_OPERAND, parm_base);
11306 LABEL_NUSES (parm_base)++;
11307
11308 /* %r1 = litbase. */
11309 insn = emit_move_insn (r1, gen_rtx_LABEL_REF (VOIDmode, parm_base));
11310 add_reg_note (insn, REG_LABEL_OPERAND, parm_base);
11311 LABEL_NUSES (parm_base)++;
11312
11313 /* Now, we need to call __morestack. It has very special calling
11314 conventions: it preserves param/return/static chain registers across
11315 the call to the main function body, and looks for its own parameters at %r1. */
11316
11317 if (cc != NULL)
11318 {
11319 tmp = gen_split_stack_cond_call (morestack_ref, cc, call_done);
11320
11321 insn = emit_jump_insn (tmp);
11322 JUMP_LABEL (insn) = call_done;
11323 LABEL_NUSES (call_done)++;
11324
11325 /* Mark the jump as very unlikely to be taken. */
11326 add_int_reg_note (insn, REG_BR_PROB, REG_BR_PROB_BASE / 100);
11327
11328 if (cfun->machine->split_stack_varargs_pointer != NULL_RTX)
11329 {
11330 /* If va_start is used, and __morestack was not called, just use
11331 r15. */
11332 emit_move_insn (r1,
11333 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
11334 GEN_INT (STACK_POINTER_OFFSET)));
11335 }
11336 }
11337 else
11338 {
11339 tmp = gen_split_stack_call (morestack_ref, call_done);
11340 insn = emit_jump_insn (tmp);
11341 JUMP_LABEL (insn) = call_done;
11342 LABEL_NUSES (call_done)++;
11343 emit_barrier ();
11344 }
11345
11346 /* __morestack will call us here. */
11347
11348 emit_label (call_done);
11349}
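
/* For illustration: the __private_ss offsets used above (0x38 for
   64 bit, 0x20 for 31 bit) are assumed to match the TCB layout used
   by glibc for -fsplit-stack.  The fast path thus costs only a
   thread pointer load, an optional add, a compare against %r15 and
   a conditional branch around the __morestack call.  */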
11350
11351/* We may have to tell the dataflow pass that the split stack prologue
11352 is initializing a register. */
11353
11354static void
11355s390_live_on_entry (bitmap regs)
11356{
11357 if (cfun->machine->split_stack_varargs_pointer != NULL_RTX)
11358 {
11359 gcc_assert (flag_split_stack);
11360 bitmap_set_bit (regs, 1);
11361 }
11362}
11363
7a64c761 11364/* Return true if the function can use simple_return to return outside
11365 of a shrink-wrapped region. At present shrink-wrapping is supported
11366 in all cases. */
11367
11368bool
11369s390_can_use_simple_return_insn (void)
11370{
11371 return true;
11372}
11373
11374/* Return true if the epilogue is guaranteed to contain only a return
11375 instruction and if a direct return can therefore be used instead.
11376 One of the main advantages of using direct return instructions
11377 is that we can then use conditional returns. */
11378
11379bool
11380s390_can_use_return_insn (void)
11381{
11382 int i;
11383
11384 if (!reload_completed)
11385 return false;
11386
11387 if (crtl->profile)
11388 return false;
11389
11390 if (TARGET_TPF_PROFILING)
11391 return false;
11392
11393 for (i = 0; i < 16; i++)
1d3cea74 11394 if (cfun_gpr_save_slot (i) != SAVE_SLOT_NONE)
7a64c761 11395 return false;
11396
06fa0630 11397 /* For 31 bit this is not covered by the frame_size check below
11398 since f4, f6 are saved in the register save area without needing
11399 additional stack space. */
11400 if (!TARGET_64BIT
11401 && (cfun_fpr_save_p (FPR4_REGNUM) || cfun_fpr_save_p (FPR6_REGNUM)))
11402 return false;
11403
7a64c761 11404 if (cfun->machine->base_reg
11405 && !call_really_used_regs[REGNO (cfun->machine->base_reg)])
11406 return false;
11407
11408 return cfun_frame_layout.frame_size == 0;
11409}
4673c1a0 11410
76a4c804 11411/* The VX ABI differs for vararg functions. Therefore we need the
11412 prototype of the callee to be available when passing vector type
11413 values. */
11414static const char *
11415s390_invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
11416{
11417 return ((TARGET_VX_ABI
11418 && typelist == 0
11419 && VECTOR_TYPE_P (TREE_TYPE (val))
11420 && (funcdecl == NULL_TREE
11421 || (TREE_CODE (funcdecl) == FUNCTION_DECL
11422 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
11423 ? N_("Vector argument passed to unprototyped function")
11424 : NULL);
11425}
11426
11427
f81e845f 11428/* Return the size in bytes of a function argument of
56769981 11429 type TYPE and/or mode MODE. At least one of TYPE or
11430 MODE must be specified. */
4673c1a0 11431
11432static int
3754d046 11433s390_function_arg_size (machine_mode mode, const_tree type)
4673c1a0 11434{
11435 if (type)
11436 return int_size_in_bytes (type);
11437
0c034860 11438 /* No type info available for some library calls ... */
4673c1a0 11439 if (mode != BLKmode)
11440 return GET_MODE_SIZE (mode);
11441
11442 /* If we have neither type nor mode, abort */
32eda510 11443 gcc_unreachable ();
4673c1a0 11444}
11445
76a4c804 11446/* Return true if a function argument of type TYPE and mode MODE
11447 is to be passed in a vector register, if available. */
11448
11449bool
11450s390_function_arg_vector (machine_mode mode, const_tree type)
11451{
11452 if (!TARGET_VX_ABI)
11453 return false;
11454
11455 if (s390_function_arg_size (mode, type) > 16)
11456 return false;
11457
11458 /* No type info available for some library calls ... */
11459 if (!type)
11460 return VECTOR_MODE_P (mode);
11461
11462 /* The ABI says that record types with a single member are treated
11463 just like that member would be. */
11464 while (TREE_CODE (type) == RECORD_TYPE)
11465 {
11466 tree field, single = NULL_TREE;
11467
11468 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
11469 {
11470 if (TREE_CODE (field) != FIELD_DECL)
11471 continue;
11472
11473 if (single == NULL_TREE)
11474 single = TREE_TYPE (field);
11475 else
11476 return false;
11477 }
11478
11479 if (single == NULL_TREE)
11480 return false;
11481 else
11482 {
11483	      /* If the field declaration adds extra bytes due to
11484	 e.g. padding, this is not accepted as a vector type. */
11485 if (int_size_in_bytes (single) <= 0
11486 || int_size_in_bytes (single) != int_size_in_bytes (type))
11487 return false;
11488 type = single;
11489 }
11490 }
11491
11492 return VECTOR_TYPE_P (type);
11493}
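
/* For illustration (hypothetical types): under the vector ABI both
     typedef double v2df __attribute__ ((vector_size (16)));
   and the single-member record
     struct s { v2df x; };
   pass this predicate, since the record is treated like its only
   member; a record whose size exceeds that member because of padding
   is rejected by the size comparison above.  */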
11494
59652f3f 11495/* Return true if a function argument of type TYPE and mode MODE
11496 is to be passed in a floating-point register, if available. */
11497
11498static bool
3754d046 11499s390_function_arg_float (machine_mode mode, const_tree type)
59652f3f 11500{
76a4c804 11501 if (s390_function_arg_size (mode, type) > 8)
201e502c 11502 return false;
11503
59652f3f 11504 /* Soft-float changes the ABI: no floating-point registers are used. */
11505 if (TARGET_SOFT_FLOAT)
11506 return false;
11507
11508 /* No type info available for some library calls ... */
11509 if (!type)
36868490 11510 return mode == SFmode || mode == DFmode || mode == SDmode || mode == DDmode;
59652f3f 11511
11512 /* The ABI says that record types with a single member are treated
11513 just like that member would be. */
11514 while (TREE_CODE (type) == RECORD_TYPE)
11515 {
11516 tree field, single = NULL_TREE;
11517
1767a056 11518 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
59652f3f 11519 {
11520 if (TREE_CODE (field) != FIELD_DECL)
11521 continue;
11522
11523 if (single == NULL_TREE)
11524 single = TREE_TYPE (field);
11525 else
11526 return false;
11527 }
11528
11529 if (single == NULL_TREE)
11530 return false;
11531 else
11532 type = single;
11533 }
11534
11535 return TREE_CODE (type) == REAL_TYPE;
11536}
11537
201e502c 11538/* Return true if a function argument of type TYPE and mode MODE
11539 is to be passed in an integer register, or a pair of integer
11540 registers, if available. */
11541
11542static bool
3754d046 11543s390_function_arg_integer (machine_mode mode, const_tree type)
201e502c 11544{
11545 int size = s390_function_arg_size (mode, type);
11546 if (size > 8)
11547 return false;
11548
11549 /* No type info available for some library calls ... */
11550 if (!type)
11551 return GET_MODE_CLASS (mode) == MODE_INT
36868490 11552 || (TARGET_SOFT_FLOAT && SCALAR_FLOAT_MODE_P (mode));
201e502c 11553
11554 /* We accept small integral (and similar) types. */
11555 if (INTEGRAL_TYPE_P (type)
f588eb9f 11556 || POINTER_TYPE_P (type)
bd3e12e5 11557 || TREE_CODE (type) == NULLPTR_TYPE
201e502c 11558 || TREE_CODE (type) == OFFSET_TYPE
11559 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
11560 return true;
11561
11562 /* We also accept structs of size 1, 2, 4, 8 that are not
f588eb9f 11563 passed in floating-point registers. */
201e502c 11564 if (AGGREGATE_TYPE_P (type)
11565 && exact_log2 (size) >= 0
11566 && !s390_function_arg_float (mode, type))
11567 return true;
11568
11569 return false;
11570}
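
/* For illustration: an int, a pointer, and a struct of size 1, 2, 4
   or 8 that is not passed in an FPR all satisfy this predicate, while
   a 16-byte struct does not and is passed by reference instead (see
   s390_pass_by_reference below).  */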
11571
56769981 11572/* Return 1 if a function argument of type TYPE and mode MODE
11573 is to be passed by reference. The ABI specifies that only
11574 structures of size 1, 2, 4, or 8 bytes are passed by value,
11575 all other structures (and complex numbers) are passed by
11576 reference. */
11577
b981d932 11578static bool
39cba157 11579s390_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
3754d046 11580 machine_mode mode, const_tree type,
b981d932 11581 bool named ATTRIBUTE_UNUSED)
4673c1a0 11582{
11583 int size = s390_function_arg_size (mode, type);
76a4c804 11584
11585 if (s390_function_arg_vector (mode, type))
11586 return false;
11587
201e502c 11588 if (size > 8)
11589 return true;
4673c1a0 11590
11591 if (type)
11592 {
201e502c 11593 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
76a4c804 11594 return true;
4673c1a0 11595
201e502c 11596 if (TREE_CODE (type) == COMPLEX_TYPE
11597 || TREE_CODE (type) == VECTOR_TYPE)
76a4c804 11598 return true;
4673c1a0 11599 }
f81e845f 11600
76a4c804 11601 return false;
4673c1a0 11602}
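
/* For illustration: a 3-byte struct goes by reference because its
   size is not a power of 2, and a _Complex double always goes by
   reference, whereas an 8-byte struct travels by value in a GPR.  */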
11603
11604/* Update the data in CUM to advance over an argument of mode MODE and
11605 data type TYPE. (TYPE is null for libcalls where that information
56769981 11606 may not be available.). The boolean NAMED specifies whether the
11607 argument is a named argument (as opposed to an unnamed argument
11608 matching an ellipsis). */
4673c1a0 11609
12bc26aa 11610static void
3754d046 11611s390_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
76a4c804 11612 const_tree type, bool named)
4673c1a0 11613{
39cba157 11614 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
11615
76a4c804 11616 if (s390_function_arg_vector (mode, type))
11617 {
11618 /* We are called for unnamed vector stdarg arguments which are
11619 passed on the stack. In this case this hook does not have to
11620 do anything since stack arguments are tracked by common
11621 code. */
11622 if (!named)
11623 return;
11624 cum->vrs += 1;
11625 }
11626 else if (s390_function_arg_float (mode, type))
4673c1a0 11627 {
59652f3f 11628 cum->fprs += 1;
4673c1a0 11629 }
201e502c 11630 else if (s390_function_arg_integer (mode, type))
4673c1a0 11631 {
11632 int size = s390_function_arg_size (mode, type);
b5fdc416 11633 cum->gprs += ((size + UNITS_PER_LONG - 1) / UNITS_PER_LONG);
4673c1a0 11634 }
201e502c 11635 else
32eda510 11636 gcc_unreachable ();
4673c1a0 11637}
11638
56769981 11639/* Define where to put the arguments to a function.
11640 Value is zero to push the argument on the stack,
11641 or a hard register in which to store the argument.
11642
11643 MODE is the argument's machine mode.
11644 TYPE is the data type of the argument (as a tree).
11645 This is null for libcalls where that information may
11646 not be available.
11647 CUM is a variable of type CUMULATIVE_ARGS which gives info about
11648 the preceding args and about the function being called.
11649 NAMED is nonzero if this argument is a named parameter
f81e845f 11650 (otherwise it is an extra parameter matching an ellipsis).
56769981 11651
11652 On S/390, we use general purpose registers 2 through 6 to
11653 pass integer, pointer, and certain structure arguments, and
11654 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
11655 to pass floating point arguments. All remaining arguments
11656 are pushed to the stack. */
4673c1a0 11657
12bc26aa 11658static rtx
3754d046 11659s390_function_arg (cumulative_args_t cum_v, machine_mode mode,
76a4c804 11660 const_tree type, bool named)
4673c1a0 11661{
39cba157 11662 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
11663
6b7cfb9c 11664 if (!named)
11665 s390_check_type_for_vector_abi (type, true, false);
76a4c804 11666
11667 if (s390_function_arg_vector (mode, type))
11668 {
11669 /* Vector arguments being part of the ellipsis are passed on the
11670 stack. */
11671 if (!named || (cum->vrs + 1 > VEC_ARG_NUM_REG))
11672 return NULL_RTX;
11673
11674 return gen_rtx_REG (mode, cum->vrs + FIRST_VEC_ARG_REGNO);
11675 }
11676 else if (s390_function_arg_float (mode, type))
4673c1a0 11677 {
6902d973 11678 if (cum->fprs + 1 > FP_ARG_NUM_REG)
76a4c804 11679 return NULL_RTX;
4673c1a0 11680 else
1a83b3ff 11681 return gen_rtx_REG (mode, cum->fprs + 16);
4673c1a0 11682 }
201e502c 11683 else if (s390_function_arg_integer (mode, type))
4673c1a0 11684 {
11685 int size = s390_function_arg_size (mode, type);
b5fdc416 11686 int n_gprs = (size + UNITS_PER_LONG - 1) / UNITS_PER_LONG;
4673c1a0 11687
6902d973 11688 if (cum->gprs + n_gprs > GP_ARG_NUM_REG)
76a4c804 11689 return NULL_RTX;
b5fdc416 11690 else if (n_gprs == 1 || UNITS_PER_WORD == UNITS_PER_LONG)
1a83b3ff 11691 return gen_rtx_REG (mode, cum->gprs + 2);
b5fdc416 11692 else if (n_gprs == 2)
11693 {
11694 rtvec p = rtvec_alloc (2);
11695
11696 RTVEC_ELT (p, 0)
11697 = gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, cum->gprs + 2),
11698 const0_rtx);
11699 RTVEC_ELT (p, 1)
11700 = gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, cum->gprs + 3),
11701 GEN_INT (4));
11702
11703 return gen_rtx_PARALLEL (mode, p);
11704 }
4673c1a0 11705 }
201e502c 11706
11707 /* After the real arguments, expand_call calls us once again
11708 with a void_type_node type. Whatever we return here is
11709 passed as operand 2 to the call expanders.
11710
11711 We don't need this feature ... */
11712 else if (type == void_type_node)
11713 return const0_rtx;
11714
32eda510 11715 gcc_unreachable ();
201e502c 11716}
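
/* For illustration: a DImode argument on a 31-bit target needs two
   GPRs, so with cum->gprs == 0 the code above builds
     (parallel:DI [(expr_list (reg:SI 2) (const_int 0))
                   (expr_list (reg:SI 3) (const_int 4))])
   describing an r2/r3 register pair, whereas on 64-bit targets the
   same argument simply lands in (reg:DI 2).  */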
11717
11718/* Return true if return values of type TYPE should be returned
11719 in a memory buffer whose address is passed by the caller as
11720 hidden first argument. */
11721
11722static bool
fb80456a 11723s390_return_in_memory (const_tree type, const_tree fundecl ATTRIBUTE_UNUSED)
201e502c 11724{
11725 /* We accept small integral (and similar) types. */
11726 if (INTEGRAL_TYPE_P (type)
f588eb9f 11727 || POINTER_TYPE_P (type)
201e502c 11728 || TREE_CODE (type) == OFFSET_TYPE
11729 || TREE_CODE (type) == REAL_TYPE)
11730 return int_size_in_bytes (type) > 8;
11731
76a4c804 11732 /* vector types which fit into a VR. */
11733 if (TARGET_VX_ABI
11734 && VECTOR_TYPE_P (type)
11735 && int_size_in_bytes (type) <= 16)
11736 return false;
11737
201e502c 11738 /* Aggregates and similar constructs are always returned
11739 in memory. */
11740 if (AGGREGATE_TYPE_P (type)
11741 || TREE_CODE (type) == COMPLEX_TYPE
76a4c804 11742 || VECTOR_TYPE_P (type))
201e502c 11743 return true;
11744
11745 /* ??? We get called on all sorts of random stuff from
11746 aggregate_value_p. We can't abort, but it's not clear
11747 what's safe to return. Pretend it's a struct I guess. */
11748 return true;
11749}
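
/* For illustration: a long long is returned in registers (r2, or the
   r2/r3 pair on 31 bit) and a 16-byte vector in a VR under the vector
   ABI, but even a one-byte struct is returned in memory through the
   hidden first argument.  */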
11750
3b2411a8 11751/* Function arguments and return values are promoted to word size. */
11752
3754d046 11753static machine_mode
11754s390_promote_function_mode (const_tree type, machine_mode mode,
3b2411a8 11755 int *punsignedp,
11756 const_tree fntype ATTRIBUTE_UNUSED,
11757 int for_return ATTRIBUTE_UNUSED)
11758{
11759 if (INTEGRAL_MODE_P (mode)
b5fdc416 11760 && GET_MODE_SIZE (mode) < UNITS_PER_LONG)
3b2411a8 11761 {
adaf4ef0 11762 if (type != NULL_TREE && POINTER_TYPE_P (type))
3b2411a8 11763 *punsignedp = POINTERS_EXTEND_UNSIGNED;
11764 return Pmode;
11765 }
11766
11767 return mode;
11768}
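
/* For illustration: with UNITS_PER_LONG == 8 a short or int argument
   or return value is promoted to Pmode (DImode) here, so SImode never
   survives as the mode of an integral scalar argument on 64-bit
   targets.  */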
11769
dc3b3062 11770/* Define where to return a (scalar) value of type RET_TYPE.
11771 If RET_TYPE is null, define where to return a (scalar)
201e502c 11772 value of mode MODE from a libcall. */
11773
dc3b3062 11774static rtx
3754d046 11775s390_function_and_libcall_value (machine_mode mode,
dc3b3062 11776 const_tree ret_type,
11777 const_tree fntype_or_decl,
11778 bool outgoing ATTRIBUTE_UNUSED)
201e502c 11779{
76a4c804 11780 /* For vector return types it is important to use the RET_TYPE
11781 argument whenever available since the middle-end might have
11782 changed the mode to a scalar mode. */
11783 bool vector_ret_type_p = ((ret_type && VECTOR_TYPE_P (ret_type))
11784 || (!ret_type && VECTOR_MODE_P (mode)));
11785
dc3b3062 11786 /* For normal functions perform the promotion as
11787 promote_function_mode would do. */
11788 if (ret_type)
201e502c 11789 {
dc3b3062 11790 int unsignedp = TYPE_UNSIGNED (ret_type);
11791 mode = promote_function_mode (ret_type, mode, &unsignedp,
11792 fntype_or_decl, 1);
201e502c 11793 }
11794
76a4c804 11795 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
11796 || SCALAR_FLOAT_MODE_P (mode)
11797 || (TARGET_VX_ABI && vector_ret_type_p));
11798 gcc_assert (GET_MODE_SIZE (mode) <= (TARGET_VX_ABI ? 16 : 8));
201e502c 11799
76a4c804 11800 if (TARGET_VX_ABI && vector_ret_type_p)
11801 return gen_rtx_REG (mode, FIRST_VEC_ARG_REGNO);
11802 else if (TARGET_HARD_FLOAT && SCALAR_FLOAT_MODE_P (mode))
201e502c 11803 return gen_rtx_REG (mode, 16);
b5fdc416 11804 else if (GET_MODE_SIZE (mode) <= UNITS_PER_LONG
11805 || UNITS_PER_LONG == UNITS_PER_WORD)
201e502c 11806 return gen_rtx_REG (mode, 2);
b5fdc416 11807 else if (GET_MODE_SIZE (mode) == 2 * UNITS_PER_LONG)
11808 {
dc3b3062 11809 /* This case is triggered when returning a 64 bit value with
11810 -m31 -mzarch. Although the value would fit into a single
11811 register it has to be forced into a 32 bit register pair in
11812 order to match the ABI. */
b5fdc416 11813 rtvec p = rtvec_alloc (2);
11814
11815 RTVEC_ELT (p, 0)
11816 = gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, 2), const0_rtx);
11817 RTVEC_ELT (p, 1)
11818 = gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, 3), GEN_INT (4));
11819
11820 return gen_rtx_PARALLEL (mode, p);
11821 }
11822
11823 gcc_unreachable ();
4673c1a0 11824}
11825
dc3b3062 11826/* Define where to return a scalar return value of type RET_TYPE. */
11827
11828static rtx
11829s390_function_value (const_tree ret_type, const_tree fn_decl_or_type,
11830 bool outgoing)
11831{
11832 return s390_function_and_libcall_value (TYPE_MODE (ret_type), ret_type,
11833 fn_decl_or_type, outgoing);
11834}
11835
11836/* Define where to return a scalar libcall return value of mode
11837 MODE. */
11838
11839static rtx
3754d046 11840s390_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
dc3b3062 11841{
11842 return s390_function_and_libcall_value (mode, NULL_TREE,
11843 NULL_TREE, true);
11844}
11845
4673c1a0 11846
56769981 11847/* Create and return the va_list datatype.
11848
11849 On S/390, va_list is an array type equivalent to
11850
11851 typedef struct __va_list_tag
11852 {
11853 long __gpr;
11854 long __fpr;
11855 void *__overflow_arg_area;
11856 void *__reg_save_area;
56769981 11857 } va_list[1];
11858
11859 where __gpr and __fpr hold the number of general purpose
11860 or floating point arguments used up to now, respectively,
f81e845f 11861 __overflow_arg_area points to the stack location of the
56769981 11862 next argument passed on the stack, and __reg_save_area
11863 always points to the start of the register area in the
11864 call frame of the current function. The function prologue
11865 saves all registers used for argument passing into this
11866 area if the function uses variable arguments. */
4673c1a0 11867
2e15d750 11868static tree
11869s390_build_builtin_va_list (void)
4673c1a0 11870{
11871 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
11872
5ebb663d 11873 record = lang_hooks.types.make_type (RECORD_TYPE);
4673c1a0 11874
11875 type_decl =
54e46243 11876 build_decl (BUILTINS_LOCATION,
11877 TYPE_DECL, get_identifier ("__va_list_tag"), record);
4673c1a0 11878
54e46243 11879 f_gpr = build_decl (BUILTINS_LOCATION,
11880 FIELD_DECL, get_identifier ("__gpr"),
4673c1a0 11881 long_integer_type_node);
54e46243 11882 f_fpr = build_decl (BUILTINS_LOCATION,
11883 FIELD_DECL, get_identifier ("__fpr"),
4673c1a0 11884 long_integer_type_node);
54e46243 11885 f_ovf = build_decl (BUILTINS_LOCATION,
11886 FIELD_DECL, get_identifier ("__overflow_arg_area"),
4673c1a0 11887 ptr_type_node);
54e46243 11888 f_sav = build_decl (BUILTINS_LOCATION,
11889 FIELD_DECL, get_identifier ("__reg_save_area"),
4673c1a0 11890 ptr_type_node);
11891
6902d973 11892 va_list_gpr_counter_field = f_gpr;
11893 va_list_fpr_counter_field = f_fpr;
11894
4673c1a0 11895 DECL_FIELD_CONTEXT (f_gpr) = record;
11896 DECL_FIELD_CONTEXT (f_fpr) = record;
11897 DECL_FIELD_CONTEXT (f_ovf) = record;
11898 DECL_FIELD_CONTEXT (f_sav) = record;
11899
bc907808 11900 TYPE_STUB_DECL (record) = type_decl;
4673c1a0 11901 TYPE_NAME (record) = type_decl;
11902 TYPE_FIELDS (record) = f_gpr;
1767a056 11903 DECL_CHAIN (f_gpr) = f_fpr;
11904 DECL_CHAIN (f_fpr) = f_ovf;
11905 DECL_CHAIN (f_ovf) = f_sav;
4673c1a0 11906
11907 layout_type (record);
11908
11909 /* The correct type is an array type of one element. */
11910 return build_array_type (record, build_index_type (size_zero_node));
11911}
11912
56769981 11913/* Implement va_start by filling the va_list structure VALIST.
7ccc713a 11914 NEXTARG points to the first anonymous stack argument;
11915 it is unused here (ATTRIBUTE_UNUSED).
56769981 11916
8ef587dc 11917 The following global variables are used to initialize
56769981 11918 the va_list structure:
11919
abe32cce 11920 crtl->args.info:
56769981 11921 holds number of gprs and fprs used for named arguments.
abe32cce 11922 crtl->args.arg_offset_rtx:
56769981 11923 holds the offset of the first anonymous stack argument
11924 (relative to the virtual arg pointer). */
4673c1a0 11925
8a58ed0a 11926static void
b40da9a7 11927s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
4673c1a0 11928{
11929 HOST_WIDE_INT n_gpr, n_fpr;
11930 int off;
11931 tree f_gpr, f_fpr, f_ovf, f_sav;
11932 tree gpr, fpr, ovf, sav, t;
11933
11934 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
1767a056 11935 f_fpr = DECL_CHAIN (f_gpr);
11936 f_ovf = DECL_CHAIN (f_fpr);
11937 f_sav = DECL_CHAIN (f_ovf);
4673c1a0 11938
170efcd4 11939 valist = build_simple_mem_ref (valist);
ed03eadb 11940 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
11941 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
11942 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
11943 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
4673c1a0 11944
11945 /* Count number of gp and fp argument registers used. */
11946
abe32cce 11947 n_gpr = crtl->args.info.gprs;
11948 n_fpr = crtl->args.info.fprs;
4673c1a0 11949
6902d973 11950 if (cfun->va_list_gpr_size)
11951 {
75a70cf9 11952 t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
11953 build_int_cst (NULL_TREE, n_gpr));
6902d973 11954 TREE_SIDE_EFFECTS (t) = 1;
11955 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
11956 }
4673c1a0 11957
6902d973 11958 if (cfun->va_list_fpr_size)
11959 {
75a70cf9 11960 t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
ed03eadb 11961 build_int_cst (NULL_TREE, n_fpr));
6902d973 11962 TREE_SIDE_EFFECTS (t) = 1;
11963 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
11964 }
4673c1a0 11965
c6d481f7 11966 if (flag_split_stack
11967 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
11968 == NULL)
11969 && cfun->machine->split_stack_varargs_pointer == NULL_RTX)
11970 {
11971 rtx reg;
11972 rtx_insn *seq;
11973
11974 reg = gen_reg_rtx (Pmode);
11975 cfun->machine->split_stack_varargs_pointer = reg;
11976
11977 start_sequence ();
11978 emit_move_insn (reg, gen_rtx_REG (Pmode, 1));
11979 seq = get_insns ();
11980 end_sequence ();
11981
11982 push_topmost_sequence ();
11983 emit_insn_after (seq, entry_of_function ());
11984 pop_topmost_sequence ();
11985 }
11986
76a4c804 11987 /* Find the overflow area.
11988 FIXME: This currently is too pessimistic when the vector ABI is
11989 enabled. In that case we *always* set up the overflow area
11990 pointer. */
6902d973 11991 if (n_gpr + cfun->va_list_gpr_size > GP_ARG_NUM_REG
76a4c804 11992 || n_fpr + cfun->va_list_fpr_size > FP_ARG_NUM_REG
11993 || TARGET_VX_ABI)
6902d973 11994 {
c6d481f7 11995 if (cfun->machine->split_stack_varargs_pointer == NULL_RTX)
11996 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
11997 else
11998 t = make_tree (TREE_TYPE (ovf), cfun->machine->split_stack_varargs_pointer);
4673c1a0 11999
abe32cce 12000 off = INTVAL (crtl->args.arg_offset_rtx);
6902d973 12001 off = off < 0 ? 0 : off;
12002 if (TARGET_DEBUG_ARG)
12003 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
12004 (int)n_gpr, (int)n_fpr, off);
4673c1a0 12005
2cc66f2a 12006 t = fold_build_pointer_plus_hwi (t, off);
4673c1a0 12007
75a70cf9 12008 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
6902d973 12009 TREE_SIDE_EFFECTS (t) = 1;
12010 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
12011 }
4673c1a0 12012
12013 /* Find the register save area. */
6902d973 12014 if ((cfun->va_list_gpr_size && n_gpr < GP_ARG_NUM_REG)
12015 || (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG))
12016 {
12017 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
2cc66f2a 12018 t = fold_build_pointer_plus_hwi (t, -RETURN_REGNUM * UNITS_PER_LONG);
ffead1ca 12019
75a70cf9 12020 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
6902d973 12021 TREE_SIDE_EFFECTS (t) = 1;
12022 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
12023 }
4673c1a0 12024}
12025
f81e845f 12026/* Implement va_arg by updating the va_list structure
56769981 12027 VALIST as required to retrieve an argument of type
f81e845f 12028 TYPE, and returning that argument.
12029
56769981 12030 Generates code equivalent to:
f81e845f 12031
4673c1a0 12032 if (integral value) {
12033 if (size <= 4 && args.gpr < 5 ||
f81e845f 12034 size > 4 && args.gpr < 4 )
4673c1a0 12035 ret = args.reg_save_area[args.gpr+8]
12036 else
12037 ret = *args.overflow_arg_area++;
76a4c804 12038 } else if (vector value) {
12039 ret = *args.overflow_arg_area;
12040 args.overflow_arg_area += size / 8;
4673c1a0 12041 } else if (float value) {
 12042	     if (args.fpr < 2)
12043 ret = args.reg_save_area[args.fpr+64]
12044 else
12045 ret = *args.overflow_arg_area++;
12046 } else if (aggregate value) {
12047 if (args.gpr < 5)
12048 ret = *args.reg_save_area[args.gpr]
12049 else
12050 ret = **args.overflow_arg_area++;
12051 } */
12052
875862bf 12053static tree
ffead1ca 12054s390_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
75a70cf9 12055 gimple_seq *post_p ATTRIBUTE_UNUSED)
4673c1a0 12056{
12057 tree f_gpr, f_fpr, f_ovf, f_sav;
12058 tree gpr, fpr, ovf, sav, reg, t, u;
12059 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
76a4c804 12060 tree lab_false, lab_over;
12061 tree addr = create_tmp_var (ptr_type_node, "addr");
12062 bool left_align_p; /* How a value < UNITS_PER_LONG is aligned within
12063 a stack slot. */
4673c1a0 12064
12065 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
1767a056 12066 f_fpr = DECL_CHAIN (f_gpr);
12067 f_ovf = DECL_CHAIN (f_fpr);
12068 f_sav = DECL_CHAIN (f_ovf);
4673c1a0 12069
ed03eadb 12070 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
12071 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
ed03eadb 12072 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
4673c1a0 12073
75a70cf9 12074 /* The tree for args* cannot be shared between gpr/fpr and ovf since
12075 both appear on a lhs. */
12076 valist = unshare_expr (valist);
12077 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
12078
4673c1a0 12079 size = int_size_in_bytes (type);
12080
6b7cfb9c 12081 s390_check_type_for_vector_abi (type, true, false);
12082
b981d932 12083 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
4673c1a0 12084 {
12085 if (TARGET_DEBUG_ARG)
12086 {
12087 fprintf (stderr, "va_arg: aggregate type");
12088 debug_tree (type);
12089 }
12090
12091 /* Aggregates are passed by reference. */
12092 indirect_p = 1;
12093 reg = gpr;
12094 n_reg = 1;
99e8a714 12095
646a946e 12096	      /* Kernel stack layout on 31 bit: It is assumed here that no padding
99e8a714 12097	 will be added by s390_frame_info because for va_args an even
 12098	 number of GPRs always has to be saved (r15-r2 = 14 regs).  */
b5fdc416 12099 sav_ofs = 2 * UNITS_PER_LONG;
12100 sav_scale = UNITS_PER_LONG;
12101 size = UNITS_PER_LONG;
6902d973 12102 max_reg = GP_ARG_NUM_REG - n_reg;
76a4c804 12103 left_align_p = false;
12104 }
12105 else if (s390_function_arg_vector (TYPE_MODE (type), type))
12106 {
12107 if (TARGET_DEBUG_ARG)
12108 {
12109 fprintf (stderr, "va_arg: vector type");
12110 debug_tree (type);
12111 }
12112
12113 indirect_p = 0;
12114 reg = NULL_TREE;
12115 n_reg = 0;
12116 sav_ofs = 0;
12117 sav_scale = 8;
12118 max_reg = 0;
12119 left_align_p = true;
4673c1a0 12120 }
59652f3f 12121 else if (s390_function_arg_float (TYPE_MODE (type), type))
4673c1a0 12122 {
12123 if (TARGET_DEBUG_ARG)
12124 {
12125 fprintf (stderr, "va_arg: float type");
12126 debug_tree (type);
12127 }
12128
12129 /* FP args go in FP registers, if present. */
12130 indirect_p = 0;
12131 reg = fpr;
12132 n_reg = 1;
b5fdc416 12133 sav_ofs = 16 * UNITS_PER_LONG;
4673c1a0 12134 sav_scale = 8;
6902d973 12135 max_reg = FP_ARG_NUM_REG - n_reg;
76a4c804 12136 left_align_p = false;
4673c1a0 12137 }
12138 else
12139 {
12140 if (TARGET_DEBUG_ARG)
12141 {
12142 fprintf (stderr, "va_arg: other type");
12143 debug_tree (type);
12144 }
12145
12146 /* Otherwise into GP registers. */
12147 indirect_p = 0;
12148 reg = gpr;
b5fdc416 12149 n_reg = (size + UNITS_PER_LONG - 1) / UNITS_PER_LONG;
99e8a714 12150
646a946e 12151	      /* Kernel stack layout on 31 bit: It is assumed here that no padding
 12152	 will be added by s390_frame_info because for va_args an even
 12153	 number of GPRs always has to be saved (r15-r2 = 14 regs).  */
b5fdc416 12154 sav_ofs = 2 * UNITS_PER_LONG;
f81e845f 12155
b5fdc416 12156 if (size < UNITS_PER_LONG)
12157 sav_ofs += UNITS_PER_LONG - size;
4673c1a0 12158
b5fdc416 12159 sav_scale = UNITS_PER_LONG;
6902d973 12160 max_reg = GP_ARG_NUM_REG - n_reg;
76a4c804 12161 left_align_p = false;
4673c1a0 12162 }
12163
12164 /* Pull the value out of the saved registers ... */
12165
76a4c804 12166 if (reg != NULL_TREE)
12167 {
12168 /*
12169 if (reg > ((typeof (reg))max_reg))
12170 goto lab_false;
4673c1a0 12171
76a4c804 12172	 addr = sav + sav_ofs + reg * sav_scale;
4673c1a0 12173
76a4c804 12174 goto lab_over;
4673c1a0 12175
76a4c804 12176 lab_false:
12177 */
12178
12179 lab_false = create_artificial_label (UNKNOWN_LOCATION);
12180 lab_over = create_artificial_label (UNKNOWN_LOCATION);
12181
12182 t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
12183 t = build2 (GT_EXPR, boolean_type_node, reg, t);
12184 u = build1 (GOTO_EXPR, void_type_node, lab_false);
12185 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
12186 gimplify_and_add (t, pre_p);
4673c1a0 12187
76a4c804 12188 t = fold_build_pointer_plus_hwi (sav, sav_ofs);
12189 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
12190 fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
12191 t = fold_build_pointer_plus (t, u);
4673c1a0 12192
76a4c804 12193 gimplify_assign (addr, t, pre_p);
4673c1a0 12194
76a4c804 12195 gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
12196
12197 gimple_seq_add_stmt (pre_p, gimple_build_label (lab_false));
12198 }
4673c1a0 12199
12200 /* ... Otherwise out of the overflow area. */
12201
875862bf 12202 t = ovf;
76a4c804 12203 if (size < UNITS_PER_LONG && !left_align_p)
2cc66f2a 12204 t = fold_build_pointer_plus_hwi (t, UNITS_PER_LONG - size);
875862bf 12205
12206 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
12207
75a70cf9 12208 gimplify_assign (addr, t, pre_p);
875862bf 12209
76a4c804 12210 if (size < UNITS_PER_LONG && left_align_p)
12211 t = fold_build_pointer_plus_hwi (t, UNITS_PER_LONG);
12212 else
12213 t = fold_build_pointer_plus_hwi (t, size);
12214
75a70cf9 12215 gimplify_assign (ovf, t, pre_p);
875862bf 12216
76a4c804 12217 if (reg != NULL_TREE)
12218 gimple_seq_add_stmt (pre_p, gimple_build_label (lab_over));
875862bf 12219
12220
12221 /* Increment register save count. */
12222
76a4c804 12223 if (n_reg > 0)
12224 {
12225 u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
12226 fold_convert (TREE_TYPE (reg), size_int (n_reg)));
12227 gimplify_and_add (u, pre_p);
12228 }
875862bf 12229
12230 if (indirect_p)
12231 {
8115f0af 12232 t = build_pointer_type_for_mode (build_pointer_type (type),
12233 ptr_mode, true);
875862bf 12234 addr = fold_convert (t, addr);
12235 addr = build_va_arg_indirect_ref (addr);
12236 }
12237 else
12238 {
8115f0af 12239 t = build_pointer_type_for_mode (type, ptr_mode, true);
875862bf 12240 addr = fold_convert (t, addr);
12241 }
12242
12243 return build_va_arg_indirect_ref (addr);
12244}
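
/* Illustrative sketch (not part of the original source): a variadic
   function whose expansion exercises both paths of
   s390_gimplify_va_arg.  While __gpr is still below GP_ARG_NUM_REG the
   value is fetched from __reg_save_area; afterwards it is read through
   __overflow_arg_area.

     #include <stdarg.h>

     long
     sum (int n, ...)
     {
       va_list ap;
       long s = 0;
       va_start (ap, n);
       while (n-- > 0)
         s += va_arg (ap, long);
       va_end (ap);
       return s;
     }
*/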
12245
5ada7a14 12246/* Emit rtl for the tbegin or tbegin_retry (RETRY != NULL_RTX)
12247 expanders.
12248 DEST - Register location where CC will be stored.
 12249	   TDB - Pointer to a 256 byte area where to store the transaction
 12250	 diagnostic block.  NULL if TDB is not needed.
 12251	   RETRY - Retry count value.  If non-NULL, a retry loop for CC2
 12252	 is emitted.
 12253	   CLOBBER_FPRS_P - If true, clobbers for all FPRs are emitted as part
12254 of the tbegin instruction pattern. */
12255
12256void
12257s390_expand_tbegin (rtx dest, rtx tdb, rtx retry, bool clobber_fprs_p)
12258{
91dfd73e 12259 rtx retry_plus_two = gen_reg_rtx (SImode);
5ada7a14 12260 rtx retry_reg = gen_reg_rtx (SImode);
79f6a8ed 12261 rtx_code_label *retry_label = NULL;
5ada7a14 12262
12263 if (retry != NULL_RTX)
12264 {
12265 emit_move_insn (retry_reg, retry);
91dfd73e 12266 emit_insn (gen_addsi3 (retry_plus_two, retry_reg, const2_rtx));
12267 emit_insn (gen_addsi3 (retry_reg, retry_reg, const1_rtx));
5ada7a14 12268 retry_label = gen_label_rtx ();
12269 emit_label (retry_label);
12270 }
12271
12272 if (clobber_fprs_p)
044a78dc 12273 {
12274 if (TARGET_VX)
12275 emit_insn (gen_tbegin_1_z13 (gen_rtx_CONST_INT (VOIDmode, TBEGIN_MASK),
12276 tdb));
12277 else
12278 emit_insn (gen_tbegin_1 (gen_rtx_CONST_INT (VOIDmode, TBEGIN_MASK),
12279 tdb));
12280 }
5ada7a14 12281 else
91dfd73e 12282 emit_insn (gen_tbegin_nofloat_1 (gen_rtx_CONST_INT (VOIDmode, TBEGIN_MASK),
12283 tdb));
5ada7a14 12284
91dfd73e 12285 emit_move_insn (dest, gen_rtx_UNSPEC (SImode,
12286 gen_rtvec (1, gen_rtx_REG (CCRAWmode,
12287 CC_REGNUM)),
12288 UNSPEC_CC_TO_INT));
5ada7a14 12289 if (retry != NULL_RTX)
12290 {
d089210f 12291 const int CC0 = 1 << 3;
12292 const int CC1 = 1 << 2;
12293 const int CC3 = 1 << 0;
12294 rtx jump;
5ada7a14 12295 rtx count = gen_reg_rtx (SImode);
93e0956b 12296 rtx_code_label *leave_label = gen_label_rtx ();
d089210f 12297
12298 /* Exit for success and permanent failures. */
5ada7a14 12299 jump = s390_emit_jump (leave_label,
12300 gen_rtx_EQ (VOIDmode,
12301 gen_rtx_REG (CCRAWmode, CC_REGNUM),
d089210f 12302 gen_rtx_CONST_INT (VOIDmode, CC0 | CC1 | CC3)));
12303 LABEL_NUSES (leave_label) = 1;
5ada7a14 12304
12305 /* CC2 - transient failure. Perform retry with ppa. */
91dfd73e 12306 emit_move_insn (count, retry_plus_two);
5ada7a14 12307 emit_insn (gen_subsi3 (count, count, retry_reg));
12308 emit_insn (gen_tx_assist (count));
12309 jump = emit_jump_insn (gen_doloop_si64 (retry_label,
12310 retry_reg,
12311 retry_reg));
12312 JUMP_LABEL (jump) = retry_label;
12313 LABEL_NUSES (retry_label) = 1;
d089210f 12314 emit_label (leave_label);
5ada7a14 12315 }
5ada7a14 12316}
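
/* Illustrative sketch (not part of the original source): user code
   that reaches the expander above, assuming the -mhtm builtins from
   htmintrin.h.  A retry count of 5 selects the tbegin_retry variant,
   so the CC2 retry loop with the ppa assist is emitted as coded above.

     #include <htmintrin.h>

     long counter;

     void
     increment (void)
     {
       if (__builtin_tbegin_retry ((void *) 0, 5) == _HTM_TBEGIN_STARTED)
         {
           counter++;
           __builtin_tend ();
         }
     }
*/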
12317
5ada7a14 12318
751c914e 12319/* Return the decl for the target specific builtin with the function
12320 code FCODE. */
12321
12322static tree
12323s390_builtin_decl (unsigned fcode, bool initialized_p ATTRIBUTE_UNUSED)
12324{
12325 if (fcode >= S390_BUILTIN_MAX)
12326 return error_mark_node;
12327
12328 return s390_builtin_decls[fcode];
12329}
12330
d44f2f7c 12331/* We call mcount before the function prologue. So a profiled leaf
12332 function should stay a leaf function. */
12333
12334static bool
12335s390_keep_leaf_when_profiled ()
12336{
12337 return true;
12338}
5ada7a14 12339
875862bf 12340/* Output assembly code for the trampoline template to
12341 stdio stream FILE.
12342
12343 On S/390, we use gpr 1 internally in the trampoline code;
12344 gpr 0 is used to hold the static chain. */
12345
4d946732 12346static void
12347s390_asm_trampoline_template (FILE *file)
875862bf 12348{
12349 rtx op[2];
12350 op[0] = gen_rtx_REG (Pmode, 0);
12351 op[1] = gen_rtx_REG (Pmode, 1);
12352
12353 if (TARGET_64BIT)
12354 {
29335855 12355 output_asm_insn ("basr\t%1,0", op); /* 2 byte */
12356 output_asm_insn ("lmg\t%0,%1,14(%1)", op); /* 6 byte */
12357 output_asm_insn ("br\t%1", op); /* 2 byte */
875862bf 12358 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
12359 }
12360 else
12361 {
29335855 12362 output_asm_insn ("basr\t%1,0", op); /* 2 byte */
12363 output_asm_insn ("lm\t%0,%1,6(%1)", op); /* 4 byte */
12364 output_asm_insn ("br\t%1", op); /* 2 byte */
875862bf 12365 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
12366 }
12367}
12368
12369/* Emit RTL insns to initialize the variable parts of a trampoline.
12370 FNADDR is an RTX for the address of the function's pure code.
12371 CXT is an RTX for the static chain value for the function. */
12372
4d946732 12373static void
12374s390_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
875862bf 12375{
4d946732 12376 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
12377 rtx mem;
8a2a84e3 12378
4d946732 12379 emit_block_move (m_tramp, assemble_trampoline_template (),
29335855 12380 GEN_INT (2 * UNITS_PER_LONG), BLOCK_OP_NORMAL);
4d946732 12381
29335855 12382 mem = adjust_address (m_tramp, Pmode, 2 * UNITS_PER_LONG);
4d946732 12383 emit_move_insn (mem, cxt);
29335855 12384 mem = adjust_address (m_tramp, Pmode, 3 * UNITS_PER_LONG);
4d946732 12385 emit_move_insn (mem, fnaddr);
875862bf 12386}
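
/* Illustrative sketch (not part of the original source): the resulting
   64-bit trampoline layout, combining the template above with the two
   words stored by s390_trampoline_init.

     offset  0:  basr %r1,0             %r1 = trampoline + 2
     offset  2:  lmg  %r0,%r1,14(%r1)   load from trampoline + 16
     offset  8:  br   %r1
     offset 10:  padding (ASM_OUTPUT_SKIP)
     offset 16:  static chain value (CXT), 2 * UNITS_PER_LONG
     offset 24:  function address (FNADDR), 3 * UNITS_PER_LONG

   The lmg thus loads the static chain into %r0 and the target address
   into %r1 before branching; the 31-bit variant works the same way
   with 4-byte slots.  */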
12387
875862bf 12388/* Output assembler code to FILE to increment profiler label # LABELNO
12389 for profiling a function entry. */
12390
12391void
12392s390_function_profiler (FILE *file, int labelno)
12393{
12394 rtx op[7];
12395
12396 char label[128];
12397 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
12398
12399 fprintf (file, "# function profiler \n");
12400
12401 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
12402 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
29c05e22 12403 op[1] = gen_rtx_MEM (Pmode, plus_constant (Pmode, op[1], UNITS_PER_LONG));
875862bf 12404
12405 op[2] = gen_rtx_REG (Pmode, 1);
12406 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
12407 SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
12408
12409 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
12410 if (flag_pic)
12411 {
12412 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
12413 op[4] = gen_rtx_CONST (Pmode, op[4]);
12414 }
12415
12416 if (TARGET_64BIT)
12417 {
12418 output_asm_insn ("stg\t%0,%1", op);
12419 output_asm_insn ("larl\t%2,%3", op);
12420 output_asm_insn ("brasl\t%0,%4", op);
12421 output_asm_insn ("lg\t%0,%1", op);
12422 }
4bc40d24 12423 else if (TARGET_CPU_ZARCH)
12424 {
12425 output_asm_insn ("st\t%0,%1", op);
12426 output_asm_insn ("larl\t%2,%3", op);
12427 output_asm_insn ("brasl\t%0,%4", op);
12428 output_asm_insn ("l\t%0,%1", op);
12429 }
875862bf 12430 else if (!flag_pic)
12431 {
12432 op[6] = gen_label_rtx ();
12433
12434 output_asm_insn ("st\t%0,%1", op);
12435 output_asm_insn ("bras\t%2,%l6", op);
12436 output_asm_insn (".long\t%4", op);
12437 output_asm_insn (".long\t%3", op);
12438 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
12439 output_asm_insn ("l\t%0,0(%2)", op);
12440 output_asm_insn ("l\t%2,4(%2)", op);
12441 output_asm_insn ("basr\t%0,%0", op);
12442 output_asm_insn ("l\t%0,%1", op);
12443 }
12444 else
12445 {
12446 op[5] = gen_label_rtx ();
12447 op[6] = gen_label_rtx ();
12448
12449 output_asm_insn ("st\t%0,%1", op);
12450 output_asm_insn ("bras\t%2,%l6", op);
12451 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
12452 output_asm_insn (".long\t%4-%l5", op);
12453 output_asm_insn (".long\t%3-%l5", op);
12454 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
12455 output_asm_insn ("lr\t%0,%2", op);
12456 output_asm_insn ("a\t%0,0(%2)", op);
12457 output_asm_insn ("a\t%2,4(%2)", op);
12458 output_asm_insn ("basr\t%0,%0", op);
12459 output_asm_insn ("l\t%0,%1", op);
12460 }
12461}
12462
12463/* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
12464 into its SYMBOL_REF_FLAGS. */
12465
12466static void
12467s390_encode_section_info (tree decl, rtx rtl, int first)
12468{
12469 default_encode_section_info (decl, rtl, first);
12470
e68d6a13 12471 if (TREE_CODE (decl) == VAR_DECL)
12472 {
78affa36 12473 /* Store the alignment to be able to check if we can use
12474 a larl/load-relative instruction. We only handle the cases
ea283725 12475 that can go wrong (i.e. no FUNC_DECLs). */
09d899d1 12476 if (DECL_ALIGN (decl) == 0 || DECL_ALIGN (decl) % 16)
78affa36 12477 SYMBOL_FLAG_SET_NOTALIGN2 (XEXP (rtl, 0));
ea283725 12478 else if (DECL_ALIGN (decl) % 32)
12479 SYMBOL_FLAG_SET_NOTALIGN4 (XEXP (rtl, 0));
12480 else if (DECL_ALIGN (decl) % 64)
12481 SYMBOL_FLAG_SET_NOTALIGN8 (XEXP (rtl, 0));
e68d6a13 12482 }
12483
12484 /* Literal pool references don't have a decl so they are handled
12485 differently here. We rely on the information in the MEM_ALIGN
78affa36 12486 entry to decide upon the alignment. */
e68d6a13 12487 if (MEM_P (rtl)
12488 && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
ea283725 12489 && TREE_CONSTANT_POOL_ADDRESS_P (XEXP (rtl, 0)))
78affa36 12490 {
09d899d1 12491 if (MEM_ALIGN (rtl) == 0 || MEM_ALIGN (rtl) % 16)
78affa36 12492 SYMBOL_FLAG_SET_NOTALIGN2 (XEXP (rtl, 0));
ea283725 12493 else if (MEM_ALIGN (rtl) % 32)
12494 SYMBOL_FLAG_SET_NOTALIGN4 (XEXP (rtl, 0));
12495 else if (MEM_ALIGN (rtl) % 64)
12496 SYMBOL_FLAG_SET_NOTALIGN8 (XEXP (rtl, 0));
78affa36 12497 }
875862bf 12498}
12499
12500/* Output thunk to FILE that implements a C++ virtual function call (with
12501 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
12502 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
12503 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
12504 relative to the resulting this pointer. */
12505
12506static void
12507s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
12508 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
12509 tree function)
12510{
12511 rtx op[10];
12512 int nonlocal = 0;
12513
21a38800 12514 /* Make sure unwind info is emitted for the thunk if needed. */
12515 final_start_function (emit_barrier (), file, 1);
12516
875862bf 12517 /* Operand 0 is the target function. */
12518 op[0] = XEXP (DECL_RTL (function), 0);
12519 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
12520 {
12521 nonlocal = 1;
12522 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
12523 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
12524 op[0] = gen_rtx_CONST (Pmode, op[0]);
12525 }
12526
12527 /* Operand 1 is the 'this' pointer. */
12528 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
12529 op[1] = gen_rtx_REG (Pmode, 3);
12530 else
12531 op[1] = gen_rtx_REG (Pmode, 2);
12532
12533 /* Operand 2 is the delta. */
12534 op[2] = GEN_INT (delta);
12535
12536 /* Operand 3 is the vcall_offset. */
12537 op[3] = GEN_INT (vcall_offset);
12538
12539 /* Operand 4 is the temporary register. */
12540 op[4] = gen_rtx_REG (Pmode, 1);
12541
12542 /* Operands 5 to 8 can be used as labels. */
12543 op[5] = NULL_RTX;
12544 op[6] = NULL_RTX;
12545 op[7] = NULL_RTX;
12546 op[8] = NULL_RTX;
12547
 12548	  /* Operand 9 can be used as a temporary register.  */
12549 op[9] = NULL_RTX;
12550
12551 /* Generate code. */
12552 if (TARGET_64BIT)
12553 {
12554 /* Setup literal pool pointer if required. */
12555 if ((!DISP_IN_RANGE (delta)
163277cf 12556 && !CONST_OK_FOR_K (delta)
12557 && !CONST_OK_FOR_Os (delta))
875862bf 12558 || (!DISP_IN_RANGE (vcall_offset)
163277cf 12559 && !CONST_OK_FOR_K (vcall_offset)
12560 && !CONST_OK_FOR_Os (vcall_offset)))
875862bf 12561 {
12562 op[5] = gen_label_rtx ();
12563 output_asm_insn ("larl\t%4,%5", op);
12564 }
12565
12566 /* Add DELTA to this pointer. */
12567 if (delta)
12568 {
cb888f33 12569 if (CONST_OK_FOR_J (delta))
875862bf 12570 output_asm_insn ("la\t%1,%2(%1)", op);
12571 else if (DISP_IN_RANGE (delta))
12572 output_asm_insn ("lay\t%1,%2(%1)", op);
cb888f33 12573 else if (CONST_OK_FOR_K (delta))
875862bf 12574 output_asm_insn ("aghi\t%1,%2", op);
163277cf 12575 else if (CONST_OK_FOR_Os (delta))
12576 output_asm_insn ("agfi\t%1,%2", op);
875862bf 12577 else
12578 {
12579 op[6] = gen_label_rtx ();
12580 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
12581 }
12582 }
12583
12584 /* Perform vcall adjustment. */
12585 if (vcall_offset)
12586 {
12587 if (DISP_IN_RANGE (vcall_offset))
12588 {
12589 output_asm_insn ("lg\t%4,0(%1)", op);
12590 output_asm_insn ("ag\t%1,%3(%4)", op);
12591 }
cb888f33 12592 else if (CONST_OK_FOR_K (vcall_offset))
875862bf 12593 {
12594 output_asm_insn ("lghi\t%4,%3", op);
12595 output_asm_insn ("ag\t%4,0(%1)", op);
12596 output_asm_insn ("ag\t%1,0(%4)", op);
12597 }
163277cf 12598 else if (CONST_OK_FOR_Os (vcall_offset))
12599 {
12600 output_asm_insn ("lgfi\t%4,%3", op);
12601 output_asm_insn ("ag\t%4,0(%1)", op);
12602 output_asm_insn ("ag\t%1,0(%4)", op);
12603 }
875862bf 12604 else
12605 {
12606 op[7] = gen_label_rtx ();
12607 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
12608 output_asm_insn ("ag\t%4,0(%1)", op);
12609 output_asm_insn ("ag\t%1,0(%4)", op);
12610 }
12611 }
12612
12613 /* Jump to target. */
12614 output_asm_insn ("jg\t%0", op);
12615
12616 /* Output literal pool if required. */
12617 if (op[5])
12618 {
12619 output_asm_insn (".align\t4", op);
12620 targetm.asm_out.internal_label (file, "L",
12621 CODE_LABEL_NUMBER (op[5]));
12622 }
12623 if (op[6])
12624 {
12625 targetm.asm_out.internal_label (file, "L",
12626 CODE_LABEL_NUMBER (op[6]));
12627 output_asm_insn (".long\t%2", op);
12628 }
12629 if (op[7])
12630 {
12631 targetm.asm_out.internal_label (file, "L",
12632 CODE_LABEL_NUMBER (op[7]));
12633 output_asm_insn (".long\t%3", op);
12634 }
12635 }
12636 else
12637 {
12638 /* Setup base pointer if required. */
12639 if (!vcall_offset
12640 || (!DISP_IN_RANGE (delta)
163277cf 12641 && !CONST_OK_FOR_K (delta)
12642 && !CONST_OK_FOR_Os (delta))
875862bf 12643 || (!DISP_IN_RANGE (delta)
163277cf 12644 && !CONST_OK_FOR_K (vcall_offset)
12645 && !CONST_OK_FOR_Os (vcall_offset)))
875862bf 12646 {
12647 op[5] = gen_label_rtx ();
12648 output_asm_insn ("basr\t%4,0", op);
12649 targetm.asm_out.internal_label (file, "L",
12650 CODE_LABEL_NUMBER (op[5]));
12651 }
12652
12653 /* Add DELTA to this pointer. */
12654 if (delta)
12655 {
cb888f33 12656 if (CONST_OK_FOR_J (delta))
875862bf 12657 output_asm_insn ("la\t%1,%2(%1)", op);
12658 else if (DISP_IN_RANGE (delta))
12659 output_asm_insn ("lay\t%1,%2(%1)", op);
cb888f33 12660 else if (CONST_OK_FOR_K (delta))
875862bf 12661 output_asm_insn ("ahi\t%1,%2", op);
163277cf 12662 else if (CONST_OK_FOR_Os (delta))
12663 output_asm_insn ("afi\t%1,%2", op);
875862bf 12664 else
12665 {
12666 op[6] = gen_label_rtx ();
12667 output_asm_insn ("a\t%1,%6-%5(%4)", op);
12668 }
12669 }
12670
12671 /* Perform vcall adjustment. */
12672 if (vcall_offset)
12673 {
cb888f33 12674 if (CONST_OK_FOR_J (vcall_offset))
875862bf 12675 {
0451e449 12676 output_asm_insn ("l\t%4,0(%1)", op);
875862bf 12677 output_asm_insn ("a\t%1,%3(%4)", op);
12678 }
12679 else if (DISP_IN_RANGE (vcall_offset))
12680 {
0451e449 12681 output_asm_insn ("l\t%4,0(%1)", op);
875862bf 12682 output_asm_insn ("ay\t%1,%3(%4)", op);
12683 }
cb888f33 12684 else if (CONST_OK_FOR_K (vcall_offset))
875862bf 12685 {
12686 output_asm_insn ("lhi\t%4,%3", op);
12687 output_asm_insn ("a\t%4,0(%1)", op);
12688 output_asm_insn ("a\t%1,0(%4)", op);
12689 }
163277cf 12690 else if (CONST_OK_FOR_Os (vcall_offset))
12691 {
12692 output_asm_insn ("iilf\t%4,%3", op);
12693 output_asm_insn ("a\t%4,0(%1)", op);
12694 output_asm_insn ("a\t%1,0(%4)", op);
12695 }
875862bf 12696 else
12697 {
12698 op[7] = gen_label_rtx ();
12699 output_asm_insn ("l\t%4,%7-%5(%4)", op);
12700 output_asm_insn ("a\t%4,0(%1)", op);
12701 output_asm_insn ("a\t%1,0(%4)", op);
12702 }
4673c1a0 12703
875862bf 12704 /* We had to clobber the base pointer register.
12705 Re-setup the base pointer (with a different base). */
12706 op[5] = gen_label_rtx ();
12707 output_asm_insn ("basr\t%4,0", op);
12708 targetm.asm_out.internal_label (file, "L",
12709 CODE_LABEL_NUMBER (op[5]));
12710 }
4673c1a0 12711
875862bf 12712 /* Jump to target. */
12713 op[8] = gen_label_rtx ();
4673c1a0 12714
875862bf 12715 if (!flag_pic)
12716 output_asm_insn ("l\t%4,%8-%5(%4)", op);
12717 else if (!nonlocal)
12718 output_asm_insn ("a\t%4,%8-%5(%4)", op);
12719 /* We cannot call through .plt, since .plt requires %r12 loaded. */
12720 else if (flag_pic == 1)
12721 {
12722 output_asm_insn ("a\t%4,%8-%5(%4)", op);
12723 output_asm_insn ("l\t%4,%0(%4)", op);
12724 }
12725 else if (flag_pic == 2)
12726 {
12727 op[9] = gen_rtx_REG (Pmode, 0);
12728 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
12729 output_asm_insn ("a\t%4,%8-%5(%4)", op);
12730 output_asm_insn ("ar\t%4,%9", op);
12731 output_asm_insn ("l\t%4,0(%4)", op);
12732 }
4673c1a0 12733
875862bf 12734 output_asm_insn ("br\t%4", op);
4673c1a0 12735
875862bf 12736 /* Output literal pool. */
12737 output_asm_insn (".align\t4", op);
4673c1a0 12738
875862bf 12739 if (nonlocal && flag_pic == 2)
12740 output_asm_insn (".long\t%0", op);
12741 if (nonlocal)
12742 {
12743 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
12744 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
12745 }
d93e0d9f 12746
875862bf 12747 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
12748 if (!flag_pic)
12749 output_asm_insn (".long\t%0", op);
12750 else
12751 output_asm_insn (".long\t%0-%5", op);
4673c1a0 12752
875862bf 12753 if (op[6])
12754 {
12755 targetm.asm_out.internal_label (file, "L",
12756 CODE_LABEL_NUMBER (op[6]));
12757 output_asm_insn (".long\t%2", op);
12758 }
12759 if (op[7])
12760 {
12761 targetm.asm_out.internal_label (file, "L",
12762 CODE_LABEL_NUMBER (op[7]));
12763 output_asm_insn (".long\t%3", op);
12764 }
4673c1a0 12765 }
21a38800 12766 final_end_function ();
4673c1a0 12767}
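
/* Illustrative sketch (not part of the original source): the pointer
   adjustment the emitted thunk performs before tail-jumping to
   FUNCTION, in C-like pseudo code.

     this += delta;
     if (vcall_offset != 0)
       this += *(long *) (*(char **) this + vcall_offset);
     goto function;

   The vtable pointer is loaded from offset 0 of the adjusted this
   pointer, and the additional adjustment is read at VCALL_OFFSET
   within that vtable, exactly as the lg/ag (resp. l/a) sequences
   above implement it.  */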
12768
875862bf 12769static bool
3754d046 12770s390_valid_pointer_mode (machine_mode mode)
875862bf 12771{
12772 return (mode == SImode || (TARGET_64BIT && mode == DImode));
12773}
56769981 12774
347301d6 12775/* Checks whether the given CALL_EXPR would use a call-saved
875862bf 12776 register.  This is used to decide whether sibling call
12777 optimization could be performed on the respective function
12778 call. */
be00aaa8 12779
875862bf 12780static bool
347301d6 12781s390_call_saved_register_used (tree call_expr)
be00aaa8 12782{
39cba157 12783 CUMULATIVE_ARGS cum_v;
12784 cumulative_args_t cum;
875862bf 12785 tree parameter;
3754d046 12786 machine_mode mode;
875862bf 12787 tree type;
12788 rtx parm_rtx;
347301d6 12789 int reg, i;
be00aaa8 12790
39cba157 12791 INIT_CUMULATIVE_ARGS (cum_v, NULL, NULL, 0, 0);
12792 cum = pack_cumulative_args (&cum_v);
be00aaa8 12793
347301d6 12794 for (i = 0; i < call_expr_nargs (call_expr); i++)
875862bf 12795 {
347301d6 12796 parameter = CALL_EXPR_ARG (call_expr, i);
32eda510 12797 gcc_assert (parameter);
be00aaa8 12798
875862bf 12799 /* For an undeclared variable passed as parameter we will get
12800 an ERROR_MARK node here. */
12801 if (TREE_CODE (parameter) == ERROR_MARK)
12802 return true;
be00aaa8 12803
32eda510 12804 type = TREE_TYPE (parameter);
12805 gcc_assert (type);
be00aaa8 12806
32eda510 12807 mode = TYPE_MODE (type);
12808 gcc_assert (mode);
be00aaa8 12809
76a4c804 12810 /* We assume that in the target function all parameters are
12811 named. This only has an impact on vector argument register
 12812	 usage, none of which is call-saved.  */
39cba157 12813 if (pass_by_reference (&cum_v, mode, type, true))
875862bf 12814 {
12815 mode = Pmode;
12816 type = build_pointer_type (type);
12817 }
be00aaa8 12818
76a4c804 12819 parm_rtx = s390_function_arg (cum, mode, type, true);
be00aaa8 12820
76a4c804 12821 s390_function_arg_advance (cum, mode, type, true);
be00aaa8 12822
b5fdc416 12823 if (!parm_rtx)
12824 continue;
12825
12826 if (REG_P (parm_rtx))
12827 {
cc6a115b 12828 for (reg = 0;
12829 reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
12830 reg++)
b5fdc416 12831 if (!call_used_regs[reg + REGNO (parm_rtx)])
12832 return true;
12833 }
12834
12835 if (GET_CODE (parm_rtx) == PARALLEL)
875862bf 12836 {
b5fdc416 12837 int i;
cc6a115b 12838
b5fdc416 12839 for (i = 0; i < XVECLEN (parm_rtx, 0); i++)
12840 {
12841 rtx r = XEXP (XVECEXP (parm_rtx, 0, i), 0);
b5fdc416 12842
12843 gcc_assert (REG_P (r));
12844
cc6a115b 12845 for (reg = 0;
12846 reg < HARD_REGNO_NREGS (REGNO (r), GET_MODE (r));
12847 reg++)
b5fdc416 12848 if (!call_used_regs[reg + REGNO (r)])
12849 return true;
12850 }
875862bf 12851 }
b5fdc416 12852
875862bf 12853 }
12854 return false;
12855}
be00aaa8 12856
875862bf 12857/* Return true if the given call expression can be
12858 turned into a sibling call.
12859 DECL holds the declaration of the function to be called whereas
12860 EXP is the call expression itself. */
be00aaa8 12861
875862bf 12862static bool
12863s390_function_ok_for_sibcall (tree decl, tree exp)
12864{
12865 /* The TPF epilogue uses register 1. */
12866 if (TARGET_TPF_PROFILING)
12867 return false;
be00aaa8 12868
875862bf 12869 /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
12870 which would have to be restored before the sibcall. */
a47b0dc3 12871 if (!TARGET_64BIT && flag_pic && decl && !targetm.binds_local_p (decl))
875862bf 12872 return false;
be00aaa8 12873
875862bf 12874  /* Register 6 on s390 is available as an argument register but is
 12875	 unfortunately call-saved.  This makes functions needing this register
 12876	 for arguments not suitable for sibcalls.  */
347301d6 12877 return !s390_call_saved_register_used (exp);
875862bf 12878}
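
/* Illustrative sketch (not part of the original source): the s390 ABI
   passes integer arguments in r2-r6, so the fifth word-sized argument
   of CALLEE below lands in r6.  Since r6 is call-saved,
   s390_call_saved_register_used returns true and the tail call is not
   turned into a sibcall.

     extern long callee (long, long, long, long, long);

     long
     caller (long a, long b, long c, long d, long e)
     {
       return callee (a, b, c, d, e);
     }
*/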
be00aaa8 12879
875862bf 12880/* Return the fixed registers used for condition codes. */
be00aaa8 12881
875862bf 12882static bool
12883s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
12884{
12885 *p1 = CC_REGNUM;
12886 *p2 = INVALID_REGNUM;
ffead1ca 12887
875862bf 12888 return true;
12889}
be00aaa8 12890
875862bf 12891/* This function is used by the call expanders of the machine description.
12892 It emits the call insn itself together with the necessary operations
12893 to adjust the target address and returns the emitted insn.
12894 ADDR_LOCATION is the target address rtx
12895 TLS_CALL the location of the thread-local symbol
12896 RESULT_REG the register where the result of the call should be stored
12897 RETADDR_REG the register where the return address should be stored
12898 If this parameter is NULL_RTX the call is considered
12899 to be a sibling call. */
be00aaa8 12900
93e0956b 12901rtx_insn *
875862bf 12902s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
12903 rtx retaddr_reg)
4673c1a0 12904{
875862bf 12905 bool plt_call = false;
93e0956b 12906 rtx_insn *insn;
875862bf 12907 rtx call;
12908 rtx clobber;
12909 rtvec vec;
4a1c604e 12910
875862bf 12911 /* Direct function calls need special treatment. */
12912 if (GET_CODE (addr_location) == SYMBOL_REF)
4673c1a0 12913 {
875862bf 12914 /* When calling a global routine in PIC mode, we must
12915 replace the symbol itself with the PLT stub. */
12916 if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
12917 {
aa5b4778 12918 if (TARGET_64BIT || retaddr_reg != NULL_RTX)
9c7185f7 12919 {
12920 addr_location = gen_rtx_UNSPEC (Pmode,
12921 gen_rtvec (1, addr_location),
12922 UNSPEC_PLT);
12923 addr_location = gen_rtx_CONST (Pmode, addr_location);
12924 plt_call = true;
12925 }
12926 else
12927 /* For -fpic code the PLT entries might use r12 which is
12928 call-saved. Therefore we cannot do a sibcall when
12929 calling directly using a symbol ref. When reaching
12930 this point we decided (in s390_function_ok_for_sibcall)
12931 to do a sibcall for a function pointer but one of the
12932 optimizers was able to get rid of the function pointer
12933 by propagating the symbol ref into the call. This
12934 optimization is illegal for S/390 so we turn the direct
 12935	       call into an indirect call again.  */
12936 addr_location = force_reg (Pmode, addr_location);
875862bf 12937 }
12938
12939 /* Unless we can use the bras(l) insn, force the
12940 routine address into a register. */
12941 if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
12942 {
12943 if (flag_pic)
12944 addr_location = legitimize_pic_address (addr_location, 0);
12945 else
12946 addr_location = force_reg (Pmode, addr_location);
12947 }
4673c1a0 12948 }
875862bf 12949
12950 /* If it is already an indirect call or the code above moved the
12951 SYMBOL_REF to somewhere else make sure the address can be found in
12952 register 1. */
12953 if (retaddr_reg == NULL_RTX
12954 && GET_CODE (addr_location) != SYMBOL_REF
12955 && !plt_call)
4673c1a0 12956 {
875862bf 12957 emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
12958 addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
4673c1a0 12959 }
4673c1a0 12960
875862bf 12961 addr_location = gen_rtx_MEM (QImode, addr_location);
12962 call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);
8b4a4127 12963
875862bf 12964 if (result_reg != NULL_RTX)
d1f9b275 12965 call = gen_rtx_SET (result_reg, call);
8b4a4127 12966
875862bf 12967 if (retaddr_reg != NULL_RTX)
12968 {
12969 clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);
f81e845f 12970
875862bf 12971 if (tls_call != NULL_RTX)
12972 vec = gen_rtvec (3, call, clobber,
12973 gen_rtx_USE (VOIDmode, tls_call));
12974 else
12975 vec = gen_rtvec (2, call, clobber);
8b4a4127 12976
875862bf 12977 call = gen_rtx_PARALLEL (VOIDmode, vec);
12978 }
8b4a4127 12979
875862bf 12980 insn = emit_call_insn (call);
8b4a4127 12981
875862bf 12982 /* 31-bit PLT stubs and tls calls use the GOT register implicitly. */
12983 if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
12984 {
12985 /* s390_function_ok_for_sibcall should
12986 have denied sibcalls in this case. */
32eda510 12987 gcc_assert (retaddr_reg != NULL_RTX);
c60a7572 12988 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), gen_rtx_REG (Pmode, 12));
875862bf 12989 }
12990 return insn;
12991}
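
/* Illustrative sketch (not part of the original source): the insn
   patterns assembled by s390_emit_call above.

     basic call:        (call (mem:QI addr) (const_int 0))
     with RESULT_REG:   (set result_reg (call ...))
     with RETADDR_REG:  (parallel [<call or set>
                                   (clobber retaddr_reg)])
     with TLS_CALL:     the parallel additionally carries
                        (use tls_call)

   A NULL RETADDR_REG therefore marks the sibling-call case, where no
   return-address clobber is added.  */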
8b4a4127 12992
b2d7ede1 12993/* Implement TARGET_CONDITIONAL_REGISTER_USAGE. */
8b4a4127 12994
b2d7ede1 12995static void
875862bf 12996s390_conditional_register_usage (void)
12997{
12998 int i;
8b4a4127 12999
8b4a4127 13000 if (flag_pic)
13001 {
875862bf 13002 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
13003 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
8b4a4127 13004 }
875862bf 13005 if (TARGET_CPU_ZARCH)
8b4a4127 13006 {
d1a5573e 13007 fixed_regs[BASE_REGNUM] = 0;
13008 call_used_regs[BASE_REGNUM] = 0;
875862bf 13009 fixed_regs[RETURN_REGNUM] = 0;
13010 call_used_regs[RETURN_REGNUM] = 0;
8b4a4127 13011 }
875862bf 13012 if (TARGET_64BIT)
8b4a4127 13013 {
6a2469fe 13014 for (i = FPR8_REGNUM; i <= FPR15_REGNUM; i++)
875862bf 13015 call_used_regs[i] = call_really_used_regs[i] = 0;
8b4a4127 13016 }
13017 else
13018 {
6a2469fe 13019 call_used_regs[FPR4_REGNUM] = call_really_used_regs[FPR4_REGNUM] = 0;
13020 call_used_regs[FPR6_REGNUM] = call_really_used_regs[FPR6_REGNUM] = 0;
875862bf 13021 }
8b4a4127 13022
875862bf 13023 if (TARGET_SOFT_FLOAT)
13024 {
6a2469fe 13025 for (i = FPR0_REGNUM; i <= FPR15_REGNUM; i++)
875862bf 13026 call_used_regs[i] = fixed_regs[i] = 1;
8b4a4127 13027 }
76a4c804 13028
13029 /* Disable v16 - v31 for non-vector target. */
13030 if (!TARGET_VX)
13031 {
13032 for (i = VR16_REGNUM; i <= VR31_REGNUM; i++)
13033 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
13034 }
8b4a4127 13035}
13036
875862bf 13037/* Corresponding function to eh_return expander. */
7811991d 13038
875862bf 13039static GTY(()) rtx s390_tpf_eh_return_symbol;
13040void
13041s390_emit_tpf_eh_return (rtx target)
7811991d 13042{
93e0956b 13043 rtx_insn *insn;
13044 rtx reg, orig_ra;
525d1294 13045
875862bf 13046 if (!s390_tpf_eh_return_symbol)
13047 s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");
13048
13049 reg = gen_rtx_REG (Pmode, 2);
bcd3133e 13050 orig_ra = gen_rtx_REG (Pmode, 3);
875862bf 13051
13052 emit_move_insn (reg, target);
bcd3133e 13053 emit_move_insn (orig_ra, get_hard_reg_initial_val (Pmode, RETURN_REGNUM));
875862bf 13054 insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
13055 gen_rtx_REG (Pmode, RETURN_REGNUM));
13056 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);
bcd3133e 13057 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), orig_ra);
875862bf 13058
13059 emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
be00aaa8 13060}
13061
875862bf 13062/* Rework the prologue/epilogue to avoid saving/restoring
13063 registers unnecessarily. */
c20f8a1d 13064
6988553d 13065static void
875862bf 13066s390_optimize_prologue (void)
c6933ba6 13067{
93e0956b 13068 rtx_insn *insn, *new_insn, *next_insn;
c20f8a1d 13069
875862bf 13070 /* Do a final recompute of the frame-related data. */
ff4ce128 13071 s390_optimize_register_info ();
c20f8a1d 13072
875862bf 13073 /* If all special registers are in fact used, there's nothing we
13074 can do, so no point in walking the insn list. */
c20f8a1d 13075
ffead1ca 13076 if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
875862bf 13077 && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
ffead1ca 13078 && (TARGET_CPU_ZARCH
13079 || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
875862bf 13080 && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
13081 return;
c20f8a1d 13082
875862bf 13083 /* Search for prologue/epilogue insns and replace them. */
c20f8a1d 13084
875862bf 13085 for (insn = get_insns (); insn; insn = next_insn)
13086 {
13087 int first, last, off;
13088 rtx set, base, offset;
ff4ce128 13089 rtx pat;
c20f8a1d 13090
875862bf 13091 next_insn = NEXT_INSN (insn);
d7bec695 13092
ff4ce128 13093 if (! NONJUMP_INSN_P (insn) || ! RTX_FRAME_RELATED_P (insn))
875862bf 13094 continue;
c20f8a1d 13095
ff4ce128 13096 pat = PATTERN (insn);
13097
13098 /* Remove ldgr/lgdr instructions used for saving and restore
13099 GPRs if possible. */
54530437 13100 if (TARGET_Z10)
13101 {
13102 rtx tmp_pat = pat;
ff4ce128 13103
54530437 13104 if (INSN_CODE (insn) == CODE_FOR_stack_restore_from_fpr)
13105 tmp_pat = XVECEXP (pat, 0, 0);
ff4ce128 13106
54530437 13107 if (GET_CODE (tmp_pat) == SET
13108 && GET_MODE (SET_SRC (tmp_pat)) == DImode
13109 && REG_P (SET_SRC (tmp_pat))
13110 && REG_P (SET_DEST (tmp_pat)))
13111 {
13112 int src_regno = REGNO (SET_SRC (tmp_pat));
13113 int dest_regno = REGNO (SET_DEST (tmp_pat));
13114 int gpr_regno;
13115 int fpr_regno;
13116
13117 if (!((GENERAL_REGNO_P (src_regno)
13118 && FP_REGNO_P (dest_regno))
13119 || (FP_REGNO_P (src_regno)
13120 && GENERAL_REGNO_P (dest_regno))))
13121 continue;
ff4ce128 13122
54530437 13123 gpr_regno = GENERAL_REGNO_P (src_regno) ? src_regno : dest_regno;
13124 fpr_regno = FP_REGNO_P (src_regno) ? src_regno : dest_regno;
ff4ce128 13125
54530437 13126 /* GPR must be call-saved, FPR must be call-clobbered. */
13127 if (!call_really_used_regs[fpr_regno]
13128 || call_really_used_regs[gpr_regno])
13129 continue;
13130
13131 /* It must not happen that what we once saved in an FPR now
13132 needs a stack slot. */
13133 gcc_assert (cfun_gpr_save_slot (gpr_regno) != SAVE_SLOT_STACK);
13134
13135 if (cfun_gpr_save_slot (gpr_regno) == SAVE_SLOT_NONE)
13136 {
13137 remove_insn (insn);
13138 continue;
13139 }
ff4ce128 13140 }
13141 }
13142
13143 if (GET_CODE (pat) == PARALLEL
13144 && store_multiple_operation (pat, VOIDmode))
c20f8a1d 13145 {
ff4ce128 13146 set = XVECEXP (pat, 0, 0);
875862bf 13147 first = REGNO (SET_SRC (set));
ff4ce128 13148 last = first + XVECLEN (pat, 0) - 1;
875862bf 13149 offset = const0_rtx;
13150 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
13151 off = INTVAL (offset);
c20f8a1d 13152
875862bf 13153 if (GET_CODE (base) != REG || off < 0)
13154 continue;
43944aa4 13155 if (cfun_frame_layout.first_save_gpr != -1
13156 && (cfun_frame_layout.first_save_gpr < first
13157 || cfun_frame_layout.last_save_gpr > last))
13158 continue;
875862bf 13159 if (REGNO (base) != STACK_POINTER_REGNUM
13160 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
13161 continue;
13162 if (first > BASE_REGNUM || last < BASE_REGNUM)
13163 continue;
13164
13165 if (cfun_frame_layout.first_save_gpr != -1)
c20f8a1d 13166 {
93e0956b 13167 rtx s_pat = save_gprs (base,
875862bf 13168 off + (cfun_frame_layout.first_save_gpr
b5fdc416 13169 - first) * UNITS_PER_LONG,
875862bf 13170 cfun_frame_layout.first_save_gpr,
13171 cfun_frame_layout.last_save_gpr);
93e0956b 13172 new_insn = emit_insn_before (s_pat, insn);
875862bf 13173 INSN_ADDRESSES_NEW (new_insn, -1);
c20f8a1d 13174 }
c20f8a1d 13175
875862bf 13176 remove_insn (insn);
13177 continue;
c20f8a1d 13178 }
13179
43944aa4 13180 if (cfun_frame_layout.first_save_gpr == -1
ff4ce128 13181 && GET_CODE (pat) == SET
13182 && GENERAL_REG_P (SET_SRC (pat))
13183 && GET_CODE (SET_DEST (pat)) == MEM)
c20f8a1d 13184 {
ff4ce128 13185 set = pat;
875862bf 13186 first = REGNO (SET_SRC (set));
13187 offset = const0_rtx;
13188 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
13189 off = INTVAL (offset);
c20f8a1d 13190
875862bf 13191 if (GET_CODE (base) != REG || off < 0)
13192 continue;
13193 if (REGNO (base) != STACK_POINTER_REGNUM
13194 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
13195 continue;
c20f8a1d 13196
875862bf 13197 remove_insn (insn);
13198 continue;
c20f8a1d 13199 }
13200
ff4ce128 13201 if (GET_CODE (pat) == PARALLEL
13202 && load_multiple_operation (pat, VOIDmode))
d7bec695 13203 {
ff4ce128 13204 set = XVECEXP (pat, 0, 0);
875862bf 13205 first = REGNO (SET_DEST (set));
ff4ce128 13206 last = first + XVECLEN (pat, 0) - 1;
875862bf 13207 offset = const0_rtx;
13208 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
13209 off = INTVAL (offset);
d7bec695 13210
875862bf 13211 if (GET_CODE (base) != REG || off < 0)
13212 continue;
ff4ce128 13213
43944aa4 13214 if (cfun_frame_layout.first_restore_gpr != -1
13215 && (cfun_frame_layout.first_restore_gpr < first
13216 || cfun_frame_layout.last_restore_gpr > last))
13217 continue;
875862bf 13218 if (REGNO (base) != STACK_POINTER_REGNUM
13219 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
13220 continue;
13221 if (first > BASE_REGNUM || last < BASE_REGNUM)
13222 continue;
c20f8a1d 13223
875862bf 13224 if (cfun_frame_layout.first_restore_gpr != -1)
13225 {
93e0956b 13226 rtx rpat = restore_gprs (base,
875862bf 13227 off + (cfun_frame_layout.first_restore_gpr
b5fdc416 13228 - first) * UNITS_PER_LONG,
875862bf 13229 cfun_frame_layout.first_restore_gpr,
13230 cfun_frame_layout.last_restore_gpr);
8240ec0e 13231
13232 /* Remove REG_CFA_RESTOREs for registers that we no
13233 longer need to save. */
93e0956b 13234 REG_NOTES (rpat) = REG_NOTES (insn);
13235 for (rtx *ptr = &REG_NOTES (rpat); *ptr; )
8240ec0e 13236 if (REG_NOTE_KIND (*ptr) == REG_CFA_RESTORE
13237 && ((int) REGNO (XEXP (*ptr, 0))
13238 < cfun_frame_layout.first_restore_gpr))
13239 *ptr = XEXP (*ptr, 1);
13240 else
13241 ptr = &XEXP (*ptr, 1);
93e0956b 13242 new_insn = emit_insn_before (rpat, insn);
8240ec0e 13243 RTX_FRAME_RELATED_P (new_insn) = 1;
875862bf 13244 INSN_ADDRESSES_NEW (new_insn, -1);
13245 }
d7bec695 13246
875862bf 13247 remove_insn (insn);
13248 continue;
d7bec695 13249 }
13250
43944aa4 13251 if (cfun_frame_layout.first_restore_gpr == -1
ff4ce128 13252 && GET_CODE (pat) == SET
13253 && GENERAL_REG_P (SET_DEST (pat))
13254 && GET_CODE (SET_SRC (pat)) == MEM)
c20f8a1d 13255 {
ff4ce128 13256 set = pat;
875862bf 13257 first = REGNO (SET_DEST (set));
13258 offset = const0_rtx;
13259 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
13260 off = INTVAL (offset);
f81e845f 13261
875862bf 13262 if (GET_CODE (base) != REG || off < 0)
13263 continue;
ff4ce128 13264
875862bf 13265 if (REGNO (base) != STACK_POINTER_REGNUM
13266 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
13267 continue;
5a5e802f 13268
875862bf 13269 remove_insn (insn);
13270 continue;
13271 }
13272 }
5a5e802f 13273}
13274
33d033da 13275/* On z10 and later the dynamic branch prediction must see the
 13276	 backward jump within a certain window.  If not, it falls back to
13277 the static prediction. This function rearranges the loop backward
13278 branch in a way which makes the static prediction always correct.
13279 The function returns true if it added an instruction. */
73df8a45 13280static bool
93e0956b 13281s390_fix_long_loop_prediction (rtx_insn *insn)
73df8a45 13282{
13283 rtx set = single_set (insn);
db7dd023 13284 rtx code_label, label_ref;
158a522b 13285 rtx_insn *uncond_jump;
93e0956b 13286 rtx_insn *cur_insn;
73df8a45 13287 rtx tmp;
13288 int distance;
13289
13290 /* This will exclude branch on count and branch on index patterns
13291 since these are correctly statically predicted. */
13292 if (!set
13293 || SET_DEST (set) != pc_rtx
13294 || GET_CODE (SET_SRC(set)) != IF_THEN_ELSE)
13295 return false;
13296
7a64c761 13297 /* Skip conditional returns. */
13298 if (ANY_RETURN_P (XEXP (SET_SRC (set), 1))
13299 && XEXP (SET_SRC (set), 2) == pc_rtx)
13300 return false;
13301
73df8a45 13302 label_ref = (GET_CODE (XEXP (SET_SRC (set), 1)) == LABEL_REF ?
13303 XEXP (SET_SRC (set), 1) : XEXP (SET_SRC (set), 2));
13304
13305 gcc_assert (GET_CODE (label_ref) == LABEL_REF);
13306
13307 code_label = XEXP (label_ref, 0);
13308
13309 if (INSN_ADDRESSES (INSN_UID (code_label)) == -1
13310 || INSN_ADDRESSES (INSN_UID (insn)) == -1
13311 || (INSN_ADDRESSES (INSN_UID (insn))
33d033da 13312 - INSN_ADDRESSES (INSN_UID (code_label)) < PREDICT_DISTANCE))
73df8a45 13313 return false;
13314
13315 for (distance = 0, cur_insn = PREV_INSN (insn);
33d033da 13316 distance < PREDICT_DISTANCE - 6;
73df8a45 13317 distance += get_attr_length (cur_insn), cur_insn = PREV_INSN (cur_insn))
13318 if (!cur_insn || JUMP_P (cur_insn) || LABEL_P (cur_insn))
13319 return false;
13320
db7dd023 13321 rtx_code_label *new_label = gen_label_rtx ();
73df8a45 13322 uncond_jump = emit_jump_insn_after (
d1f9b275 13323 gen_rtx_SET (pc_rtx,
73df8a45 13324 gen_rtx_LABEL_REF (VOIDmode, code_label)),
13325 insn);
13326 emit_label_after (new_label, uncond_jump);
13327
13328 tmp = XEXP (SET_SRC (set), 1);
13329 XEXP (SET_SRC (set), 1) = XEXP (SET_SRC (set), 2);
13330 XEXP (SET_SRC (set), 2) = tmp;
13331 INSN_CODE (insn) = -1;
13332
13333 XEXP (label_ref, 0) = new_label;
13334 JUMP_LABEL (insn) = new_label;
13335 JUMP_LABEL (uncond_jump) = code_label;
13336
13337 return true;
13338}
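
/* Illustrative sketch (not part of the original source): the rewrite
   performed by s390_fix_long_loop_prediction, in pseudo assembly.

     before:
       loop:
         ...
         jc   cond, loop

     after:
       loop:
         ...
         jc   !cond, new_label
         j    loop
       new_label:

   The backward jump is now unconditional and hence always predicted
   correctly, while the inverted conditional branch only jumps forward
   over it.  */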
13339
3b14a2e6 13340/* Returns 1 if INSN reads the value of REG for purposes not related
13341 to addressing of memory, and 0 otherwise. */
13342static int
93e0956b 13343s390_non_addr_reg_read_p (rtx reg, rtx_insn *insn)
3b14a2e6 13344{
13345 return reg_referenced_p (reg, PATTERN (insn))
13346 && !reg_used_in_mem_p (REGNO (reg), PATTERN (insn));
13347}
13348
512d9edf 13349/* Starting from INSN find_cond_jump looks downwards in the insn
13350 stream for a single jump insn which is the last user of the
13351 condition code set in INSN. */
93e0956b 13352static rtx_insn *
13353find_cond_jump (rtx_insn *insn)
512d9edf 13354{
13355 for (; insn; insn = NEXT_INSN (insn))
13356 {
13357 rtx ite, cc;
13358
13359 if (LABEL_P (insn))
13360 break;
13361
13362 if (!JUMP_P (insn))
13363 {
13364 if (reg_mentioned_p (gen_rtx_REG (CCmode, CC_REGNUM), insn))
13365 break;
13366 continue;
13367 }
13368
13369 /* This will be triggered by a return. */
13370 if (GET_CODE (PATTERN (insn)) != SET)
13371 break;
13372
13373 gcc_assert (SET_DEST (PATTERN (insn)) == pc_rtx);
13374 ite = SET_SRC (PATTERN (insn));
13375
13376 if (GET_CODE (ite) != IF_THEN_ELSE)
13377 break;
13378
13379 cc = XEXP (XEXP (ite, 0), 0);
13380 if (!REG_P (cc) || !CC_REGNO_P (REGNO (cc)))
13381 break;
13382
13383 if (find_reg_note (insn, REG_DEAD, cc))
13384 return insn;
13385 break;
13386 }
13387
93e0956b 13388 return NULL;
512d9edf 13389}
13390
13391/* Swap the condition in COND and the operands in OP0 and OP1 so that
13392 the semantics does not change. If NULL_RTX is passed as COND the
13393 function tries to find the conditional jump starting with INSN. */
13394static void
93e0956b 13395s390_swap_cmp (rtx cond, rtx *op0, rtx *op1, rtx_insn *insn)
512d9edf 13396{
13397 rtx tmp = *op0;
13398
13399 if (cond == NULL_RTX)
13400 {
50fc2d35 13401 rtx_insn *jump = find_cond_jump (NEXT_INSN (insn));
13402 rtx set = jump ? single_set (jump) : NULL_RTX;
512d9edf 13403
50fc2d35 13404 if (set == NULL_RTX)
512d9edf 13405 return;
13406
50fc2d35 13407 cond = XEXP (SET_SRC (set), 0);
512d9edf 13408 }
13409
13410 *op0 = *op1;
13411 *op1 = tmp;
13412 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
13413}
3b14a2e6 13414
13415/* On z10, instructions of the compare-and-branch family have the
 13416	 property of accessing the register occurring as the second operand with
13417 its bits complemented. If such a compare is grouped with a second
13418 instruction that accesses the same register non-complemented, and
13419 if that register's value is delivered via a bypass, then the
13420 pipeline recycles, thereby causing significant performance decline.
13421 This function locates such situations and exchanges the two
73df8a45 13422	 operands of the compare.  The function returns true whenever it
13423 added an insn. */
13424static bool
93e0956b 13425s390_z10_optimize_cmp (rtx_insn *insn)
3b14a2e6 13426{
93e0956b 13427 rtx_insn *prev_insn, *next_insn;
73df8a45 13428 bool insn_added_p = false;
13429 rtx cond, *op0, *op1;
3b14a2e6 13430
73df8a45 13431 if (GET_CODE (PATTERN (insn)) == PARALLEL)
3b14a2e6 13432 {
73df8a45 13433 /* Handle compare and branch and branch on count
13434 instructions. */
13435 rtx pattern = single_set (insn);
512d9edf 13436
73df8a45 13437 if (!pattern
13438 || SET_DEST (pattern) != pc_rtx
13439 || GET_CODE (SET_SRC (pattern)) != IF_THEN_ELSE)
13440 return false;
3b14a2e6 13441
73df8a45 13442 cond = XEXP (SET_SRC (pattern), 0);
13443 op0 = &XEXP (cond, 0);
13444 op1 = &XEXP (cond, 1);
13445 }
13446 else if (GET_CODE (PATTERN (insn)) == SET)
13447 {
13448 rtx src, dest;
3b14a2e6 13449
73df8a45 13450 /* Handle normal compare instructions. */
13451 src = SET_SRC (PATTERN (insn));
13452 dest = SET_DEST (PATTERN (insn));
512d9edf 13453
73df8a45 13454 if (!REG_P (dest)
13455 || !CC_REGNO_P (REGNO (dest))
13456 || GET_CODE (src) != COMPARE)
13457 return false;
512d9edf 13458
73df8a45 13459 /* s390_swap_cmp will try to find the conditional
13460 jump when passing NULL_RTX as condition. */
13461 cond = NULL_RTX;
13462 op0 = &XEXP (src, 0);
13463 op1 = &XEXP (src, 1);
13464 }
13465 else
13466 return false;
512d9edf 13467
73df8a45 13468 if (!REG_P (*op0) || !REG_P (*op1))
13469 return false;
512d9edf 13470
cc6056e1 13471 if (GET_MODE_CLASS (GET_MODE (*op0)) != MODE_INT)
13472 return false;
13473
73df8a45 13474 /* Swap the COMPARE arguments and its mask if there is a
13475 conflicting access in the previous insn. */
bc1c8bc5 13476 prev_insn = prev_active_insn (insn);
73df8a45 13477 if (prev_insn != NULL_RTX && INSN_P (prev_insn)
13478 && reg_referenced_p (*op1, PATTERN (prev_insn)))
13479 s390_swap_cmp (cond, op0, op1, insn);
13480
13481 /* Check if there is a conflict with the next insn. If there
13482 was no conflict with the previous insn, then swap the
13483 COMPARE arguments and its mask. If we already swapped
13484 the operands, or if swapping them would cause a conflict
13485 with the previous insn, issue a NOP after the COMPARE in
 13486	     order to separate the two instructions.  */
bc1c8bc5 13487 next_insn = next_active_insn (insn);
73df8a45 13488 if (next_insn != NULL_RTX && INSN_P (next_insn)
13489 && s390_non_addr_reg_read_p (*op1, next_insn))
13490 {
512d9edf 13491 if (prev_insn != NULL_RTX && INSN_P (prev_insn)
73df8a45 13492 && s390_non_addr_reg_read_p (*op0, prev_insn))
512d9edf 13493 {
73df8a45 13494 if (REGNO (*op1) == 0)
13495 emit_insn_after (gen_nop1 (), insn);
512d9edf 13496 else
73df8a45 13497 emit_insn_after (gen_nop (), insn);
13498 insn_added_p = true;
3b14a2e6 13499 }
73df8a45 13500 else
13501 s390_swap_cmp (cond, op0, op1, insn);
3b14a2e6 13502 }
73df8a45 13503 return insn_added_p;
3b14a2e6 13504}
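
/* Illustrative sketch (not part of the original source): the operand
   swap performed by s390_z10_optimize_cmp, in pseudo assembly.

     before:   crj   %r1,%r2,cond,label
               lr    %r3,%r2        %r2 read complemented by the crj
                                    and non-complemented here

     after:    crj   %r2,%r1,swapped(cond),label
               lr    %r3,%r2

   If the swap would instead create a conflict with the previous insn,
   a NOP is emitted after the compare to split the dispatch group.  */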
13505
987860a9 13506/* Number of INSNs to be scanned backward in the last BB of the loop
13507 and forward in the first BB of the loop. This usually should be a
13508 bit more than the number of INSNs which could go into one
13509 group. */
13510#define S390_OSC_SCAN_INSN_NUM 5
13511
 13512/* Scan LOOP for static OSC collisions and return true if an osc_break
13513 should be issued for this loop. */
13514static bool
13515s390_adjust_loop_scan_osc (struct loop* loop)
13516
13517{
13518 HARD_REG_SET modregs, newregs;
13519 rtx_insn *insn, *store_insn = NULL;
13520 rtx set;
13521 struct s390_address addr_store, addr_load;
13522 subrtx_iterator::array_type array;
13523 int insn_count;
13524
13525 CLEAR_HARD_REG_SET (modregs);
13526
13527 insn_count = 0;
13528 FOR_BB_INSNS_REVERSE (loop->latch, insn)
13529 {
13530 if (!INSN_P (insn) || INSN_CODE (insn) <= 0)
13531 continue;
13532
13533 insn_count++;
13534 if (insn_count > S390_OSC_SCAN_INSN_NUM)
13535 return false;
13536
13537 find_all_hard_reg_sets (insn, &newregs, true);
13538 IOR_HARD_REG_SET (modregs, newregs);
13539
13540 set = single_set (insn);
13541 if (!set)
13542 continue;
13543
13544 if (MEM_P (SET_DEST (set))
13545 && s390_decompose_address (XEXP (SET_DEST (set), 0), &addr_store))
13546 {
13547 store_insn = insn;
13548 break;
13549 }
13550 }
13551
13552 if (store_insn == NULL_RTX)
13553 return false;
13554
13555 insn_count = 0;
13556 FOR_BB_INSNS (loop->header, insn)
13557 {
13558 if (!INSN_P (insn) || INSN_CODE (insn) <= 0)
13559 continue;
13560
13561 if (insn == store_insn)
13562 return false;
13563
13564 insn_count++;
13565 if (insn_count > S390_OSC_SCAN_INSN_NUM)
13566 return false;
13567
13568 find_all_hard_reg_sets (insn, &newregs, true);
13569 IOR_HARD_REG_SET (modregs, newregs);
13570
13571 set = single_set (insn);
13572 if (!set)
13573 continue;
13574
13575 /* An intermediate store disrupts static OSC checking
13576 anyway. */
13577 if (MEM_P (SET_DEST (set))
13578 && s390_decompose_address (XEXP (SET_DEST (set), 0), NULL))
13579 return false;
13580
13581 FOR_EACH_SUBRTX (iter, array, SET_SRC (set), NONCONST)
13582 if (MEM_P (*iter)
13583 && s390_decompose_address (XEXP (*iter, 0), &addr_load)
13584 && rtx_equal_p (addr_load.base, addr_store.base)
13585 && rtx_equal_p (addr_load.indx, addr_store.indx)
13586 && rtx_equal_p (addr_load.disp, addr_store.disp))
13587 {
13588 if ((addr_load.base != NULL_RTX
13589 && TEST_HARD_REG_BIT (modregs, REGNO (addr_load.base)))
13590 || (addr_load.indx != NULL_RTX
13591 && TEST_HARD_REG_BIT (modregs, REGNO (addr_load.indx))))
13592 return true;
13593 }
13594 }
13595 return false;
13596}
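
/* Illustrative sketch (not part of the original source): a loop of the
   shape the scan above looks for.  The store in the latch and the load
   in the header use the same base/index/displacement while the index
   register changes inside the loop, which can defeat the hardware's
   operand-store-compare (OSC) prediction on z13.

     void
     f (long *a, long n)
     {
       long acc = 0;
       for (long i = 0; i < n; i++)
         {
           acc += a[i];
           a[i] = acc;
         }
     }
*/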
13597
13598/* Look for adjustments which can be done on simple innermost
13599 loops. */
13600static void
13601s390_adjust_loops ()
13602{
13603 struct loop *loop = NULL;
13604
13605 df_analyze ();
13606 compute_bb_for_insn ();
13607
13608 /* Find the loops. */
13609 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
13610
13611 FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
13612 {
13613 if (dump_file)
13614 {
13615 flow_loop_dump (loop, dump_file, NULL, 0);
13616 fprintf (dump_file, ";; OSC loop scan Loop: ");
13617 }
13618 if (loop->latch == NULL
13619 || pc_set (BB_END (loop->latch)) == NULL_RTX
13620 || !s390_adjust_loop_scan_osc (loop))
13621 {
13622 if (dump_file)
13623 {
13624 if (loop->latch == NULL)
 13625	      fprintf (dump_file, " multiple backward jumps\n");
13626 else
13627 {
13628 fprintf (dump_file, " header insn: %d latch insn: %d ",
13629 INSN_UID (BB_HEAD (loop->header)),
13630 INSN_UID (BB_END (loop->latch)));
13631 if (pc_set (BB_END (loop->latch)) == NULL_RTX)
13632 fprintf (dump_file, " loop does not end with jump\n");
13633 else
13634 fprintf (dump_file, " not instrumented\n");
13635 }
13636 }
13637 }
13638 else
13639 {
13640 rtx_insn *new_insn;
13641
13642 if (dump_file)
13643 fprintf (dump_file, " adding OSC break insn: ");
13644 new_insn = emit_insn_before (gen_osc_break (),
13645 BB_END (loop->latch));
13646 INSN_ADDRESSES_NEW (new_insn, -1);
13647 }
13648 }
13649
13650 loop_optimizer_finalize ();
13651
13652 df_finish_pass (false);
13653}
13654
875862bf 13655/* Perform machine-dependent processing. */
7346ca58 13656
875862bf 13657static void
13658s390_reorg (void)
7346ca58 13659{
875862bf 13660 bool pool_overflow = false;
f4252e72 13661 int hw_before, hw_after;
7346ca58 13662
987860a9 13663 if (s390_tune == PROCESSOR_2964_Z13)
13664 s390_adjust_loops ();
13665
875862bf 13666 /* Make sure all splits have been performed; splits after
13667 machine_dependent_reorg might confuse insn length counts. */
13668 split_all_insns_noflow ();
f588eb9f 13669
875862bf 13670 /* Install the main literal pool and the associated base
13671 register load insns.
f588eb9f 13672
875862bf 13673 In addition, there are two problematic situations we need
13674 to correct:
7346ca58 13675
875862bf 13676 - the literal pool might be > 4096 bytes in size, so that
13677 some of its elements cannot be directly accessed
7346ca58 13678
875862bf 13679 - a branch target might be > 64K away from the branch, so that
13680 it is not possible to use a PC-relative instruction.
7346ca58 13681
875862bf 13682 To fix those, we split the single literal pool into multiple
13683 pool chunks, reloading the pool base register at various
13684 points throughout the function to ensure it always points to
13685 the pool chunk the following code expects, and / or replace
13686 PC-relative branches by absolute branches.
7346ca58 13687
875862bf 13688 However, the two problems are interdependent: splitting the
13689 literal pool can move a branch further away from its target,
13690 causing the 64K limit to overflow, and on the other hand,
13691 replacing a PC-relative branch by an absolute branch means
13692 we need to put the branch target address into the literal
13693 pool, possibly causing it to overflow.
44a61e21 13694
875862bf 13695 So, we loop trying to fix up both problems until we manage
13696 to satisfy both conditions at the same time. Note that the
13697 loop is guaranteed to terminate as every pass of the loop
13698 strictly decreases the total number of PC-relative branches
13699 in the function. (This is not completely true as there
13700 might be branch-over-pool insns introduced by chunkify_start.
13701 Those never need to be split however.) */
44a61e21 13702
875862bf 13703 for (;;)
13704 {
13705 struct constant_pool *pool = NULL;
80aaaa56 13706
875862bf 13707 /* Collect the literal pool. */
13708 if (!pool_overflow)
13709 {
13710 pool = s390_mainpool_start ();
13711 if (!pool)
13712 pool_overflow = true;
13713 }
80aaaa56 13714
875862bf 13715 /* If literal pool overflowed, start to chunkify it. */
13716 if (pool_overflow)
13717 pool = s390_chunkify_start ();
80aaaa56 13718
875862bf 13719 /* Split out-of-range branches. If this has created new
13720 literal pool entries, cancel current chunk list and
13721 recompute it. zSeries machines have large branch
13722 instructions, so we never need to split a branch. */
13723 if (!TARGET_CPU_ZARCH && s390_split_branches ())
13724 {
13725 if (pool_overflow)
13726 s390_chunkify_cancel (pool);
13727 else
13728 s390_mainpool_cancel (pool);
80aaaa56 13729
875862bf 13730 continue;
13731 }
13732
13733 /* If we made it up to here, both conditions are satisfied.
13734 Finish up literal pool related changes. */
13735 if (pool_overflow)
13736 s390_chunkify_finish (pool);
13737 else
13738 s390_mainpool_finish (pool);
13739
13740 /* We're done splitting branches. */
13741 cfun->machine->split_branches_pending_p = false;
13742 break;
80aaaa56 13743 }
80aaaa56 13744
babfdedf 13745 /* Generate out-of-pool execute target insns. */
13746 if (TARGET_CPU_ZARCH)
13747 {
93e0956b 13748 rtx_insn *insn, *target;
13749 rtx label;
babfdedf 13750
13751 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
13752 {
13753 label = s390_execute_label (insn);
13754 if (!label)
13755 continue;
13756
13757 gcc_assert (label != const0_rtx);
13758
13759 target = emit_label (XEXP (label, 0));
13760 INSN_ADDRESSES_NEW (target, -1);
13761
13762 target = emit_insn (s390_execute_target (insn));
13763 INSN_ADDRESSES_NEW (target, -1);
13764 }
13765 }
13766
13767 /* Try to optimize prologue and epilogue further. */
875862bf 13768 s390_optimize_prologue ();
3b14a2e6 13769
33d033da 13770 /* Walk over the insns and do some >=z10 specific changes. */
117d67d0 13771 if (s390_tune >= PROCESSOR_2097_Z10)
73df8a45 13772 {
93e0956b 13773 rtx_insn *insn;
73df8a45 13774 bool insn_added_p = false;
13775
13776 /* The insn lengths and addresses have to be up to date for the
13777 following manipulations. */
13778 shorten_branches (get_insns ());
13779
13780 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
13781 {
13782 if (!INSN_P (insn) || INSN_CODE (insn) <= 0)
13783 continue;
13784
13785 if (JUMP_P (insn))
33d033da 13786 insn_added_p |= s390_fix_long_loop_prediction (insn);
73df8a45 13787
33d033da 13788 if ((GET_CODE (PATTERN (insn)) == PARALLEL
13789 || GET_CODE (PATTERN (insn)) == SET)
13790 && s390_tune == PROCESSOR_2097_Z10)
73df8a45 13791 insn_added_p |= s390_z10_optimize_cmp (insn);
13792 }
13793
13794 /* Adjust branches if we added new instructions. */
13795 if (insn_added_p)
13796 shorten_branches (get_insns ());
13797 }
f4252e72 13798
13799 s390_function_num_hotpatch_hw (current_function_decl, &hw_before, &hw_after);
13800 if (hw_after > 0)
13801 {
13802 rtx_insn *insn;
13803
06877232 13804 /* Insert NOPs for hotpatching. */
f4252e72 13805 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8ae6e291 13806 /* Emit NOPs
13807 1. inside the area covered by debug information to allow setting
13808 breakpoints at the NOPs,
13809 2. before any insn which results in an asm instruction,
13810 3. before in-function labels to avoid jumping to the NOPs, for
13811 example as part of a loop,
13812 4. before any barrier in case the function is completely empty
13813 (__builtin_unreachable ()) and has neither internal labels nor
13814 active insns.
13815 */
13816 if (active_insn_p (insn) || BARRIER_P (insn) || LABEL_P (insn))
13817 break;
13818 /* Output a series of NOPs before the first active insn. */
13819 while (insn && hw_after > 0)
f4252e72 13820 {
13821 if (hw_after >= 3 && TARGET_CPU_ZARCH)
13822 {
8ae6e291 13823 emit_insn_before (gen_nop_6_byte (), insn);
f4252e72 13824 hw_after -= 3;
13825 }
13826 else if (hw_after >= 2)
13827 {
8ae6e291 13828 emit_insn_before (gen_nop_4_byte (), insn);
f4252e72 13829 hw_after -= 2;
13830 }
13831 else
13832 {
8ae6e291 13833 emit_insn_before (gen_nop_2_byte (), insn);
f4252e72 13834 hw_after -= 1;
13835 }
13836 }
f4252e72 13837 }
875862bf 13838}
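/* A worked example for the hotpatch NOP emission at the end of
   s390_reorg above (the option values are hypothetical): with
   -mhotpatch=0,5 we get hw_after == 5 halfwords, which the emission
   loop covers on zarch with one 6-byte NOP (3 halfwords) followed by
   one 4-byte NOP (2 halfwords), i.e. 10 bytes of padding after the
   function label.  */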
7346ca58 13839
8a2a84e3 13840/* Return true if INSN is an fp load insn writing register REGNO. */
13841static inline bool
ed3e6e5d 13842s390_fpload_toreg (rtx_insn *insn, unsigned int regno)
8a2a84e3 13843{
13844 rtx set;
13845 enum attr_type flag = s390_safe_attr_type (insn);
13846
13847 if (flag != TYPE_FLOADSF && flag != TYPE_FLOADDF)
13848 return false;
13849
13850 set = single_set (insn);
13851
13852 if (set == NULL_RTX)
13853 return false;
13854
13855 if (!REG_P (SET_DEST (set)) || !MEM_P (SET_SRC (set)))
13856 return false;
13857
13858 if (REGNO (SET_DEST (set)) != regno)
13859 return false;
13860
13861 return true;
13862}
13863
13864/* This value describes the distance to be avoided between an
13865   arithmetic fp instruction and an fp load writing the same register.
13866   A distance of Z10_EARLYLOAD_DISTANCE - 1 or Z10_EARLYLOAD_DISTANCE + 1
13867   is fine, but the exact value has to be avoided.  Otherwise the FP
13868   pipeline will throw an exception, causing a major penalty. */
13869#define Z10_EARLYLOAD_DISTANCE 7
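/* A sketch of the situation to be avoided (the insn sequence is
   hypothetical and only illustrates the distance):

     adbr %f0,%f2    ; arithmetic fp insn writing %f0
     ...             ; exactly Z10_EARLYLOAD_DISTANCE - 1 active insns
     ld   %f0,0(%r2) ; fp load writing %f0 -> pipeline exception

   A distance of 6 or 8 insns would be fine; only the exact distance
   of 7 triggers the penalty.  */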
13870
13871/* Rearrange the ready list in order to avoid the situation described
13872   for Z10_EARLYLOAD_DISTANCE.  A problematic load instruction is moved
13873   to slot 0, the very end of the ready list, so that it is issued last. */
13874static void
b24ef467 13875s390_z10_prevent_earlyload_conflicts (rtx_insn **ready, int *nready_p)
8a2a84e3 13876{
13877 unsigned int regno;
13878 int nready = *nready_p;
b24ef467 13879 rtx_insn *tmp;
8a2a84e3 13880 int i;
93e0956b 13881 rtx_insn *insn;
8a2a84e3 13882 rtx set;
13883 enum attr_type flag;
13884 int distance;
13885
13886 /* Skip DISTANCE - 1 active insns. */
13887 for (insn = last_scheduled_insn, distance = Z10_EARLYLOAD_DISTANCE - 1;
13888 distance > 0 && insn != NULL_RTX;
13889 distance--, insn = prev_active_insn (insn))
13890 if (CALL_P (insn) || JUMP_P (insn))
13891 return;
13892
13893 if (insn == NULL_RTX)
13894 return;
13895
13896 set = single_set (insn);
13897
13898 if (set == NULL_RTX || !REG_P (SET_DEST (set))
13899 || GET_MODE_CLASS (GET_MODE (SET_DEST (set))) != MODE_FLOAT)
13900 return;
13901
13902 flag = s390_safe_attr_type (insn);
13903
13904 if (flag == TYPE_FLOADSF || flag == TYPE_FLOADDF)
13905 return;
13906
13907 regno = REGNO (SET_DEST (set));
13908 i = nready - 1;
13909
13910 while (!s390_fpload_toreg (ready[i], regno) && i > 0)
13911 i--;
13912
13913 if (!i)
13914 return;
13915
13916 tmp = ready[i];
b24ef467 13917 memmove (&ready[1], &ready[0], sizeof (rtx_insn *) * i);
8a2a84e3 13918 ready[0] = tmp;
13919}
13920
81769881 13921
13922/* The s390_sched_state variable tracks the state of the current or
13923   the last instruction group.
13924
13925   0,1,2 number of instructions scheduled in the current group
13926   3     the last group is complete - normal insns (S390_SCHED_STATE_NORMAL)
13927   4     the last group was a cracked/expanded insn (S390_SCHED_STATE_CRACKED) */
13928
13929static int s390_sched_state;
13930
0cb69051 13931#define S390_SCHED_STATE_NORMAL 3
13932#define S390_SCHED_STATE_CRACKED 4
81769881 13933
0cb69051 13934#define S390_SCHED_ATTR_MASK_CRACKED 0x1
13935#define S390_SCHED_ATTR_MASK_EXPANDED 0x2
13936#define S390_SCHED_ATTR_MASK_ENDGROUP 0x4
13937#define S390_SCHED_ATTR_MASK_GROUPALONE 0x8
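/* A sketch of how s390_sched_state evolves, derived from
   s390_sched_variable_issue below (the insn sequence is hypothetical):

     issued insn          s390_sched_state afterwards
     normal               1
     normal               2
     normal               3  (S390_SCHED_STATE_NORMAL, group complete)
     normal               1  (opens a new group)
     cracked/expanded     4  (S390_SCHED_STATE_CRACKED)
     normal               3  (S390_SCHED_STATE_NORMAL)
     endgroup/groupalone  3  (S390_SCHED_STATE_NORMAL)  */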
81769881 13938
13939static unsigned int
d3ffa7b4 13940s390_get_sched_attrmask (rtx_insn *insn)
81769881 13941{
13942 unsigned int mask = 0;
13943
0cb69051 13944 switch (s390_tune)
13945 {
13946 case PROCESSOR_2827_ZEC12:
13947 if (get_attr_zEC12_cracked (insn))
13948 mask |= S390_SCHED_ATTR_MASK_CRACKED;
13949 if (get_attr_zEC12_expanded (insn))
13950 mask |= S390_SCHED_ATTR_MASK_EXPANDED;
13951 if (get_attr_zEC12_endgroup (insn))
13952 mask |= S390_SCHED_ATTR_MASK_ENDGROUP;
13953 if (get_attr_zEC12_groupalone (insn))
13954 mask |= S390_SCHED_ATTR_MASK_GROUPALONE;
13955 break;
13956 case PROCESSOR_2964_Z13:
13957 if (get_attr_z13_cracked (insn))
13958 mask |= S390_SCHED_ATTR_MASK_CRACKED;
13959 if (get_attr_z13_expanded (insn))
13960 mask |= S390_SCHED_ATTR_MASK_EXPANDED;
13961 if (get_attr_z13_endgroup (insn))
13962 mask |= S390_SCHED_ATTR_MASK_ENDGROUP;
13963 if (get_attr_z13_groupalone (insn))
13964 mask |= S390_SCHED_ATTR_MASK_GROUPALONE;
13965 break;
13966 default:
13967 gcc_unreachable ();
13968 }
13969 return mask;
13970}
13971
13972static unsigned int
13973s390_get_unit_mask (rtx_insn *insn, int *units)
13974{
13975 unsigned int mask = 0;
13976
13977 switch (s390_tune)
13978 {
13979 case PROCESSOR_2964_Z13:
13980 *units = 3;
13981 if (get_attr_z13_unit_lsu (insn))
13982 mask |= 1 << 0;
13983 if (get_attr_z13_unit_fxu (insn))
13984 mask |= 1 << 1;
13985 if (get_attr_z13_unit_vfu (insn))
13986 mask |= 1 << 2;
13987 break;
13988 default:
13989 gcc_unreachable ();
13990 }
81769881 13991 return mask;
13992}
13993
13994/* Return the scheduling score for INSN. The higher the score the
13995 better. The score is calculated from the OOO scheduling attributes
13996 of INSN and the scheduling state s390_sched_state. */
13997static int
d3ffa7b4 13998s390_sched_score (rtx_insn *insn)
81769881 13999{
14000 unsigned int mask = s390_get_sched_attrmask (insn);
14001 int score = 0;
14002
14003 switch (s390_sched_state)
14004 {
14005 case 0:
14006 /* Try to put insns into the first slot which would otherwise
14007 break a group. */
0cb69051 14008 if ((mask & S390_SCHED_ATTR_MASK_CRACKED) != 0
14009 || (mask & S390_SCHED_ATTR_MASK_EXPANDED) != 0)
81769881 14010 score += 5;
0cb69051 14011 if ((mask & S390_SCHED_ATTR_MASK_GROUPALONE) != 0)
81769881 14012 score += 10;
0903985d 14013 /* fallthrough */
81769881 14014 case 1:
14015 /* Prefer not cracked insns while trying to put together a
14016 group. */
0cb69051 14017 if ((mask & S390_SCHED_ATTR_MASK_CRACKED) == 0
14018 && (mask & S390_SCHED_ATTR_MASK_EXPANDED) == 0
14019 && (mask & S390_SCHED_ATTR_MASK_GROUPALONE) == 0)
81769881 14020 score += 10;
0cb69051 14021 if ((mask & S390_SCHED_ATTR_MASK_ENDGROUP) == 0)
81769881 14022 score += 5;
14023 break;
14024 case 2:
14025 /* Prefer not cracked insns while trying to put together a
14026 group. */
0cb69051 14027 if ((mask & S390_SCHED_ATTR_MASK_CRACKED) == 0
14028 && (mask & S390_SCHED_ATTR_MASK_EXPANDED) == 0
14029 && (mask & S390_SCHED_ATTR_MASK_GROUPALONE) == 0)
81769881 14030 score += 10;
14031 /* Prefer endgroup insns in the last slot. */
0cb69051 14032 if ((mask & S390_SCHED_ATTR_MASK_ENDGROUP) != 0)
81769881 14033 score += 10;
14034 break;
0cb69051 14035 case S390_SCHED_STATE_NORMAL:
81769881 14036 /* Prefer not cracked insns if the last was not cracked. */
0cb69051 14037 if ((mask & S390_SCHED_ATTR_MASK_CRACKED) == 0
14038 && (mask & S390_SCHED_ATTR_MASK_EXPANDED) == 0)
81769881 14039 score += 5;
0cb69051 14040 if ((mask & S390_SCHED_ATTR_MASK_GROUPALONE) != 0)
81769881 14041 score += 10;
14042 break;
0cb69051 14043 case S390_SCHED_STATE_CRACKED:
81769881 14044 /* Try to keep cracked insns together to prevent them from
14045 interrupting groups. */
0cb69051 14046 if ((mask & S390_SCHED_ATTR_MASK_CRACKED) != 0
14047 || (mask & S390_SCHED_ATTR_MASK_EXPANDED) != 0)
81769881 14048 score += 5;
14049 break;
14050 }
0cb69051 14051
14052 if (s390_tune == PROCESSOR_2964_Z13)
14053 {
14054 int units, i;
14055 unsigned unit_mask, m = 1;
14056
14057 unit_mask = s390_get_unit_mask (insn, &units);
14058 gcc_assert (units <= MAX_SCHED_UNITS);
14059
14060 /* Add a score in range 0..MAX_SCHED_MIX_SCORE depending on how long
14061 ago the last insn of this unit type got scheduled. This is
14062 supposed to help provide a proper instruction mix to the
14063 CPU. */
14064 for (i = 0; i < units; i++, m <<= 1)
14065 if (m & unit_mask)
14066 score += (last_scheduled_unit_distance[i] * MAX_SCHED_MIX_SCORE /
14067 MAX_SCHED_MIX_DISTANCE);
14068 }
81769881 14069 return score;
14070}
14071
8a2a84e3 14072/* This function is called via hook TARGET_SCHED_REORDER before
4246a5c7 14073 issuing one insn from list READY which contains *NREADYP entries.
8a2a84e3 14074 For target z10 it reorders load instructions to avoid early load
14075 conflicts in the floating point pipeline.  */
14076static int
81769881 14077s390_sched_reorder (FILE *file, int verbose,
b24ef467 14078 rtx_insn **ready, int *nreadyp, int clock ATTRIBUTE_UNUSED)
8a2a84e3 14079{
117d67d0 14080 if (s390_tune == PROCESSOR_2097_Z10
14081 && reload_completed
14082 && *nreadyp > 1)
14083 s390_z10_prevent_earlyload_conflicts (ready, nreadyp);
8a2a84e3 14084
117d67d0 14085 if (s390_tune >= PROCESSOR_2827_ZEC12
81769881 14086 && reload_completed
14087 && *nreadyp > 1)
14088 {
14089 int i;
14090 int last_index = *nreadyp - 1;
14091 int max_index = -1;
14092 int max_score = -1;
b24ef467 14093 rtx_insn *tmp;
81769881 14094
14095 /* Just move the insn with the highest score to the top (the
14096 end) of the list. A full sort is not needed since a conflict
14097 in the hazard recognition cannot happen. So the top insn in
14098 the ready list will always be taken. */
14099 for (i = last_index; i >= 0; i--)
14100 {
14101 int score;
14102
14103 if (recog_memoized (ready[i]) < 0)
14104 continue;
14105
14106 score = s390_sched_score (ready[i]);
14107 if (score > max_score)
14108 {
14109 max_score = score;
14110 max_index = i;
14111 }
14112 }
14113
14114 if (max_index != -1)
14115 {
14116 if (max_index != last_index)
14117 {
14118 tmp = ready[max_index];
14119 ready[max_index] = ready[last_index];
14120 ready[last_index] = tmp;
14121
14122 if (verbose > 5)
14123 fprintf (file,
0cb69051 14124 ";;\t\tBACKEND: move insn %d to the top of list\n",
81769881 14125 INSN_UID (ready[last_index]));
14126 }
14127 else if (verbose > 5)
14128 fprintf (file,
0cb69051 14129 ";;\t\tBACKEND: best insn %d already on top\n",
81769881 14130 INSN_UID (ready[last_index]));
14131 }
14132
14133 if (verbose > 5)
14134 {
14135 fprintf (file, "ready list ooo attributes - sched state: %d\n",
14136 s390_sched_state);
14137
14138 for (i = last_index; i >= 0; i--)
14139 {
0cb69051 14140 unsigned int sched_mask;
14141 rtx_insn *insn = ready[i];
14142
14143 if (recog_memoized (insn) < 0)
81769881 14144 continue;
0cb69051 14145
14146 sched_mask = s390_get_sched_attrmask (insn);
14147 fprintf (file, ";;\t\tBACKEND: insn %d score: %d: ",
14148 INSN_UID (insn),
14149 s390_sched_score (insn));
14150#define PRINT_SCHED_ATTR(M, ATTR) fprintf (file, "%s ",\
14151 ((M) & sched_mask) ? #ATTR : "");
14152 PRINT_SCHED_ATTR (S390_SCHED_ATTR_MASK_CRACKED, cracked);
14153 PRINT_SCHED_ATTR (S390_SCHED_ATTR_MASK_EXPANDED, expanded);
14154 PRINT_SCHED_ATTR (S390_SCHED_ATTR_MASK_ENDGROUP, endgroup);
14155 PRINT_SCHED_ATTR (S390_SCHED_ATTR_MASK_GROUPALONE, groupalone);
14156#undef PRINT_SCHED_ATTR
14157 if (s390_tune == PROCESSOR_2964_Z13)
14158 {
14159 unsigned int unit_mask, m = 1;
14160 int units, j;
14161
14162 unit_mask = s390_get_unit_mask (insn, &units);
14163 fprintf (file, "(units:");
14164 for (j = 0; j < units; j++, m <<= 1)
14165 if (m & unit_mask)
14166 fprintf (file, " u%d", j);
14167 fprintf (file, ")");
14168 }
81769881 14169 fprintf (file, "\n");
14170 }
14171 }
14172 }
14173
8a2a84e3 14174 return s390_issue_rate ();
14175}
14176
81769881 14177
8a2a84e3 14178/* This function is called via hook TARGET_SCHED_VARIABLE_ISSUE after
14179 the scheduler has issued INSN. It stores the last issued insn into
14180 last_scheduled_insn in order to make it available for
14181 s390_sched_reorder. */
14182static int
18282db0 14183s390_sched_variable_issue (FILE *file, int verbose, rtx_insn *insn, int more)
8a2a84e3 14184{
14185 last_scheduled_insn = insn;
14186
117d67d0 14187 if (s390_tune >= PROCESSOR_2827_ZEC12
81769881 14188 && reload_completed
14189 && recog_memoized (insn) >= 0)
14190 {
14191 unsigned int mask = s390_get_sched_attrmask (insn);
14192
0cb69051 14193 if ((mask & S390_SCHED_ATTR_MASK_CRACKED) != 0
14194 || (mask & S390_SCHED_ATTR_MASK_EXPANDED) != 0)
14195 s390_sched_state = S390_SCHED_STATE_CRACKED;
14196 else if ((mask & S390_SCHED_ATTR_MASK_ENDGROUP) != 0
14197 || (mask & S390_SCHED_ATTR_MASK_GROUPALONE) != 0)
14198 s390_sched_state = S390_SCHED_STATE_NORMAL;
81769881 14199 else
14200 {
14201 /* Only normal insns are left (mask == 0). */
14202 switch (s390_sched_state)
14203 {
14204 case 0:
14205 case 1:
14206 case 2:
0cb69051 14207 case S390_SCHED_STATE_NORMAL:
14208 if (s390_sched_state == S390_SCHED_STATE_NORMAL)
81769881 14209 s390_sched_state = 1;
14210 else
14211 s390_sched_state++;
14212
14213 break;
0cb69051 14214 case S390_SCHED_STATE_CRACKED:
14215 s390_sched_state = S390_SCHED_STATE_NORMAL;
81769881 14216 break;
14217 }
14218 }
0cb69051 14219
14220 if (s390_tune == PROCESSOR_2964_Z13)
14221 {
14222 int units, i;
14223 unsigned unit_mask, m = 1;
14224
14225 unit_mask = s390_get_unit_mask (insn, &units);
14226 gcc_assert (units <= MAX_SCHED_UNITS);
14227
14228 for (i = 0; i < units; i++, m <<= 1)
14229 if (m & unit_mask)
14230 last_scheduled_unit_distance[i] = 0;
14231 else if (last_scheduled_unit_distance[i] < MAX_SCHED_MIX_DISTANCE)
14232 last_scheduled_unit_distance[i]++;
14233 }
14234
81769881 14235 if (verbose > 5)
14236 {
0cb69051 14237 unsigned int sched_mask;
14238
14239 sched_mask = s390_get_sched_attrmask (insn);
14240
14241 fprintf (file, ";;\t\tBACKEND: insn %d: ", INSN_UID (insn));
14242#define PRINT_SCHED_ATTR(M, ATTR) fprintf (file, "%s ", ((M) & sched_mask) ? #ATTR : "");
14243 PRINT_SCHED_ATTR (S390_SCHED_ATTR_MASK_CRACKED, cracked);
14244 PRINT_SCHED_ATTR (S390_SCHED_ATTR_MASK_EXPANDED, expanded);
14245 PRINT_SCHED_ATTR (S390_SCHED_ATTR_MASK_ENDGROUP, endgroup);
14246 PRINT_SCHED_ATTR (S390_SCHED_ATTR_MASK_GROUPALONE, groupalone);
14247#undef PRINT_SCHED_ATTR
14248
14249 if (s390_tune == PROCESSOR_2964_Z13)
14250 {
14251 unsigned int unit_mask, m = 1;
14252 int units, j;
14253
14254 unit_mask = s390_get_unit_mask (insn, &units);
14255 fprintf (file, "(units:");
14256 for (j = 0; j < units; j++, m <<= 1)
14257 if (m & unit_mask)
14258 fprintf (file, " %d", j);
14259 fprintf (file, ")");
14260 }
14261 fprintf (file, " sched state: %d\n", s390_sched_state);
14262
14263 if (s390_tune == PROCESSOR_2964_Z13)
14264 {
14265 int units, j;
14266
14267 s390_get_unit_mask (insn, &units);
14268
14269 fprintf (file, ";;\t\tBACKEND: units unused for: ");
14270 for (j = 0; j < units; j++)
14271 fprintf (file, "%d:%d ", j, last_scheduled_unit_distance[j]);
14272 fprintf (file, "\n");
14273 }
81769881 14274 }
14275 }
14276
8a2a84e3 14277 if (GET_CODE (PATTERN (insn)) != USE
14278 && GET_CODE (PATTERN (insn)) != CLOBBER)
14279 return more - 1;
14280 else
14281 return more;
14282}
7346ca58 14283
494d0169 14284static void
14285s390_sched_init (FILE *file ATTRIBUTE_UNUSED,
14286 int verbose ATTRIBUTE_UNUSED,
14287 int max_ready ATTRIBUTE_UNUSED)
14288{
93e0956b 14289 last_scheduled_insn = NULL;
0cb69051 14290 memset (last_scheduled_unit_distance, 0, MAX_SCHED_UNITS * sizeof (int));
81769881 14291 s390_sched_state = 0;
494d0169 14292}
14293
9ccaa774 14294/* This target hook implementation for TARGET_LOOP_UNROLL_ADJUST calculates
33d033da 14295   the number of times struct loop *loop should be unrolled when tuning
14296   for CPUs with a built-in stride prefetcher.
14297   The loop body is analyzed by counting the memory references in each
9ccaa774 14298   rtx of the loop.  Depending on the loop depth and the number of
14299   memory accesses a new unroll factor <= nunroll is returned to improve
67cf9b55 14300   the behavior of the hardware prefetch unit.  */
9ccaa774 14301static unsigned
14302s390_loop_unroll_adjust (unsigned nunroll, struct loop *loop)
14303{
14304 basic_block *bbs;
93e0956b 14305 rtx_insn *insn;
9ccaa774 14306 unsigned i;
14307 unsigned mem_count = 0;
14308
117d67d0 14309 if (s390_tune < PROCESSOR_2097_Z10)
9ccaa774 14310 return nunroll;
14311
14312 /* Count the number of memory references within the loop body. */
14313 bbs = get_loop_body (loop);
15e472ec 14314 subrtx_iterator::array_type array;
9ccaa774 14315 for (i = 0; i < loop->num_nodes; i++)
15e472ec 14316 FOR_BB_INSNS (bbs[i], insn)
14317 if (INSN_P (insn) && INSN_CODE (insn) != -1)
14318 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
14319 if (MEM_P (*iter))
14320 mem_count += 1;
9ccaa774 14321 free (bbs);
14322
14323 /* Prevent division by zero; nunroll need not be adjusted in this case.  */
14324 if (mem_count == 0)
14325 return nunroll;
14326
14327 switch (loop_depth (loop))
14328 {
14329 case 1:
14330 return MIN (nunroll, 28 / mem_count);
14331 case 2:
14332 return MIN (nunroll, 22 / mem_count);
14333 default:
14334 return MIN (nunroll, 16 / mem_count);
14335 }
14336}
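/* A worked example (hypothetical numbers): for a depth 1 loop with
   mem_count == 4 memory references and nunroll == 16 requested by the
   unroller, the hook returns MIN (16, 28 / 4) == 7, limiting the
   unroll factor so that the unrolled loop body issues roughly 28
   memory accesses at most.  */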
14337
7a0cee35 14338/* Restore the current options. This is a hook function and also called
14339 internally. */
14340
0b8be04c 14341static void
7a0cee35 14342s390_function_specific_restore (struct gcc_options *opts,
14343 struct cl_target_option *ptr ATTRIBUTE_UNUSED)
0b8be04c 14344{
7a0cee35 14345 opts->x_s390_cost_pointer = (long)processor_table[opts->x_s390_tune].cost;
14346}
0b8be04c 14347
7a0cee35 14348static void
3bd8520f 14349s390_option_override_internal (bool main_args_p,
14350 struct gcc_options *opts,
7a0cee35 14351 const struct gcc_options *opts_set)
14352{
3bd8520f 14353 const char *prefix;
14354 const char *suffix;
14355
14356 /* Set up prefix/suffix so the error messages refer to either the command
14357 line argument, or the attribute(target). */
14358 if (main_args_p)
14359 {
14360 prefix = "-m";
14361 suffix = "";
14362 }
14363 else
14364 {
14365 prefix = "option(\"";
14366 suffix = "\")";
14367 }
14368
14369
0b8be04c 14370 /* Architecture mode defaults according to ABI. */
7a0cee35 14371 if (!(opts_set->x_target_flags & MASK_ZARCH))
0b8be04c 14372 {
14373 if (TARGET_64BIT)
7a0cee35 14374 opts->x_target_flags |= MASK_ZARCH;
0b8be04c 14375 else
7a0cee35 14376 opts->x_target_flags &= ~MASK_ZARCH;
0b8be04c 14377 }
14378
7a0cee35 14379 /* Set the march default in case it hasn't been specified on cmdline. */
14380 if (!opts_set->x_s390_arch)
3bd8520f 14381 opts->x_s390_arch = PROCESSOR_2064_Z900;
14382 else if (opts->x_s390_arch == PROCESSOR_9672_G5
14383 || opts->x_s390_arch == PROCESSOR_9672_G6)
14384 warning (OPT_Wdeprecated, "%sarch=%s%s is deprecated and will be removed "
14385 "in future releases; use at least %sarch=z900%s",
14386 prefix, opts->x_s390_arch == PROCESSOR_9672_G5 ? "g5" : "g6",
14387 suffix, prefix, suffix);
14388
7a0cee35 14389 opts->x_s390_arch_flags = processor_flags_table[(int) opts->x_s390_arch];
0b8be04c 14390
14391 /* Determine processor to tune for. */
7a0cee35 14392 if (!opts_set->x_s390_tune)
14393 opts->x_s390_tune = opts->x_s390_arch;
3bd8520f 14394 else if (opts->x_s390_tune == PROCESSOR_9672_G5
14395 || opts->x_s390_tune == PROCESSOR_9672_G6)
14396 warning (OPT_Wdeprecated, "%stune=%s%s is deprecated and will be removed "
14397 "in future releases; use at least %stune=z900%s",
14398 prefix, opts->x_s390_tune == PROCESSOR_9672_G5 ? "g5" : "g6",
14399 suffix, prefix, suffix);
14400
7a0cee35 14401 opts->x_s390_tune_flags = processor_flags_table[opts->x_s390_tune];
0b8be04c 14402
14403 /* Sanity checks. */
7a0cee35 14404 if (opts->x_s390_arch == PROCESSOR_NATIVE
14405 || opts->x_s390_tune == PROCESSOR_NATIVE)
db249f37 14406 gcc_unreachable ();
7a0cee35 14407 if (TARGET_ZARCH_P (opts->x_target_flags) && !TARGET_CPU_ZARCH_P (opts))
14408 error ("z/Architecture mode not supported on %s",
14409 processor_table[(int)opts->x_s390_arch].name);
14410 if (TARGET_64BIT && !TARGET_ZARCH_P (opts->x_target_flags))
0b8be04c 14411 error ("64-bit ABI not supported in ESA/390 mode");
14412
0b8be04c 14413 /* Enable hardware transactions if available and not explicitly
14414 disabled by user. E.g. with -m31 -march=zEC12 -mzarch */
7a0cee35 14415 if (!TARGET_OPT_HTM_P (opts_set->x_target_flags))
14416 {
14417 if (TARGET_CPU_HTM_P (opts) && TARGET_ZARCH_P (opts->x_target_flags))
14418 opts->x_target_flags |= MASK_OPT_HTM;
14419 else
14420 opts->x_target_flags &= ~MASK_OPT_HTM;
14421 }
0b8be04c 14422
7a0cee35 14423 if (TARGET_OPT_VX_P (opts_set->x_target_flags))
cc79fcc9 14424 {
7a0cee35 14425 if (TARGET_OPT_VX_P (opts->x_target_flags))
cc79fcc9 14426 {
7a0cee35 14427 if (!TARGET_CPU_VX_P (opts))
cc79fcc9 14428 error ("hardware vector support not available on %s",
7a0cee35 14429 processor_table[(int)opts->x_s390_arch].name);
14430 if (TARGET_SOFT_FLOAT_P (opts->x_target_flags))
cc79fcc9 14431 error ("hardware vector support not available with -msoft-float");
14432 }
14433 }
7a0cee35 14434 else
14435 {
14436 if (TARGET_CPU_VX_P (opts))
14437 /* Enable vector support if available and not explicitly disabled
14438 by user. E.g. with -m31 -march=z13 -mzarch */
14439 opts->x_target_flags |= MASK_OPT_VX;
14440 else
14441 opts->x_target_flags &= ~MASK_OPT_VX;
14442 }
cc79fcc9 14443
7a0cee35 14444 /* Use hardware DFP if available and not explicitly disabled by
14445 user. E.g. with -m31 -march=z10 -mzarch */
14446 if (!TARGET_HARD_DFP_P (opts_set->x_target_flags))
14447 {
14448 if (TARGET_DFP_P (opts))
14449 opts->x_target_flags |= MASK_HARD_DFP;
14450 else
14451 opts->x_target_flags &= ~MASK_HARD_DFP;
14452 }
14453
14454 if (TARGET_HARD_DFP_P (opts->x_target_flags) && !TARGET_DFP_P (opts))
0b8be04c 14455 {
7a0cee35 14456 if (TARGET_HARD_DFP_P (opts_set->x_target_flags))
0b8be04c 14457 {
7a0cee35 14458 if (!TARGET_CPU_DFP_P (opts))
0b8be04c 14459 error ("hardware decimal floating point instructions"
7a0cee35 14460 " not available on %s",
14461 processor_table[(int)opts->x_s390_arch].name);
14462 if (!TARGET_ZARCH_P (opts->x_target_flags))
0b8be04c 14463 error ("hardware decimal floating point instructions"
14464 " not available in ESA/390 mode");
14465 }
14466 else
7a0cee35 14467 opts->x_target_flags &= ~MASK_HARD_DFP;
0b8be04c 14468 }
14469
7a0cee35 14470 if (TARGET_SOFT_FLOAT_P (opts_set->x_target_flags)
14471 && TARGET_SOFT_FLOAT_P (opts->x_target_flags))
0b8be04c 14472 {
7a0cee35 14473 if (TARGET_HARD_DFP_P (opts_set->x_target_flags)
14474 && TARGET_HARD_DFP_P (opts->x_target_flags))
0b8be04c 14475 error ("-mhard-dfp can%'t be used in conjunction with -msoft-float");
14476
7a0cee35 14477 opts->x_target_flags &= ~MASK_HARD_DFP;
0b8be04c 14478 }
14479
7a0cee35 14480 if (TARGET_BACKCHAIN_P (opts->x_target_flags)
14481 && TARGET_PACKED_STACK_P (opts->x_target_flags)
14482 && TARGET_HARD_FLOAT_P (opts->x_target_flags))
0b8be04c 14483 error ("-mbackchain -mpacked-stack -mhard-float are not supported "
14484 "in combination");
14485
7a0cee35 14486 if (opts->x_s390_stack_size)
0b8be04c 14487 {
7a0cee35 14488 if (opts->x_s390_stack_guard >= opts->x_s390_stack_size)
0b8be04c 14489 error ("stack size must be greater than the stack guard value");
7a0cee35 14490 else if (opts->x_s390_stack_size > 1 << 16)
0b8be04c 14491 error ("stack size must not be greater than 64k");
14492 }
7a0cee35 14493 else if (opts->x_s390_stack_guard)
0b8be04c 14494 error ("-mstack-guard implies use of -mstack-size");
14495
14496#ifdef TARGET_DEFAULT_LONG_DOUBLE_128
7a0cee35 14497 if (!TARGET_LONG_DOUBLE_128_P (opts_set->x_target_flags))
14498 opts->x_target_flags |= MASK_LONG_DOUBLE_128;
0b8be04c 14499#endif
14500
7a0cee35 14501 if (opts->x_s390_tune >= PROCESSOR_2097_Z10)
0b8be04c 14502 {
14503 maybe_set_param_value (PARAM_MAX_UNROLLED_INSNS, 100,
7a0cee35 14504 opts->x_param_values,
14505 opts_set->x_param_values);
0b8be04c 14506 maybe_set_param_value (PARAM_MAX_UNROLL_TIMES, 32,
7a0cee35 14507 opts->x_param_values,
14508 opts_set->x_param_values);
0b8be04c 14509 maybe_set_param_value (PARAM_MAX_COMPLETELY_PEELED_INSNS, 2000,
7a0cee35 14510 opts->x_param_values,
14511 opts_set->x_param_values);
0b8be04c 14512 maybe_set_param_value (PARAM_MAX_COMPLETELY_PEEL_TIMES, 64,
7a0cee35 14513 opts->x_param_values,
14514 opts_set->x_param_values);
0b8be04c 14515 }
14516
14517 maybe_set_param_value (PARAM_MAX_PENDING_LIST_LENGTH, 256,
7a0cee35 14518 opts->x_param_values,
14519 opts_set->x_param_values);
0b8be04c 14520 /* Values for loop prefetching.  */
14521 maybe_set_param_value (PARAM_L1_CACHE_LINE_SIZE, 256,
7a0cee35 14522 opts->x_param_values,
14523 opts_set->x_param_values);
0b8be04c 14524 maybe_set_param_value (PARAM_L1_CACHE_SIZE, 128,
7a0cee35 14525 opts->x_param_values,
14526 opts_set->x_param_values);
0b8be04c 14527 /* s390 has more than two cache levels and the sizes are much larger.
14528     Since we are always running virtualized, assume that we only get a
14529     small part of the caches above L1.  */
14530 maybe_set_param_value (PARAM_L2_CACHE_SIZE, 1500,
7a0cee35 14531 opts->x_param_values,
14532 opts_set->x_param_values);
0b8be04c 14533 maybe_set_param_value (PARAM_PREFETCH_MIN_INSN_TO_MEM_RATIO, 2,
7a0cee35 14534 opts->x_param_values,
14535 opts_set->x_param_values);
0b8be04c 14536 maybe_set_param_value (PARAM_SIMULTANEOUS_PREFETCHES, 6,
7a0cee35 14537 opts->x_param_values,
14538 opts_set->x_param_values);
14539
14540 /* Use the alternative scheduling-pressure algorithm by default. */
14541 maybe_set_param_value (PARAM_SCHED_PRESSURE_ALGORITHM, 2,
14542 opts->x_param_values,
14543 opts_set->x_param_values);
14544
14545 /* Call target specific restore function to do post-init work. At the moment,
14546 this just sets opts->x_s390_cost_pointer. */
14547 s390_function_specific_restore (opts, NULL);
14548}
14549
14550static void
14551s390_option_override (void)
14552{
14553 unsigned int i;
14554 cl_deferred_option *opt;
14555 vec<cl_deferred_option> *v =
14556 (vec<cl_deferred_option> *) s390_deferred_options;
14557
14558 if (v)
14559 FOR_EACH_VEC_ELT (*v, i, opt)
14560 {
14561 switch (opt->opt_index)
14562 {
14563 case OPT_mhotpatch_:
14564 {
14565 int val1;
14566 int val2;
14567 char s[256];
14568 char *t;
14569
14570 strncpy (s, opt->arg, 256);
14571 s[255] = 0;
14572 t = strchr (s, ',');
14573 if (t != NULL)
14574 {
14575 *t = 0;
14576 t++;
14577 val1 = integral_argument (s);
14578 val2 = integral_argument (t);
14579 }
14580 else
14581 {
14582 val1 = -1;
14583 val2 = -1;
14584 }
14585 if (val1 == -1 || val2 == -1)
14586 {
14587 /* argument is not a plain number */
14588 error ("arguments to %qs should be non-negative integers",
14589 "-mhotpatch=n,m");
14590 break;
14591 }
14592 else if (val1 > s390_hotpatch_hw_max
14593 || val2 > s390_hotpatch_hw_max)
14594 {
14595 error ("argument to %qs is too large (max. %d)",
14596 "-mhotpatch=n,m", s390_hotpatch_hw_max);
14597 break;
14598 }
14599 s390_hotpatch_hw_before_label = val1;
14600 s390_hotpatch_hw_after_label = val2;
14601 break;
14602 }
14603 default:
14604 gcc_unreachable ();
14605 }
14606 }
14607
14608 /* Set up function hooks. */
14609 init_machine_status = s390_init_machine_status;
14610
3bd8520f 14611 s390_option_override_internal (true, &global_options, &global_options_set);
7a0cee35 14612
14613 /* Save the initial options in case the user specifies function
14614     specific options later.  */
14615 target_option_default_node = build_target_option_node (&global_options);
14616 target_option_current_node = target_option_default_node;
0b8be04c 14617
14618 /* This cannot reside in s390_option_optimization_table since HAVE_prefetch
14619 requires the arch flags to be evaluated already. Since prefetching
14620 is beneficial on s390, we enable it if available. */
14621 if (flag_prefetch_loop_arrays < 0 && HAVE_prefetch && optimize >= 3)
14622 flag_prefetch_loop_arrays = 1;
14623
0b8be04c 14624 if (TARGET_TPF)
14625 {
14626 /* Don't emit DWARF3/4 unless specifically selected. The TPF
14627 debuggers do not yet support DWARF 3/4. */
14628 if (!global_options_set.x_dwarf_strict)
14629 dwarf_strict = 1;
14630 if (!global_options_set.x_dwarf_version)
14631 dwarf_version = 2;
14632 }
14633
14634 /* Register a target-specific optimization-and-lowering pass
14635 to run immediately before prologue and epilogue generation.
14636
14637 Registering the pass must be done at start up. It's
14638 convenient to do it here. */
14639 opt_pass *new_pass = new pass_s390_early_mach (g);
14640 struct register_pass_info insert_pass_s390_early_mach =
14641 {
14642 new_pass, /* pass */
14643 "pro_and_epilogue", /* reference_pass_name */
14644 1, /* ref_pass_instance_number */
14645 PASS_POS_INSERT_BEFORE /* po_op */
14646 };
14647 register_pass (&insert_pass_s390_early_mach);
14648}
14649
7a0cee35 14650#if S390_USE_TARGET_ATTRIBUTE
14651/* Inner function to process the attribute((target(...))): take an argument
14652   and set the current options from it.  If we have a list, recursively go
14653   over the list. */
14654
14655static bool
14656s390_valid_target_attribute_inner_p (tree args,
14657 struct gcc_options *opts,
14658 struct gcc_options *new_opts_set,
14659 bool force_pragma)
14660{
14661 char *next_optstr;
14662 bool ret = true;
14663
14664#define S390_ATTRIB(S,O,A) { S, sizeof (S)-1, O, A, 0 }
14665#define S390_PRAGMA(S,O,A) { S, sizeof (S)-1, O, A, 1 }
14666 static const struct
14667 {
14668 const char *string;
14669 size_t len;
14670 int opt;
14671 int has_arg;
14672 int only_as_pragma;
14673 } attrs[] = {
14674 /* enum options */
14675 S390_ATTRIB ("arch=", OPT_march_, 1),
14676 S390_ATTRIB ("tune=", OPT_mtune_, 1),
14677 /* uinteger options */
14678 S390_ATTRIB ("stack-guard=", OPT_mstack_guard_, 1),
14679 S390_ATTRIB ("stack-size=", OPT_mstack_size_, 1),
14680 S390_ATTRIB ("branch-cost=", OPT_mbranch_cost_, 1),
14681 S390_ATTRIB ("warn-framesize=", OPT_mwarn_framesize_, 1),
14682 /* flag options */
14683 S390_ATTRIB ("backchain", OPT_mbackchain, 0),
14684 S390_ATTRIB ("hard-dfp", OPT_mhard_dfp, 0),
14685 S390_ATTRIB ("hard-float", OPT_mhard_float, 0),
14686 S390_ATTRIB ("htm", OPT_mhtm, 0),
14687 S390_ATTRIB ("vx", OPT_mvx, 0),
14688 S390_ATTRIB ("packed-stack", OPT_mpacked_stack, 0),
14689 S390_ATTRIB ("small-exec", OPT_msmall_exec, 0),
14690 S390_ATTRIB ("soft-float", OPT_msoft_float, 0),
14691 S390_ATTRIB ("mvcle", OPT_mmvcle, 0),
14692 S390_PRAGMA ("zvector", OPT_mzvector, 0),
14693 /* boolean options */
14694 S390_ATTRIB ("warn-dynamicstack", OPT_mwarn_dynamicstack, 0),
14695 };
14696#undef S390_ATTRIB
14697#undef S390_PRAGMA
14698
14699 /* If this is a list, recurse to get the options. */
14700 if (TREE_CODE (args) == TREE_LIST)
14701 {
14702 bool ret = true;
14703 int num_pragma_values;
14704 int i;
14705
14706 /* Note: attribs.c:decl_attributes prepends the values from
14707 current_target_pragma to the list of target attributes. To determine
14708 whether we're looking at a value of the attribute or the pragma we
14709 assume that the first [list_length (current_target_pragma)] values in
14710 the list are the values from the pragma. */
14711 num_pragma_values = (!force_pragma && current_target_pragma != NULL)
14712 ? list_length (current_target_pragma) : 0;
14713 for (i = 0; args; args = TREE_CHAIN (args), i++)
14714 {
14715 bool is_pragma;
14716
14717 is_pragma = (force_pragma || i < num_pragma_values);
14718 if (TREE_VALUE (args)
14719 && !s390_valid_target_attribute_inner_p (TREE_VALUE (args),
14720 opts, new_opts_set,
14721 is_pragma))
14722 {
14723 ret = false;
14724 }
14725 }
14726 return ret;
14727 }
14728
14729 else if (TREE_CODE (args) != STRING_CST)
14730 {
14731 error ("attribute %<target%> argument not a string");
14732 return false;
14733 }
14734
14735 /* Handle multiple arguments separated by commas. */
14736 next_optstr = ASTRDUP (TREE_STRING_POINTER (args));
14737
14738 while (next_optstr && *next_optstr != '\0')
14739 {
14740 char *p = next_optstr;
14741 char *orig_p = p;
14742 char *comma = strchr (next_optstr, ',');
14743 size_t len, opt_len;
14744 int opt;
14745 bool opt_set_p;
14746 char ch;
14747 unsigned i;
14748 int mask = 0;
14749 enum cl_var_type var_type;
14750 bool found;
14751
14752 if (comma)
14753 {
14754 *comma = '\0';
14755 len = comma - next_optstr;
14756 next_optstr = comma + 1;
14757 }
14758 else
14759 {
14760 len = strlen (p);
14761 next_optstr = NULL;
14762 }
14763
14764 /* Recognize no-xxx. */
14765 if (len > 3 && p[0] == 'n' && p[1] == 'o' && p[2] == '-')
14766 {
14767 opt_set_p = false;
14768 p += 3;
14769 len -= 3;
14770 }
14771 else
14772 opt_set_p = true;
14773
14774 /* Find the option. */
14775 ch = *p;
14776 found = false;
14777 for (i = 0; i < ARRAY_SIZE (attrs); i++)
14778 {
14779 opt_len = attrs[i].len;
14780 if (ch == attrs[i].string[0]
14781 && ((attrs[i].has_arg) ? len > opt_len : len == opt_len)
14782 && memcmp (p, attrs[i].string, opt_len) == 0)
14783 {
14784 opt = attrs[i].opt;
14785 if (!opt_set_p && cl_options[opt].cl_reject_negative)
14786 continue;
14787 mask = cl_options[opt].var_value;
14788 var_type = cl_options[opt].var_type;
14789 found = true;
14790 break;
14791 }
14792 }
14793
14794 /* Process the option. */
14795 if (!found)
14796 {
14797 error ("attribute(target(\"%s\")) is unknown", orig_p);
14798 return false;
14799 }
14800 else if (attrs[i].only_as_pragma && !force_pragma)
14801 {
14802 /* Value is not allowed for the target attribute. */
14803 error ("Value %qs is not supported by attribute %<target%>",
14804 attrs[i].string);
14805 return false;
14806 }
14807
14808 else if (var_type == CLVC_BIT_SET || var_type == CLVC_BIT_CLEAR)
14809 {
14810 if (var_type == CLVC_BIT_CLEAR)
14811 opt_set_p = !opt_set_p;
14812
14813 if (opt_set_p)
14814 opts->x_target_flags |= mask;
14815 else
14816 opts->x_target_flags &= ~mask;
14817 new_opts_set->x_target_flags |= mask;
14818 }
14819
14820 else if (cl_options[opt].var_type == CLVC_BOOLEAN)
14821 {
14822 int value;
14823
14824 if (cl_options[opt].cl_uinteger)
14825 {
14826 /* Unsigned integer argument. Code based on the function
14827 decode_cmdline_option () in opts-common.c. */
14828 value = integral_argument (p + opt_len);
14829 }
14830 else
14831 value = (opt_set_p) ? 1 : 0;
14832
14833 if (value != -1)
14834 {
14835 struct cl_decoded_option decoded;
14836
14837 /* Value range check; only implemented for numeric and boolean
14838 options at the moment. */
14839 generate_option (opt, NULL, value, CL_TARGET, &decoded);
14840 s390_handle_option (opts, new_opts_set, &decoded, input_location);
14841 set_option (opts, new_opts_set, opt, value,
14842 p + opt_len, DK_UNSPECIFIED, input_location,
14843 global_dc);
14844 }
14845 else
14846 {
14847 error ("attribute(target(\"%s\")) is unknown", orig_p);
14848 ret = false;
14849 }
14850 }
14851
14852 else if (cl_options[opt].var_type == CLVC_ENUM)
14853 {
14854 bool arg_ok;
14855 int value;
14856
14857 arg_ok = opt_enum_arg_to_value (opt, p + opt_len, &value, CL_TARGET);
14858 if (arg_ok)
14859 set_option (opts, new_opts_set, opt, value,
14860 p + opt_len, DK_UNSPECIFIED, input_location,
14861 global_dc);
14862 else
14863 {
14864 error ("attribute(target(\"%s\")) is unknown", orig_p);
14865 ret = false;
14866 }
14867 }
14868
14869 else
14870 gcc_unreachable ();
14871 }
14872 return ret;
14873}
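/* For illustration, a hypothetical use of the target attribute parsed
   above; each comma-separated entry is looked up in the attrs table,
   and a "no-" prefix negates flag options:

     void foo (void) __attribute__ ((target ("arch=z13,no-vx")));

   This compiles foo for z13 while turning off the vector facility for
   just this function.  */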
14874
14875/* Return a TARGET_OPTION_NODE tree of the target options listed or NULL. */
14876
14877tree
14878s390_valid_target_attribute_tree (tree args,
14879 struct gcc_options *opts,
14880 const struct gcc_options *opts_set,
14881 bool force_pragma)
14882{
14883 tree t = NULL_TREE;
14884 struct gcc_options new_opts_set;
14885
14886 memset (&new_opts_set, 0, sizeof (new_opts_set));
14887
14888 /* Process each of the options on the chain. */
14889 if (! s390_valid_target_attribute_inner_p (args, opts, &new_opts_set,
14890 force_pragma))
14891 return error_mark_node;
14892
14893 /* If some option was set (even if it has not changed), rerun
14894 s390_option_override_internal, and then save the options away. */
14895 if (new_opts_set.x_target_flags
14896 || new_opts_set.x_s390_arch
14897 || new_opts_set.x_s390_tune
14898 || new_opts_set.x_s390_stack_guard
14899 || new_opts_set.x_s390_stack_size
14900 || new_opts_set.x_s390_branch_cost
14901 || new_opts_set.x_s390_warn_framesize
14902 || new_opts_set.x_s390_warn_dynamicstack_p)
14903 {
14904 const unsigned char *src = (const unsigned char *)opts_set;
14905 unsigned char *dest = (unsigned char *)&new_opts_set;
14906 unsigned int i;
14907
14908 /* Merge the original option flags into the new ones. */
14909 for (i = 0; i < sizeof(*opts_set); i++)
14910 dest[i] |= src[i];
14911
14912 /* Do any overrides, such as arch=xxx, or tune=xxx support. */
3bd8520f 14913 s390_option_override_internal (false, opts, &new_opts_set);
7a0cee35 14914 /* Save the current options unless we are validating options for
14915 #pragma. */
14916 t = build_target_option_node (opts);
14917 }
14918 return t;
14919}
14920
14921/* Hook to validate attribute((target("string"))). */
14922
14923static bool
14924s390_valid_target_attribute_p (tree fndecl,
14925 tree ARG_UNUSED (name),
14926 tree args,
14927 int ARG_UNUSED (flags))
14928{
14929 struct gcc_options func_options;
14930 tree new_target, new_optimize;
14931 bool ret = true;
14932
14933 /* attribute((target("default"))) does nothing, beyond
14934 affecting multi-versioning. */
14935 if (TREE_VALUE (args)
14936 && TREE_CODE (TREE_VALUE (args)) == STRING_CST
14937 && TREE_CHAIN (args) == NULL_TREE
14938 && strcmp (TREE_STRING_POINTER (TREE_VALUE (args)), "default") == 0)
14939 return true;
14940
14941 tree old_optimize = build_optimization_node (&global_options);
14942
14943 /* Get the optimization options of the current function. */
14944 tree func_optimize = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl);
14945
14946 if (!func_optimize)
14947 func_optimize = old_optimize;
14948
14949 /* Init func_options. */
14950 memset (&func_options, 0, sizeof (func_options));
14951 init_options_struct (&func_options, NULL);
14952 lang_hooks.init_options_struct (&func_options);
14953
14954 cl_optimization_restore (&func_options, TREE_OPTIMIZATION (func_optimize));
14955
14956 /* Initialize func_options to the default before its target options can
14957 be set. */
14958 cl_target_option_restore (&func_options,
14959 TREE_TARGET_OPTION (target_option_default_node));
14960
14961 new_target = s390_valid_target_attribute_tree (args, &func_options,
14962 &global_options_set,
14963 (args ==
14964 current_target_pragma));
14965 new_optimize = build_optimization_node (&func_options);
14966 if (new_target == error_mark_node)
14967 ret = false;
14968 else if (fndecl && new_target)
14969 {
14970 DECL_FUNCTION_SPECIFIC_TARGET (fndecl) = new_target;
14971 if (old_optimize != new_optimize)
14972 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = new_optimize;
14973 }
14974 return ret;
14975}
14976
14977/* Restore target globals from NEW_TREE and invalidate the s390_previous_fndecl
14978 cache. */
14979
14980void
14981s390_activate_target_options (tree new_tree)
14982{
14983 cl_target_option_restore (&global_options, TREE_TARGET_OPTION (new_tree));
14984 if (TREE_TARGET_GLOBALS (new_tree))
14985 restore_target_globals (TREE_TARGET_GLOBALS (new_tree));
14986 else if (new_tree == target_option_default_node)
14987 restore_target_globals (&default_target_globals);
14988 else
14989 TREE_TARGET_GLOBALS (new_tree) = save_target_globals_default_opts ();
14990 s390_previous_fndecl = NULL_TREE;
14991}
14992
14993/* Establish appropriate back-end context for processing the function
14994 FNDECL. The argument might be NULL to indicate processing at top
14995 level, outside of any function scope. */
14996static void
14997s390_set_current_function (tree fndecl)
14998{
14999 /* Only change the context if the function changes. This hook is called
15000 several times in the course of compiling a function, and we don't want to
15001 slow things down too much or call target_reinit when it isn't safe. */
15002 if (fndecl == s390_previous_fndecl)
15003 return;
15004
15005 tree old_tree;
15006 if (s390_previous_fndecl == NULL_TREE)
15007 old_tree = target_option_current_node;
15008 else if (DECL_FUNCTION_SPECIFIC_TARGET (s390_previous_fndecl))
15009 old_tree = DECL_FUNCTION_SPECIFIC_TARGET (s390_previous_fndecl);
15010 else
15011 old_tree = target_option_default_node;
15012
15013 if (fndecl == NULL_TREE)
15014 {
15015 if (old_tree != target_option_current_node)
15016 s390_activate_target_options (target_option_current_node);
15017 return;
15018 }
15019
15020 tree new_tree = DECL_FUNCTION_SPECIFIC_TARGET (fndecl);
15021 if (new_tree == NULL_TREE)
15022 new_tree = target_option_default_node;
15023
15024 if (old_tree != new_tree)
15025 s390_activate_target_options (new_tree);
15026 s390_previous_fndecl = fndecl;
15027}
15028#endif
15029
a83f0e2c 15030/* Implement TARGET_USE_BY_PIECES_INFRASTRUCTURE_P. */
15031
15032static bool
89da42b6 15033s390_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
a83f0e2c 15034 unsigned int align ATTRIBUTE_UNUSED,
15035 enum by_pieces_operation op ATTRIBUTE_UNUSED,
15036 bool speed_p ATTRIBUTE_UNUSED)
15037{
15038 return (size == 1 || size == 2
15039 || size == 4 || (TARGET_ZARCH && size == 8));
15040}
15041
90f58e2a 15042/* Implement TARGET_ATOMIC_ASSIGN_EXPAND_FENV hook. */
15043
15044static void
15045s390_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
15046{
07f32359 15047 tree sfpc = s390_builtin_decls[S390_BUILTIN_s390_sfpc];
15048 tree efpc = s390_builtin_decls[S390_BUILTIN_s390_efpc];
90f58e2a 15049 tree call_efpc = build_call_expr (efpc, 0);
9550ce87 15050 tree fenv_var = create_tmp_var_raw (unsigned_type_node);
90f58e2a 15051
15052#define FPC_EXCEPTION_MASK HOST_WIDE_INT_UC (0xf8000000)
15053#define FPC_FLAGS_MASK HOST_WIDE_INT_UC (0x00f80000)
15054#define FPC_DXC_MASK HOST_WIDE_INT_UC (0x0000ff00)
15055#define FPC_EXCEPTION_MASK_SHIFT HOST_WIDE_INT_UC (24)
15056#define FPC_FLAGS_SHIFT HOST_WIDE_INT_UC (16)
15057#define FPC_DXC_SHIFT HOST_WIDE_INT_UC (8)
15058
15059 /* Generates the equivalent of feholdexcept (&fenv_var)
15060
15061 fenv_var = __builtin_s390_efpc ();
15062 __builtin_s390_sfpc (fenv_var & mask) */
15063 tree old_fpc = build2 (MODIFY_EXPR, unsigned_type_node, fenv_var, call_efpc);
15064 tree new_fpc =
15065 build2 (BIT_AND_EXPR, unsigned_type_node, fenv_var,
15066 build_int_cst (unsigned_type_node,
15067 ~(FPC_DXC_MASK | FPC_FLAGS_MASK |
15068 FPC_EXCEPTION_MASK)));
15069 tree set_new_fpc = build_call_expr (sfpc, 1, new_fpc);
15070 *hold = build2 (COMPOUND_EXPR, void_type_node, old_fpc, set_new_fpc);
15071
15072 /* Generates the equivalent of feclearexcept (FE_ALL_EXCEPT)
15073
15074 __builtin_s390_sfpc (__builtin_s390_efpc () & mask) */
15075 new_fpc = build2 (BIT_AND_EXPR, unsigned_type_node, call_efpc,
15076 build_int_cst (unsigned_type_node,
15077 ~(FPC_DXC_MASK | FPC_FLAGS_MASK)));
15078 *clear = build_call_expr (sfpc, 1, new_fpc);
15079
15080 /* Generates the equivalent of feupdateenv (fenv_var)
15081
15082 old_fpc = __builtin_s390_efpc ();
15083 __builtin_s390_sfpc (fenv_var);
15084 __atomic_feraiseexcept ((old_fpc & FPC_FLAGS_MASK) >> FPC_FLAGS_SHIFT); */
15085
9550ce87 15086 old_fpc = create_tmp_var_raw (unsigned_type_node);
90f58e2a 15087 tree store_old_fpc = build2 (MODIFY_EXPR, void_type_node,
15088 old_fpc, call_efpc);
15089
15090 set_new_fpc = build_call_expr (sfpc, 1, fenv_var);
15091
15092 tree raise_old_except = build2 (BIT_AND_EXPR, unsigned_type_node, old_fpc,
15093 build_int_cst (unsigned_type_node,
15094 FPC_FLAGS_MASK));
15095 raise_old_except = build2 (RSHIFT_EXPR, unsigned_type_node, raise_old_except,
15096 build_int_cst (unsigned_type_node,
15097 FPC_FLAGS_SHIFT));
15098 tree atomic_feraiseexcept
15099 = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
15100 raise_old_except = build_call_expr (atomic_feraiseexcept,
15101 1, raise_old_except);
15102
15103 *update = build2 (COMPOUND_EXPR, void_type_node,
15104 build2 (COMPOUND_EXPR, void_type_node,
15105 store_old_fpc, set_new_fpc),
15106 raise_old_except);
15107
15108#undef FPC_EXCEPTION_MASK
15109#undef FPC_FLAGS_MASK
15110#undef FPC_DXC_MASK
15111#undef FPC_EXCEPTION_MASK_SHIFT
15112#undef FPC_FLAGS_SHIFT
15113#undef FPC_DXC_SHIFT
15114}
15115
76a4c804 15116/* Return the vector mode to be used for inner mode MODE when doing
15117 vectorization. */
15118static machine_mode
15119s390_preferred_simd_mode (machine_mode mode)
15120{
15121 if (TARGET_VX)
15122 switch (mode)
15123 {
15124 case DFmode:
15125 return V2DFmode;
15126 case DImode:
15127 return V2DImode;
15128 case SImode:
15129 return V4SImode;
15130 case HImode:
15131 return V8HImode;
15132 case QImode:
15133 return V16QImode;
15134 default:;
15135 }
15136 return word_mode;
15137}
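/* E.g. a simple loop over doubles (hypothetical user code)

     void f (double *a, double *b) { int i;
       for (i = 0; i < 64; i++) a[i] += b[i]; }

   is vectorized using V2DFmode, i.e. two double lanes per vector
   register, when the vector facility (TARGET_VX) is available.  */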
15138
15139/* Our hardware does not require vectors to be strictly aligned. */
15140static bool
15141s390_support_vector_misalignment (machine_mode mode ATTRIBUTE_UNUSED,
15142 const_tree type ATTRIBUTE_UNUSED,
15143 int misalignment ATTRIBUTE_UNUSED,
15144 bool is_packed ATTRIBUTE_UNUSED)
15145{
6bb09dc9 15146 if (TARGET_VX)
15147 return true;
15148
15149 return default_builtin_support_vector_misalignment (mode, type, misalignment,
15150 is_packed);
76a4c804 15151}
15152
15153/* The vector ABI requires vector types to be aligned on an 8 byte
15154   boundary (our stack alignment).  However, we allow this to be
15155   overridden by the user, although this definitely breaks the ABI. */
15156static HOST_WIDE_INT
15157s390_vector_alignment (const_tree type)
15158{
15159 if (!TARGET_VX_ABI)
15160 return default_vector_alignment (type);
15161
15162 if (TYPE_USER_ALIGN (type))
15163 return TYPE_ALIGN (type);
15164
15165 return MIN (64, tree_to_shwi (TYPE_SIZE (type)));
15166}
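/* A hypothetical example of such a user override:

     typedef int v4si __attribute__ ((vector_size (16), aligned (4)));

   TYPE_USER_ALIGN is set for v4si, so the function above returns the
   user alignment of 32 bits (4 bytes) instead of the 64 bits the
   vector ABI would require.  */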
15167
14d7e7e6 15168#ifdef HAVE_AS_MACHINE_MACHINEMODE
15169/* Implement TARGET_ASM_FILE_START. */
15170static void
15171s390_asm_file_start (void)
15172{
b904831d 15173 default_file_start ();
14d7e7e6 15174 s390_asm_output_machine_for_arch (asm_out_file);
15175}
15176#endif
15177
6b7cfb9c 15178/* Implement TARGET_ASM_FILE_END. */
15179static void
15180s390_asm_file_end (void)
15181{
15182#ifdef HAVE_AS_GNU_ATTRIBUTE
15183 varpool_node *vnode;
15184 cgraph_node *cnode;
15185
15186 FOR_EACH_VARIABLE (vnode)
15187 if (TREE_PUBLIC (vnode->decl))
15188 s390_check_type_for_vector_abi (TREE_TYPE (vnode->decl), false, false);
15189
15190 FOR_EACH_FUNCTION (cnode)
15191 if (TREE_PUBLIC (cnode->decl))
15192 s390_check_type_for_vector_abi (TREE_TYPE (cnode->decl), false, false);
15193
15194
15195 if (s390_vector_abi != 0)
15196 fprintf (asm_out_file, "\t.gnu_attribute 8, %d\n",
15197 s390_vector_abi);
15198#endif
15199 file_end_indicate_exec_stack ();
c6d481f7 15200
15201 if (flag_split_stack)
15202 file_end_indicate_split_stack ();
6b7cfb9c 15203}
76a4c804 15204
f0c550e7 15205/* Return true if TYPE is a vector bool type. */
15206static inline bool
15207s390_vector_bool_type_p (const_tree type)
15208{
15209 return TYPE_VECTOR_OPAQUE (type);
15210}
15211
15212/* Return the diagnostic message string if the binary operation OP is
15213 not permitted on TYPE1 and TYPE2, NULL otherwise. */
15214static const char*
15215s390_invalid_binary_op (int op ATTRIBUTE_UNUSED, const_tree type1, const_tree type2)
15216{
15217 bool bool1_p, bool2_p;
15218 bool plusminus_p;
15219 bool muldiv_p;
15220 bool compare_p;
15221 machine_mode mode1, mode2;
15222
15223 if (!TARGET_ZVECTOR)
15224 return NULL;
15225
15226 if (!VECTOR_TYPE_P (type1) || !VECTOR_TYPE_P (type2))
15227 return NULL;
15228
15229 bool1_p = s390_vector_bool_type_p (type1);
15230 bool2_p = s390_vector_bool_type_p (type2);
15231
15232 /* Mixing signed and unsigned types is forbidden for all
15233 operators. */
15234 if (!bool1_p && !bool2_p
15235 && TYPE_UNSIGNED (type1) != TYPE_UNSIGNED (type2))
15236 return N_("types differ in signedness");
15237
15238 plusminus_p = (op == PLUS_EXPR || op == MINUS_EXPR);
15239 muldiv_p = (op == MULT_EXPR || op == RDIV_EXPR || op == TRUNC_DIV_EXPR
15240 || op == CEIL_DIV_EXPR || op == FLOOR_DIV_EXPR
15241 || op == ROUND_DIV_EXPR);
15242 compare_p = (op == LT_EXPR || op == LE_EXPR || op == GT_EXPR || op == GE_EXPR
15243 || op == EQ_EXPR || op == NE_EXPR);
15244
15245 if (bool1_p && bool2_p && (plusminus_p || muldiv_p))
15246 return N_("binary operator does not support two vector bool operands");
15247
15248 if (bool1_p != bool2_p && (muldiv_p || compare_p))
15249 return N_("binary operator does not support vector bool operand");
15250
15251 mode1 = TYPE_MODE (type1);
15252 mode2 = TYPE_MODE (type2);
15253
15254 if (bool1_p != bool2_p && plusminus_p
15255 && (GET_MODE_CLASS (mode1) == MODE_VECTOR_FLOAT
15256 || GET_MODE_CLASS (mode2) == MODE_VECTOR_FLOAT))
15257 return N_("binary operator does not support mixing vector "
15258 "bool with floating point vector operands");
15259
15260 return NULL;
15261}
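/* For illustration, hypothetical zvector user code and how the checks
   above treat it:

     vector bool int b1, b2;
     vector signed int s1;

     b1 + b2;   rejected: two vector bool operands with plus/minus
     b1 * s1;   rejected: bool mixed with non-bool for multiplication
     b1 + s1;   accepted: plus/minus allows one bool operand (integer)
     b1 == b2;  accepted: comparisons allow two bool operands  */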
15262
875862bf 15263/* Initialize GCC target structure. */
f588eb9f 15264
875862bf 15265#undef TARGET_ASM_ALIGNED_HI_OP
15266#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
15267#undef TARGET_ASM_ALIGNED_DI_OP
15268#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
15269#undef TARGET_ASM_INTEGER
15270#define TARGET_ASM_INTEGER s390_assemble_integer
7346ca58 15271
875862bf 15272#undef TARGET_ASM_OPEN_PAREN
15273#define TARGET_ASM_OPEN_PAREN ""
f588eb9f 15274
875862bf 15275#undef TARGET_ASM_CLOSE_PAREN
15276#define TARGET_ASM_CLOSE_PAREN ""
7346ca58 15277
4c834714 15278#undef TARGET_OPTION_OVERRIDE
15279#define TARGET_OPTION_OVERRIDE s390_option_override
15280
8a23256f 15281#ifdef TARGET_THREAD_SSP_OFFSET
15282#undef TARGET_STACK_PROTECT_GUARD
15283#define TARGET_STACK_PROTECT_GUARD hook_tree_void_null
15284#endif
15285
875862bf 15286#undef TARGET_ENCODE_SECTION_INFO
15287#define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
7346ca58 15288
b5fdc416 15289#undef TARGET_SCALAR_MODE_SUPPORTED_P
15290#define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p
15291
875862bf 15292#ifdef HAVE_AS_TLS
15293#undef TARGET_HAVE_TLS
15294#define TARGET_HAVE_TLS true
15295#endif
15296#undef TARGET_CANNOT_FORCE_CONST_MEM
15297#define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
7346ca58 15298
875862bf 15299#undef TARGET_DELEGITIMIZE_ADDRESS
15300#define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
7346ca58 15301
41e3a0c7 15302#undef TARGET_LEGITIMIZE_ADDRESS
15303#define TARGET_LEGITIMIZE_ADDRESS s390_legitimize_address
15304
875862bf 15305#undef TARGET_RETURN_IN_MEMORY
15306#define TARGET_RETURN_IN_MEMORY s390_return_in_memory
f588eb9f 15307
5ada7a14 15308#undef TARGET_INIT_BUILTINS
15309#define TARGET_INIT_BUILTINS s390_init_builtins
15310#undef TARGET_EXPAND_BUILTIN
15311#define TARGET_EXPAND_BUILTIN s390_expand_builtin
751c914e 15312#undef TARGET_BUILTIN_DECL
15313#define TARGET_BUILTIN_DECL s390_builtin_decl
5ada7a14 15314
1a561788 15315#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
15316#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA s390_output_addr_const_extra
15317
875862bf 15318#undef TARGET_ASM_OUTPUT_MI_THUNK
15319#define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
15320#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
a9f1838b 15321#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
7346ca58 15322
875862bf 15323#undef TARGET_SCHED_ADJUST_PRIORITY
15324#define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
15325#undef TARGET_SCHED_ISSUE_RATE
15326#define TARGET_SCHED_ISSUE_RATE s390_issue_rate
15327#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
15328#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
f588eb9f 15329
8a2a84e3 15330#undef TARGET_SCHED_VARIABLE_ISSUE
15331#define TARGET_SCHED_VARIABLE_ISSUE s390_sched_variable_issue
15332#undef TARGET_SCHED_REORDER
15333#define TARGET_SCHED_REORDER s390_sched_reorder
494d0169 15334#undef TARGET_SCHED_INIT
15335#define TARGET_SCHED_INIT s390_sched_init
8a2a84e3 15336
875862bf 15337#undef TARGET_CANNOT_COPY_INSN_P
15338#define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
15339#undef TARGET_RTX_COSTS
15340#define TARGET_RTX_COSTS s390_rtx_costs
15341#undef TARGET_ADDRESS_COST
15342#define TARGET_ADDRESS_COST s390_address_cost
fa7a995b 15343#undef TARGET_REGISTER_MOVE_COST
15344#define TARGET_REGISTER_MOVE_COST s390_register_move_cost
15345#undef TARGET_MEMORY_MOVE_COST
15346#define TARGET_MEMORY_MOVE_COST s390_memory_move_cost
f588eb9f 15347
875862bf 15348#undef TARGET_MACHINE_DEPENDENT_REORG
15349#define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
71597dac 15350
875862bf 15351#undef TARGET_VALID_POINTER_MODE
15352#define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
f588eb9f 15353
875862bf 15354#undef TARGET_BUILD_BUILTIN_VA_LIST
15355#define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
8a58ed0a 15356#undef TARGET_EXPAND_BUILTIN_VA_START
15357#define TARGET_EXPAND_BUILTIN_VA_START s390_va_start
875862bf 15358#undef TARGET_GIMPLIFY_VA_ARG_EXPR
15359#define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg
b33c41a1 15360
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE s390_promote_function_mode
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE s390_pass_by_reference

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG s390_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE s390_function_arg_advance
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE s390_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE s390_libcall_value
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef TARGET_KEEP_LEAF_WHEN_PROFILED
#define TARGET_KEEP_LEAF_WHEN_PROFILED s390_keep_leaf_when_profiled

#undef TARGET_FIXED_CONDITION_CODE_REGS
#define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs

#undef TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE s390_cc_modes_compatible

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP hook_constcharptr_const_rtx_insn_null

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL s390_output_dwarf_dtprel
#endif

#undef TARGET_DWARF_FRAME_REG_MODE
#define TARGET_DWARF_FRAME_REG_MODE s390_dwarf_frame_reg_mode

#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE s390_mangle_type
#endif

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P s390_vector_mode_supported_p

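/* Register class preferences and reload support.  */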
#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS s390_preferred_reload_class

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD s390_secondary_reload

#undef TARGET_LIBGCC_CMP_RETURN_MODE
#define TARGET_LIBGCC_CMP_RETURN_MODE s390_libgcc_cmp_return_mode

#undef TARGET_LIBGCC_SHIFT_COUNT_MODE
#define TARGET_LIBGCC_SHIFT_COUNT_MODE s390_libgcc_shift_count_mode

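/* Legitimacy of addresses and constants.  */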
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P s390_legitimate_address_p

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P s390_legitimate_constant_p

#undef TARGET_LRA_P
#define TARGET_LRA_P s390_lra_p

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE s390_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE s390_conditional_register_usage

#undef TARGET_LOOP_UNROLL_ADJUST
#define TARGET_LOOP_UNROLL_ADJUST s390_loop_unroll_adjust

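/* Trampolines for taking the address of nested functions.  */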
#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE s390_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT s390_trampoline_init

#undef TARGET_UNWIND_WORD_MODE
#define TARGET_UNWIND_WORD_MODE s390_unwind_word_mode

#undef TARGET_CANONICALIZE_COMPARISON
#define TARGET_CANONICALIZE_COMPARISON s390_canonicalize_comparison

#undef TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK s390_hard_regno_scratch_ok

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE s390_attribute_table

#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P hook_bool_const_tree_true

#undef TARGET_SET_UP_BY_PROLOGUE
#define TARGET_SET_UP_BY_PROLOGUE s300_set_up_by_prologue

#undef TARGET_EXTRA_LIVE_ON_ENTRY
#define TARGET_EXTRA_LIVE_ON_ENTRY s390_live_on_entry

#undef TARGET_USE_BY_PIECES_INFRASTRUCTURE_P
#define TARGET_USE_BY_PIECES_INFRASTRUCTURE_P \
  s390_use_by_pieces_infrastructure_p

#undef TARGET_ATOMIC_ASSIGN_EXPAND_FENV
#define TARGET_ATOMIC_ASSIGN_EXPAND_FENV s390_atomic_assign_expand_fenv

#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN s390_invalid_arg_for_unprototyped_fn

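/* Hooks for the auto-vectorizer and vector type layout.  */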
#undef TARGET_VECTORIZE_PREFERRED_SIMD_MODE
#define TARGET_VECTORIZE_PREFERRED_SIMD_MODE s390_preferred_simd_mode

#undef TARGET_VECTORIZE_SUPPORT_VECTOR_MISALIGNMENT
#define TARGET_VECTORIZE_SUPPORT_VECTOR_MISALIGNMENT s390_support_vector_misalignment

#undef TARGET_VECTOR_ALIGNMENT
#define TARGET_VECTOR_ALIGNMENT s390_vector_alignment

#undef TARGET_INVALID_BINARY_OP
#define TARGET_INVALID_BINARY_OP s390_invalid_binary_op

#ifdef HAVE_AS_MACHINE_MACHINEMODE
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START s390_asm_file_start
#endif

#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END s390_asm_file_end

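/* Per-function target option switching via the "target" attribute and
   pragma (e.g. __attribute__ ((target ("arch=z13")))).  */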
#if S390_USE_TARGET_ATTRIBUTE
#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION s390_set_current_function

#undef TARGET_OPTION_VALID_ATTRIBUTE_P
#define TARGET_OPTION_VALID_ATTRIBUTE_P s390_valid_target_attribute_p
#endif

#undef TARGET_OPTION_RESTORE
#define TARGET_OPTION_RESTORE s390_function_specific_restore

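/* All of the hook overrides above are collected into this structure;
   target-independent code reaches the back end only through it.  For
   example, the scheduler queries the issue rate roughly as

     int rate = targetm.sched.issue_rate ();

   which dispatches to s390_issue_rate defined earlier in this file.  */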
struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-s390.h"