/* Subroutines used for code generation on IBM S/390 and zSeries
   Copyright (C) 1999-2017 Free Software Foundation, Inc.
   Contributed by Hartmut Penner (hpenner@de.ibm.com) and
                  Ulrich Weigand (uweigand@de.ibm.com) and
                  Andreas Krebbel (Andreas.Krebbel@de.ibm.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "target-globals.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "cfgloop.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "attribs.h"
#include "expmed.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "print-tree.h"
#include "stor-layout.h"
#include "varasm.h"
#include "calls.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "reload.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "lcm.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "debug.h"
#include "langhooks.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "params.h"
#include "opts.h"
#include "tree-pass.h"
#include "context.h"
#include "builtins.h"
#include "rtl-iter.h"
#include "intl.h"
#include "tm-constrs.h"

/* This file should be included last.  */
#include "target-def.h"

/* Remember the last target of s390_set_current_function.  */
static GTY(()) tree s390_previous_fndecl;

/* Define the specific costs for a given cpu.  */

struct processor_costs
{
  /* multiplication */
  const int m;        /* cost of an M instruction.  */
  const int mghi;     /* cost of an MGHI instruction.  */
  const int mh;       /* cost of an MH instruction.  */
  const int mhi;      /* cost of an MHI instruction.  */
  const int ml;       /* cost of an ML instruction.  */
  const int mr;       /* cost of an MR instruction.  */
  const int ms;       /* cost of an MS instruction.  */
  const int msg;      /* cost of an MSG instruction.  */
  const int msgf;     /* cost of an MSGF instruction.  */
  const int msgfr;    /* cost of an MSGFR instruction.  */
  const int msgr;     /* cost of an MSGR instruction.  */
  const int msr;      /* cost of an MSR instruction.  */
  const int mult_df;  /* cost of multiplication in DFmode.  */
  const int mxbr;     /* cost of an MXBR instruction.  */
  /* square root */
  const int sqxbr;    /* cost of square root in TFmode.  */
  const int sqdbr;    /* cost of square root in DFmode.  */
  const int sqebr;    /* cost of square root in SFmode.  */
  /* multiply and add */
  const int madbr;    /* cost of multiply and add in DFmode.  */
  const int maebr;    /* cost of multiply and add in SFmode.  */
  /* division */
  const int dxbr;     /* cost of a DXBR instruction.  */
  const int ddbr;     /* cost of a DDBR instruction.  */
  const int debr;     /* cost of a DEBR instruction.  */
  const int dlgr;     /* cost of a DLGR instruction.  */
  const int dlr;      /* cost of a DLR instruction.  */
  const int dr;       /* cost of a DR instruction.  */
  const int dsgfr;    /* cost of a DSGFR instruction.  */
  const int dsgr;     /* cost of a DSGR instruction.  */
};

#define s390_cost ((const struct processor_costs *)(s390_cost_pointer))

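/* Illustrative note (added, not in the original source): the rtx cost
   hooks consult the active cost table through this macro, with
   lookups along the lines of "*total = s390_cost->mult_df;" for a
   DFmode multiply, after s390_cost_pointer has been set up from
   processor_table during option override.  */
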
static const
struct processor_costs z900_cost =
{
  COSTS_N_INSNS (5),     /* M     */
  COSTS_N_INSNS (10),    /* MGHI  */
  COSTS_N_INSNS (5),     /* MH    */
  COSTS_N_INSNS (4),     /* MHI   */
  COSTS_N_INSNS (5),     /* ML    */
  COSTS_N_INSNS (5),     /* MR    */
  COSTS_N_INSNS (4),     /* MS    */
  COSTS_N_INSNS (15),    /* MSG   */
  COSTS_N_INSNS (7),     /* MSGF  */
  COSTS_N_INSNS (7),     /* MSGFR */
  COSTS_N_INSNS (10),    /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (7),     /* multiplication in DFmode */
  COSTS_N_INSNS (13),    /* MXBR */
  COSTS_N_INSNS (136),   /* SQXBR */
  COSTS_N_INSNS (44),    /* SQDBR */
  COSTS_N_INSNS (35),    /* SQEBR */
  COSTS_N_INSNS (18),    /* MADBR */
  COSTS_N_INSNS (13),    /* MAEBR */
  COSTS_N_INSNS (134),   /* DXBR */
  COSTS_N_INSNS (30),    /* DDBR */
  COSTS_N_INSNS (27),    /* DEBR */
  COSTS_N_INSNS (220),   /* DLGR */
  COSTS_N_INSNS (34),    /* DLR */
  COSTS_N_INSNS (34),    /* DR */
  COSTS_N_INSNS (32),    /* DSGFR */
  COSTS_N_INSNS (32),    /* DSGR */
};

static const
struct processor_costs z990_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR */
  COSTS_N_INSNS (40),    /* DDBR */
  COSTS_N_INSNS (26),    /* DEBR */
  COSTS_N_INSNS (176),   /* DLGR */
  COSTS_N_INSNS (31),    /* DLR */
  COSTS_N_INSNS (31),    /* DR */
  COSTS_N_INSNS (31),    /* DSGFR */
  COSTS_N_INSNS (31),    /* DSGR */
};

static const
struct processor_costs z9_109_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR */
  COSTS_N_INSNS (40),    /* DDBR */
  COSTS_N_INSNS (26),    /* DEBR */
  COSTS_N_INSNS (30),    /* DLGR */
  COSTS_N_INSNS (23),    /* DLR */
  COSTS_N_INSNS (23),    /* DR */
  COSTS_N_INSNS (24),    /* DSGFR */
  COSTS_N_INSNS (24),    /* DSGR */
};

static const
struct processor_costs z10_cost =
{
  COSTS_N_INSNS (10),    /* M     */
  COSTS_N_INSNS (10),    /* MGHI  */
  COSTS_N_INSNS (10),    /* MH    */
  COSTS_N_INSNS (10),    /* MHI   */
  COSTS_N_INSNS (10),    /* ML    */
  COSTS_N_INSNS (10),    /* MR    */
  COSTS_N_INSNS (10),    /* MS    */
  COSTS_N_INSNS (10),    /* MSG   */
  COSTS_N_INSNS (10),    /* MSGF  */
  COSTS_N_INSNS (10),    /* MSGFR */
  COSTS_N_INSNS (10),    /* MSGR  */
  COSTS_N_INSNS (10),    /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (50),    /* MXBR */
  COSTS_N_INSNS (120),   /* SQXBR */
  COSTS_N_INSNS (52),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (111),   /* DXBR */
  COSTS_N_INSNS (39),    /* DDBR */
  COSTS_N_INSNS (32),    /* DEBR */
  COSTS_N_INSNS (160),   /* DLGR */
  COSTS_N_INSNS (71),    /* DLR */
  COSTS_N_INSNS (71),    /* DR */
  COSTS_N_INSNS (71),    /* DSGFR */
  COSTS_N_INSNS (71),    /* DSGR */
};

static const
struct processor_costs z196_cost =
{
  COSTS_N_INSNS (7),     /* M     */
  COSTS_N_INSNS (5),     /* MGHI  */
  COSTS_N_INSNS (5),     /* MH    */
  COSTS_N_INSNS (5),     /* MHI   */
  COSTS_N_INSNS (7),     /* ML    */
  COSTS_N_INSNS (7),     /* MR    */
  COSTS_N_INSNS (6),     /* MS    */
  COSTS_N_INSNS (8),     /* MSG   */
  COSTS_N_INSNS (6),     /* MSGF  */
  COSTS_N_INSNS (6),     /* MSGFR */
  COSTS_N_INSNS (8),     /* MSGR  */
  COSTS_N_INSNS (6),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (40),    /* MXBR B+40 */
  COSTS_N_INSNS (100),   /* SQXBR B+100 */
  COSTS_N_INSNS (42),    /* SQDBR B+42 */
  COSTS_N_INSNS (28),    /* SQEBR B+28 */
  COSTS_N_INSNS (1),     /* MADBR B */
  COSTS_N_INSNS (1),     /* MAEBR B */
  COSTS_N_INSNS (101),   /* DXBR B+101 */
  COSTS_N_INSNS (29),    /* DDBR */
  COSTS_N_INSNS (22),    /* DEBR */
  COSTS_N_INSNS (160),   /* DLGR cracked */
  COSTS_N_INSNS (160),   /* DLR cracked */
  COSTS_N_INSNS (160),   /* DR expanded */
  COSTS_N_INSNS (160),   /* DSGFR cracked */
  COSTS_N_INSNS (160),   /* DSGR cracked */
};

static const
struct processor_costs zEC12_cost =
{
  COSTS_N_INSNS (7),     /* M     */
  COSTS_N_INSNS (5),     /* MGHI  */
  COSTS_N_INSNS (5),     /* MH    */
  COSTS_N_INSNS (5),     /* MHI   */
  COSTS_N_INSNS (7),     /* ML    */
  COSTS_N_INSNS (7),     /* MR    */
  COSTS_N_INSNS (6),     /* MS    */
  COSTS_N_INSNS (8),     /* MSG   */
  COSTS_N_INSNS (6),     /* MSGF  */
  COSTS_N_INSNS (6),     /* MSGFR */
  COSTS_N_INSNS (8),     /* MSGR  */
  COSTS_N_INSNS (6),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (40),    /* MXBR B+40 */
  COSTS_N_INSNS (100),   /* SQXBR B+100 */
  COSTS_N_INSNS (42),    /* SQDBR B+42 */
  COSTS_N_INSNS (28),    /* SQEBR B+28 */
  COSTS_N_INSNS (1),     /* MADBR B */
  COSTS_N_INSNS (1),     /* MAEBR B */
  COSTS_N_INSNS (131),   /* DXBR B+131 */
  COSTS_N_INSNS (29),    /* DDBR */
  COSTS_N_INSNS (22),    /* DEBR */
  COSTS_N_INSNS (160),   /* DLGR cracked */
  COSTS_N_INSNS (160),   /* DLR cracked */
  COSTS_N_INSNS (160),   /* DR expanded */
  COSTS_N_INSNS (160),   /* DSGFR cracked */
  COSTS_N_INSNS (160),   /* DSGR cracked */
};

static struct
{
  /* The preferred name to be used in user visible output.  */
  const char *const name;
  /* CPU name as it should be passed to Binutils via .machine  */
  const char *const binutils_name;
  const enum processor_type processor;
  const struct processor_costs *cost;
}
const processor_table[] =
{
  { "g5",     "g5",     PROCESSOR_9672_G5,     &z900_cost },
  { "g6",     "g6",     PROCESSOR_9672_G6,     &z900_cost },
  { "z900",   "z900",   PROCESSOR_2064_Z900,   &z900_cost },
  { "z990",   "z990",   PROCESSOR_2084_Z990,   &z990_cost },
  { "z9-109", "z9-109", PROCESSOR_2094_Z9_109, &z9_109_cost },
  { "z9-ec",  "z9-ec",  PROCESSOR_2094_Z9_EC,  &z9_109_cost },
  { "z10",    "z10",    PROCESSOR_2097_Z10,    &z10_cost },
  { "z196",   "z196",   PROCESSOR_2817_Z196,   &z196_cost },
  { "zEC12",  "zEC12",  PROCESSOR_2827_ZEC12,  &zEC12_cost },
  { "z13",    "z13",    PROCESSOR_2964_Z13,    &zEC12_cost },
  { "z14",    "arch12", PROCESSOR_3906_Z14,    &zEC12_cost },
  { "native", "",       PROCESSOR_NATIVE,      NULL }
};

extern int reload_completed;

/* Kept up to date using the SCHED_VARIABLE_ISSUE hook.  */
static rtx_insn *last_scheduled_insn;
#define MAX_SCHED_UNITS 3
static int last_scheduled_unit_distance[MAX_SCHED_UNITS];

/* The maximum score added for an instruction whose unit hasn't been
   in use for MAX_SCHED_MIX_DISTANCE steps.  Increase this value to
   give instruction mix scheduling more priority over instruction
   grouping.  */
#define MAX_SCHED_MIX_SCORE 8

/* The maximum distance up to which individual scores will be
   calculated.  Everything beyond this gives MAX_SCHED_MIX_SCORE.
   Increase this with the OOO window size of the machine.  */
#define MAX_SCHED_MIX_DISTANCE 100

/* Structure used to hold the components of an S/390 memory
   address.  A legitimate address on S/390 is of the general
   form
          base + index + displacement
   where any of the components is optional.

   base and index are registers of the class ADDR_REGS,
   displacement is an unsigned 12-bit immediate constant.  */

struct s390_address
{
  rtx base;
  rtx indx;
  rtx disp;
  bool pointer;
  bool literal_pool;
};

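/* Illustration (added comment): a memory operand written as
   8(%r2,%r3) in the assembler's D(X,B) form would decompose into
   base = %r3, indx = %r2 and disp = (const_int 8), while a plain
   register address like 0(%r2) leaves indx and disp unset.  */
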
/* The following structure is embedded in the machine
   specific part of struct function.  */

struct GTY (()) s390_frame_layout
{
  /* Offset within stack frame.  */
  HOST_WIDE_INT gprs_offset;
  HOST_WIDE_INT f0_offset;
  HOST_WIDE_INT f4_offset;
  HOST_WIDE_INT f8_offset;
  HOST_WIDE_INT backchain_offset;

  /* Numbers of the first and last GPRs for which slots in the
     register save area are reserved.  */
  int first_save_gpr_slot;
  int last_save_gpr_slot;

  /* Location (FP register number) where GPRs (r0-r15) should
     be saved to.
      0 - does not need to be saved at all
     -1 - stack slot  */
#define SAVE_SLOT_NONE   0
#define SAVE_SLOT_STACK -1
  signed char gpr_save_slots[16];

  /* Number of first and last gpr to be saved, restored.  */
  int first_save_gpr;
  int first_restore_gpr;
  int last_save_gpr;
  int last_restore_gpr;

  /* Bits standing for floating point registers.  Set, if the
     respective register has to be saved.  Starting with reg 16 (f0)
     at the rightmost bit.
     Bit 15 14 13 12 11 10  9  8  7  6  5  4  3  2  1  0
     fpr 15 13 11  9 14 12 10  8  7  5  3  1  6  4  2  0
     reg 31 30 29 28 27 26 25 24 23 22 21 20 19 18 17 16  */
  unsigned int fpr_bitmap;

  /* Number of floating point registers f8-f15 which must be saved.  */
  int high_fprs;

  /* Set if return address needs to be saved.
     This flag is set by s390_return_addr_rtx if it could not use
     the initial value of r14 and therefore depends on r14 saved
     to the stack.  */
  bool save_return_addr_p;

  /* Size of stack frame.  */
  HOST_WIDE_INT frame_size;
};

/* Define the structure for the machine field in struct function.  */

struct GTY(()) machine_function
{
  struct s390_frame_layout frame_layout;

  /* Literal pool base register.  */
  rtx base_reg;

  /* True if we may need to perform branch splitting.  */
  bool split_branches_pending_p;

  bool has_landing_pad_p;

  /* True if the current function may contain a tbegin clobbering
     FPRs.  */
  bool tbegin_p;

  /* For -fsplit-stack support: A stack local which holds a pointer to
     the stack arguments for a function with a variable number of
     arguments.  This is set at the start of the function and is used
     to initialize the overflow_arg_area field of the va_list
     structure.  */
  rtx split_stack_varargs_pointer;
};

/* A few accessor macros for struct cfun->machine->s390_frame_layout.  */

#define cfun_frame_layout (cfun->machine->frame_layout)
#define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
#define cfun_save_arg_fprs_p (!!(TARGET_64BIT                           \
                                 ? cfun_frame_layout.fpr_bitmap & 0x0f  \
                                 : cfun_frame_layout.fpr_bitmap & 0x03))
#define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr_slot - \
  cfun_frame_layout.first_save_gpr_slot + 1) * UNITS_PER_LONG)
#define cfun_set_fpr_save(REGNO) (cfun->machine->frame_layout.fpr_bitmap |= \
  (1 << (REGNO - FPR0_REGNUM)))
#define cfun_fpr_save_p(REGNO) (!!(cfun->machine->frame_layout.fpr_bitmap & \
  (1 << (REGNO - FPR0_REGNUM))))
#define cfun_gpr_save_slot(REGNO) \
  cfun->machine->frame_layout.gpr_save_slots[REGNO]

/* Number of GPRs and FPRs used for argument passing.  */
#define GP_ARG_NUM_REG 5
#define FP_ARG_NUM_REG (TARGET_64BIT ? 4 : 2)
#define VEC_ARG_NUM_REG 8

/* A couple of shortcuts.  */
#define CONST_OK_FOR_J(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'J', "J")
#define CONST_OK_FOR_K(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'K', "K")
#define CONST_OK_FOR_Os(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Os")
#define CONST_OK_FOR_Op(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Op")
#define CONST_OK_FOR_On(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "On")

#define REGNO_PAIR_OK(REGNO, MODE)                               \
  (HARD_REGNO_NREGS ((REGNO), (MODE)) == 1 || !((REGNO) & 1))

/* That's the read ahead of the dynamic branch prediction unit in
   bytes on a z10 (or higher) CPU.  */
#define PREDICT_DISTANCE (TARGET_Z10 ? 384 : 2048)

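/* Illustrative note (added): REGNO_PAIR_OK enforces the even/odd pair
   rule: a mode that occupies two hard registers (e.g. a 64-bit
   integer value in 31-bit mode) must start in an even-numbered
   register so that the register pair is architecturally valid.  */
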
/* Indicate which ABI has been used for passing vector args.
   0 - no vector type arguments have been passed where the ABI is relevant
   1 - the old ABI has been used
   2 - a vector type argument has been passed either in a vector register
       or on the stack by value  */
static int s390_vector_abi = 0;

/* Set the vector ABI marker if TYPE is subject to the vector ABI
   switch.  The vector ABI affects only vector data types.  There are
   two aspects of the vector ABI relevant here:

   1. vectors >= 16 bytes have an alignment of 8 bytes with the new
      ABI and natural alignment with the old.

   2. vectors <= 16 bytes are passed in VRs or by value on the stack
      with the new ABI but by reference on the stack with the old.

   If ARG_P is true TYPE is used for a function argument or return
   value.  The ABI marker then is set for all vector data types.  If
   ARG_P is false only type 1 vectors are being checked.  */

static void
s390_check_type_for_vector_abi (const_tree type, bool arg_p, bool in_struct_p)
{
  static hash_set<const_tree> visited_types_hash;

  if (s390_vector_abi)
    return;

  if (type == NULL_TREE || TREE_CODE (type) == ERROR_MARK)
    return;

  if (visited_types_hash.contains (type))
    return;

  visited_types_hash.add (type);

  if (VECTOR_TYPE_P (type))
    {
      int type_size = int_size_in_bytes (type);

      /* Outside of arguments only the alignment is changing and this
         only happens for vector types >= 16 bytes.  */
      if (!arg_p && type_size < 16)
        return;

      /* In arguments vector types > 16 bytes are passed as before
         (GCC never enforced the bigger alignment for arguments which
         was required by the old vector ABI).  However, it might still
         be ABI relevant due to the changed alignment if it is a
         struct member.  */
      if (arg_p && type_size > 16 && !in_struct_p)
        return;

      s390_vector_abi = TARGET_VX_ABI ? 2 : 1;
    }
  else if (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
    {
      /* ARRAY_TYPE: Since with neither of the ABIs we have more than
         natural alignment there will never be ABI dependent padding
         in an array type.  That's why we do not set in_struct_p to
         true here.  */
      s390_check_type_for_vector_abi (TREE_TYPE (type), arg_p, in_struct_p);
    }
  else if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      tree arg_chain;

      /* Check the return type.  */
      s390_check_type_for_vector_abi (TREE_TYPE (type), true, false);

      for (arg_chain = TYPE_ARG_TYPES (type);
           arg_chain;
           arg_chain = TREE_CHAIN (arg_chain))
        s390_check_type_for_vector_abi (TREE_VALUE (arg_chain), true, false);
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          s390_check_type_for_vector_abi (TREE_TYPE (field), arg_p, true);
        }
    }
}

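/* Example (added, illustrative): a parameter declared via
     typedef int v4si __attribute__ ((vector_size (16)));
   is a 16-byte vector type, so passing it sets s390_vector_abi to 2
   when the new vector ABI is in effect (TARGET_VX_ABI) and to 1
   otherwise.  */
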
/* System z builtins.  */

#include "s390-builtins.h"

const unsigned int bflags_builtin[S390_BUILTIN_MAX + 1] =
  {
#undef B_DEF
#undef OB_DEF
#undef OB_DEF_VAR
#define B_DEF(NAME, PATTERN, ATTRS, BFLAGS, ...) BFLAGS,
#define OB_DEF(...)
#define OB_DEF_VAR(...)
#include "s390-builtins.def"
    0
  };

const unsigned int opflags_builtin[S390_BUILTIN_MAX + 1] =
  {
#undef B_DEF
#undef OB_DEF
#undef OB_DEF_VAR
#define B_DEF(NAME, PATTERN, ATTRS, BFLAGS, OPFLAGS, ...) OPFLAGS,
#define OB_DEF(...)
#define OB_DEF_VAR(...)
#include "s390-builtins.def"
    0
  };

const unsigned int bflags_overloaded_builtin[S390_OVERLOADED_BUILTIN_MAX + 1] =
  {
#undef B_DEF
#undef OB_DEF
#undef OB_DEF_VAR
#define B_DEF(...)
#define OB_DEF(NAME, FIRST_VAR_NAME, LAST_VAR_NAME, BFLAGS, ...) BFLAGS,
#define OB_DEF_VAR(...)
#include "s390-builtins.def"
    0
  };

const unsigned int
bflags_overloaded_builtin_var[S390_OVERLOADED_BUILTIN_VAR_MAX + 1] =
  {
#undef B_DEF
#undef OB_DEF
#undef OB_DEF_VAR
#define B_DEF(...)
#define OB_DEF(...)
#define OB_DEF_VAR(NAME, PATTERN, FLAGS, OPFLAGS, FNTYPE) FLAGS,
#include "s390-builtins.def"
    0
  };

const unsigned int
opflags_overloaded_builtin_var[S390_OVERLOADED_BUILTIN_VAR_MAX + 1] =
  {
#undef B_DEF
#undef OB_DEF
#undef OB_DEF_VAR
#define B_DEF(...)
#define OB_DEF(...)
#define OB_DEF_VAR(NAME, PATTERN, FLAGS, OPFLAGS, FNTYPE) OPFLAGS,
#include "s390-builtins.def"
    0
  };

tree s390_builtin_types[BT_MAX];
tree s390_builtin_fn_types[BT_FN_MAX];
tree s390_builtin_decls[S390_BUILTIN_MAX +
                        S390_OVERLOADED_BUILTIN_MAX +
                        S390_OVERLOADED_BUILTIN_VAR_MAX];

static enum insn_code const code_for_builtin[S390_BUILTIN_MAX + 1] = {
#undef B_DEF
#undef OB_DEF
#undef OB_DEF_VAR
#define B_DEF(NAME, PATTERN, ...) CODE_FOR_##PATTERN,
#define OB_DEF(...)
#define OB_DEF_VAR(...)

#include "s390-builtins.def"
  CODE_FOR_nothing
};

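/* Note (added): the tables above use the X-macro idiom.  As a purely
   hypothetical sketch, a line in s390-builtins.def such as
     B_DEF (s390_foo, foo_pattern, 0, B_VX, O_ELEM, BT_FN_INT)
   would contribute B_VX to bflags_builtin, O_ELEM to opflags_builtin
   and CODE_FOR_foo_pattern to code_for_builtin, all at the same
   index.  */
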
static void
s390_init_builtins (void)
{
  /* These definitions are being used in s390-builtins.def.  */
  tree returns_twice_attr = tree_cons (get_identifier ("returns_twice"),
                                       NULL, NULL);
  tree noreturn_attr = tree_cons (get_identifier ("noreturn"), NULL, NULL);
  tree c_uint64_type_node;

  /* The uint64_type_node from tree.c is not compatible to the C99
     uint64_t data type.  What we want is c_uint64_type_node from
     c-common.c.  But since backend code is not supposed to interface
     with the frontend we recreate it here.  */
  if (TARGET_64BIT)
    c_uint64_type_node = long_unsigned_type_node;
  else
    c_uint64_type_node = long_long_unsigned_type_node;

#undef DEF_TYPE
#define DEF_TYPE(INDEX, NODE, CONST_P)                  \
  if (s390_builtin_types[INDEX] == NULL)                \
    s390_builtin_types[INDEX] = (!CONST_P) ?            \
      (NODE) : build_type_variant ((NODE), 1, 0);

#undef DEF_POINTER_TYPE
#define DEF_POINTER_TYPE(INDEX, INDEX_BASE)                             \
  if (s390_builtin_types[INDEX] == NULL)                                \
    s390_builtin_types[INDEX] =                                         \
      build_pointer_type (s390_builtin_types[INDEX_BASE]);

#undef DEF_DISTINCT_TYPE
#define DEF_DISTINCT_TYPE(INDEX, INDEX_BASE)                            \
  if (s390_builtin_types[INDEX] == NULL)                                \
    s390_builtin_types[INDEX] =                                         \
      build_distinct_type_copy (s390_builtin_types[INDEX_BASE]);

#undef DEF_VECTOR_TYPE
#define DEF_VECTOR_TYPE(INDEX, INDEX_BASE, ELEMENTS)                    \
  if (s390_builtin_types[INDEX] == NULL)                                \
    s390_builtin_types[INDEX] =                                         \
      build_vector_type (s390_builtin_types[INDEX_BASE], ELEMENTS);

#undef DEF_OPAQUE_VECTOR_TYPE
#define DEF_OPAQUE_VECTOR_TYPE(INDEX, INDEX_BASE, ELEMENTS)             \
  if (s390_builtin_types[INDEX] == NULL)                                \
    s390_builtin_types[INDEX] =                                         \
      build_opaque_vector_type (s390_builtin_types[INDEX_BASE], ELEMENTS);

#undef DEF_FN_TYPE
#define DEF_FN_TYPE(INDEX, args...)                     \
  if (s390_builtin_fn_types[INDEX] == NULL)             \
    s390_builtin_fn_types[INDEX] =                      \
      build_function_type_list (args, NULL_TREE);
#undef DEF_OV_TYPE
#define DEF_OV_TYPE(...)
#include "s390-builtin-types.def"

#undef B_DEF
#define B_DEF(NAME, PATTERN, ATTRS, BFLAGS, OPFLAGS, FNTYPE)            \
  if (s390_builtin_decls[S390_BUILTIN_##NAME] == NULL)                  \
    s390_builtin_decls[S390_BUILTIN_##NAME] =                           \
      add_builtin_function ("__builtin_" #NAME,                         \
                            s390_builtin_fn_types[FNTYPE],              \
                            S390_BUILTIN_##NAME,                        \
                            BUILT_IN_MD,                                \
                            NULL,                                       \
                            ATTRS);
#undef OB_DEF
#define OB_DEF(NAME, FIRST_VAR_NAME, LAST_VAR_NAME, BFLAGS, FNTYPE)     \
  if (s390_builtin_decls[S390_OVERLOADED_BUILTIN_##NAME + S390_BUILTIN_MAX] \
      == NULL)                                                          \
    s390_builtin_decls[S390_OVERLOADED_BUILTIN_##NAME + S390_BUILTIN_MAX] = \
      add_builtin_function ("__builtin_" #NAME,                         \
                            s390_builtin_fn_types[FNTYPE],              \
                            S390_OVERLOADED_BUILTIN_##NAME + S390_BUILTIN_MAX, \
                            BUILT_IN_MD,                                \
                            NULL,                                       \
                            0);
#undef OB_DEF_VAR
#define OB_DEF_VAR(...)
#include "s390-builtins.def"
}

/* Return true if ARG is appropriate as argument number ARGNUM of
   builtin DECL.  The operand flags from s390-builtins.def have to be
   passed as OP_FLAGS.  */
bool
s390_const_operand_ok (tree arg, int argnum, int op_flags, tree decl)
{
  if (O_UIMM_P (op_flags))
    {
      int bitwidths[] = { 1, 2, 3, 4, 5, 8, 12, 16, 32 };
      int bitwidth = bitwidths[op_flags - O_U1];

      if (!tree_fits_uhwi_p (arg)
          || tree_to_uhwi (arg) > (HOST_WIDE_INT_1U << bitwidth) - 1)
        {
          error ("constant argument %d for builtin %qF is out of range (0.."
                 HOST_WIDE_INT_PRINT_UNSIGNED ")",
                 argnum, decl,
                 (HOST_WIDE_INT_1U << bitwidth) - 1);
          return false;
        }
    }

  if (O_SIMM_P (op_flags))
    {
      int bitwidths[] = { 2, 3, 4, 5, 8, 12, 16, 32 };
      int bitwidth = bitwidths[op_flags - O_S2];

      if (!tree_fits_shwi_p (arg)
          || tree_to_shwi (arg) < -(HOST_WIDE_INT_1 << (bitwidth - 1))
          || tree_to_shwi (arg) > ((HOST_WIDE_INT_1 << (bitwidth - 1)) - 1))
        {
          error ("constant argument %d for builtin %qF is out of range ("
                 HOST_WIDE_INT_PRINT_DEC ".."
                 HOST_WIDE_INT_PRINT_DEC ")",
                 argnum, decl,
                 -(HOST_WIDE_INT_1 << (bitwidth - 1)),
                 (HOST_WIDE_INT_1 << (bitwidth - 1)) - 1);
          return false;
        }
    }
  return true;
}

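/* Worked example (added): an O_U4 operand uses bitwidth 4, so the
   accepted range is 0..15; an O_S8 operand uses bitwidth 8, giving
   -128..127.  Anything outside triggers the diagnostics above.  */
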
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
                     machine_mode mode ATTRIBUTE_UNUSED,
                     int ignore ATTRIBUTE_UNUSED)
{
#define MAX_ARGS 6

  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  rtx op[MAX_ARGS], pat;
  int arity;
  bool nonvoid;
  tree arg;
  call_expr_arg_iterator iter;
  unsigned int all_op_flags = opflags_for_builtin (fcode);
  machine_mode last_vec_mode = VOIDmode;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr,
               "s390_expand_builtin, code = %4d, %s, bflags = 0x%x\n",
               (int)fcode, IDENTIFIER_POINTER (DECL_NAME (fndecl)),
               bflags_for_builtin (fcode));
    }

  if (S390_USE_TARGET_ATTRIBUTE)
    {
      unsigned int bflags;

      bflags = bflags_for_builtin (fcode);
      if ((bflags & B_HTM) && !TARGET_HTM)
        {
          error ("builtin %qF is not supported without -mhtm "
                 "(default with -march=zEC12 and higher).", fndecl);
          return const0_rtx;
        }
      if (((bflags & B_VX) || (bflags & B_VXE)) && !TARGET_VX)
        {
          error ("builtin %qF requires -mvx "
                 "(default with -march=z13 and higher).", fndecl);
          return const0_rtx;
        }

      if ((bflags & B_VXE) && !TARGET_VXE)
        {
          error ("builtin %qF requires z14 or higher.", fndecl);
          return const0_rtx;
        }
    }
  if (fcode >= S390_OVERLOADED_BUILTIN_VAR_OFFSET
      && fcode < S390_ALL_BUILTIN_MAX)
    {
      gcc_unreachable ();
    }
  else if (fcode < S390_OVERLOADED_BUILTIN_OFFSET)
    {
      icode = code_for_builtin[fcode];
      /* Set a flag in the machine specific cfun part in order to support
         saving/restoring of FPRs.  */
      if (fcode == S390_BUILTIN_tbegin || fcode == S390_BUILTIN_tbegin_retry)
        cfun->machine->tbegin_p = true;
    }
  else if (fcode < S390_OVERLOADED_BUILTIN_VAR_OFFSET)
    {
      error ("unresolved overloaded builtin");
      return const0_rtx;
    }
  else
    internal_error ("bad builtin fcode");

  if (icode == 0)
    internal_error ("bad builtin icode");

  nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;

  if (nonvoid)
    {
      machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
          || GET_MODE (target) != tmode
          || !(*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);

      /* There are builtins (e.g. vec_promote) with no vector
         arguments but an element selector.  So we have to also look
         at the vector return type when emitting the modulo
         operation.  */
      if (VECTOR_MODE_P (insn_data[icode].operand[0].mode))
        last_vec_mode = insn_data[icode].operand[0].mode;
    }

  arity = 0;
  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
    {
      rtx tmp_rtx;
      const struct insn_operand_data *insn_op;
      unsigned int op_flags = all_op_flags & ((1 << O_SHIFT) - 1);

      all_op_flags = all_op_flags >> O_SHIFT;

      if (arg == error_mark_node)
        return NULL_RTX;
      if (arity >= MAX_ARGS)
        return NULL_RTX;

      if (O_IMM_P (op_flags)
          && TREE_CODE (arg) != INTEGER_CST)
        {
          error ("constant value required for builtin %qF argument %d",
                 fndecl, arity + 1);
          return const0_rtx;
        }

      if (!s390_const_operand_ok (arg, arity + 1, op_flags, fndecl))
        return const0_rtx;

      insn_op = &insn_data[icode].operand[arity + nonvoid];
      op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL);

      /* expand_expr truncates constants to the target mode only if it
         is "convenient".  However, our checks below rely on this
         being done.  */
      if (CONST_INT_P (op[arity])
          && SCALAR_INT_MODE_P (insn_op->mode)
          && GET_MODE (op[arity]) != insn_op->mode)
        op[arity] = GEN_INT (trunc_int_for_mode (INTVAL (op[arity]),
                                                 insn_op->mode));

      /* Wrap the expanded RTX for pointer types into a MEM expr with
         the proper mode.  This allows us to use e.g. (match_operand
         "memory_operand"..) in the insn patterns instead of (mem
         (match_operand "address_operand)).  This is helpful for
         patterns not just accepting MEMs.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg))
          && insn_op->predicate != address_operand)
        op[arity] = gen_rtx_MEM (insn_op->mode, op[arity]);

      /* Expand the modulo operation required on element selectors.  */
      if (op_flags == O_ELEM)
        {
          gcc_assert (last_vec_mode != VOIDmode);
          op[arity] = simplify_expand_binop (SImode, code_to_optab (AND),
                                             op[arity],
                                             GEN_INT (GET_MODE_NUNITS (last_vec_mode) - 1),
                                             NULL_RTX, 1, OPTAB_DIRECT);
        }

      /* Record the vector mode used for an element selector.  This assumes:
         1. There is no builtin with two different vector modes and an element selector
         2. The element selector comes after the vector type it is referring to.
         This is currently true for all the builtins but FIXME we
         should better check for that.  */
      if (VECTOR_MODE_P (insn_op->mode))
        last_vec_mode = insn_op->mode;

      if (insn_op->predicate (op[arity], insn_op->mode))
        {
          arity++;
          continue;
        }

      if (MEM_P (op[arity])
          && insn_op->predicate == memory_operand
          && (GET_MODE (XEXP (op[arity], 0)) == Pmode
              || GET_MODE (XEXP (op[arity], 0)) == VOIDmode))
        {
          op[arity] = replace_equiv_address (op[arity],
                                             copy_to_mode_reg (Pmode,
                                                               XEXP (op[arity], 0)));
        }
      /* Some of the builtins require different modes/types than the
         pattern in order to implement a specific API.  Instead of
         adding many expanders which do the mode change we do it here.
         E.g. s390_vec_add_u128 is required to have vector unsigned
         char arguments and is mapped to addti3.  */
      else if (insn_op->mode != VOIDmode
               && GET_MODE (op[arity]) != VOIDmode
               && GET_MODE (op[arity]) != insn_op->mode
               && ((tmp_rtx = simplify_gen_subreg (insn_op->mode, op[arity],
                                                   GET_MODE (op[arity]), 0))
                   != NULL_RTX))
        {
          op[arity] = tmp_rtx;
        }
      else if (GET_MODE (op[arity]) == insn_op->mode
               || GET_MODE (op[arity]) == VOIDmode
               || (insn_op->predicate == address_operand
                   && GET_MODE (op[arity]) == Pmode))
        {
          /* An address_operand usually has VOIDmode in the expander
             so we cannot use this.  */
          machine_mode target_mode =
            (insn_op->predicate == address_operand
             ? (machine_mode) Pmode : insn_op->mode);
          op[arity] = copy_to_mode_reg (target_mode, op[arity]);
        }

      if (!insn_op->predicate (op[arity], insn_op->mode))
        {
          error ("invalid argument %d for builtin %qF", arity + 1, fndecl);
          return const0_rtx;
        }
      arity++;
    }

  switch (arity)
    {
    case 0:
      pat = GEN_FCN (icode) (target);
      break;
    case 1:
      if (nonvoid)
        pat = GEN_FCN (icode) (target, op[0]);
      else
        pat = GEN_FCN (icode) (op[0]);
      break;
    case 2:
      if (nonvoid)
        pat = GEN_FCN (icode) (target, op[0], op[1]);
      else
        pat = GEN_FCN (icode) (op[0], op[1]);
      break;
    case 3:
      if (nonvoid)
        pat = GEN_FCN (icode) (target, op[0], op[1], op[2]);
      else
        pat = GEN_FCN (icode) (op[0], op[1], op[2]);
      break;
    case 4:
      if (nonvoid)
        pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3]);
      else
        pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
      break;
    case 5:
      if (nonvoid)
        pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4]);
      else
        pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
      break;
    case 6:
      if (nonvoid)
        pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4], op[5]);
      else
        pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4], op[5]);
      break;
    default:
      gcc_unreachable ();
    }
  if (!pat)
    return NULL_RTX;
  emit_insn (pat);

  if (nonvoid)
    return target;
  else
    return const0_rtx;
}

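/* Worked example (added): for an element-selector operand the modulo
   expansion above ANDs the selector with GET_MODE_NUNITS - 1, so an
   index of 5 applied to a V4SI vector wraps to 5 & 3 == 1, giving
   the intended modulo semantics for element selectors.  */
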
static const int s390_hotpatch_hw_max = 1000000;
static int s390_hotpatch_hw_before_label = 0;
static int s390_hotpatch_hw_after_label = 0;

/* Check whether the hotpatch attribute is applied to a function and, if it has
   an argument, the argument is valid.  */

static tree
s390_handle_hotpatch_attribute (tree *node, tree name, tree args,
                                int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
{
  tree expr;
  tree expr2;
  int err;

  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
               name);
      *no_add_attrs = true;
    }
  if (args != NULL && TREE_CHAIN (args) != NULL)
    {
      expr = TREE_VALUE (args);
      expr2 = TREE_VALUE (TREE_CHAIN (args));
    }
  if (args == NULL || TREE_CHAIN (args) == NULL)
    err = 1;
  else if (TREE_CODE (expr) != INTEGER_CST
           || !INTEGRAL_TYPE_P (TREE_TYPE (expr))
           || wi::gtu_p (expr, s390_hotpatch_hw_max))
    err = 1;
  else if (TREE_CODE (expr2) != INTEGER_CST
           || !INTEGRAL_TYPE_P (TREE_TYPE (expr2))
           || wi::gtu_p (expr2, s390_hotpatch_hw_max))
    err = 1;
  else
    err = 0;
  if (err)
    {
      error ("requested %qE attribute is not a comma separated pair of"
             " non-negative integer constants or too large (max. %d)", name,
             s390_hotpatch_hw_max);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Expand the s390_vector_bool type attribute.  */

static tree
s390_handle_vectorbool_attribute (tree *node, tree name ATTRIBUTE_UNUSED,
                                  tree args ATTRIBUTE_UNUSED,
                                  int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
{
  tree type = *node, result = NULL_TREE;
  machine_mode mode;

  while (POINTER_TYPE_P (type)
         || TREE_CODE (type) == FUNCTION_TYPE
         || TREE_CODE (type) == METHOD_TYPE
         || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  mode = TYPE_MODE (type);
  switch (mode)
    {
    case E_DImode: case E_V2DImode:
      result = s390_builtin_types[BT_BV2DI];
      break;
    case E_SImode: case E_V4SImode:
      result = s390_builtin_types[BT_BV4SI];
      break;
    case E_HImode: case E_V8HImode:
      result = s390_builtin_types[BT_BV8HI];
      break;
    case E_QImode: case E_V16QImode:
      result = s390_builtin_types[BT_BV16QI];
      break;
    default:
      break;
    }

  *no_add_attrs = true;  /* No need to hang on to the attribute.  */

  if (result)
    *node = lang_hooks.types.reconstruct_complex_type (*node, result);

  return NULL_TREE;
}

static const struct attribute_spec s390_attribute_table[] = {
  { "hotpatch", 2, 2, true, false, false, s390_handle_hotpatch_attribute, false },
  { "s390_vector_bool", 0, 0, false, true, false, s390_handle_vectorbool_attribute, true },
  /* End element.  */
  { NULL, 0, 0, false, false, false, NULL, false }
};

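/* Usage sketch (added, hypothetical declaration): the hotpatch
   attribute appears in user code as e.g.
     void f (void) __attribute__ ((hotpatch (1, 2)));
   requesting 1 halfword of padding before and 2 after the function
   label, while s390_vector_bool is the internal type attribute behind
   the zvector "vector bool" types enabled by -mzvector.  */
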
/* Return the alignment for LABEL.  We default to the -falign-labels
   value except for the literal pool base label.  */
int
s390_label_align (rtx_insn *label)
{
  rtx_insn *prev_insn = prev_active_insn (label);
  rtx set, src;

  if (prev_insn == NULL_RTX)
    goto old;

  set = single_set (prev_insn);

  if (set == NULL_RTX)
    goto old;

  src = SET_SRC (set);

  /* Don't align literal pool base labels.  */
  if (GET_CODE (src) == UNSPEC
      && XINT (src, 1) == UNSPEC_MAIN_BASE)
    return 0;

 old:
  return align_labels_log;
}

static GTY(()) rtx got_symbol;

/* Return the GOT table symbol.  The symbol will be created when the
   function is invoked for the first time.  */

static rtx
s390_got_symbol (void)
{
  if (!got_symbol)
    {
      got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
      SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
    }

  return got_symbol;
}

static scalar_int_mode
s390_libgcc_cmp_return_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

static scalar_int_mode
s390_libgcc_shift_count_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

static scalar_int_mode
s390_unwind_word_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

/* Return true if the back end supports mode MODE.  */
static bool
s390_scalar_mode_supported_p (machine_mode mode)
{
  /* In contrast to the default implementation reject TImode constants on 31bit
     TARGET_ZARCH for ABI compliance.  */
  if (!TARGET_64BIT && TARGET_ZARCH && mode == TImode)
    return false;

  if (DECIMAL_FLOAT_MODE_P (mode))
    return default_decimal_float_supported_p ();

  return default_scalar_mode_supported_p (mode);
}

/* Return true if the back end supports vector mode MODE.  */
static bool
s390_vector_mode_supported_p (machine_mode mode)
{
  machine_mode inner;

  if (!VECTOR_MODE_P (mode)
      || !TARGET_VX
      || GET_MODE_SIZE (mode) > 16)
    return false;

  inner = GET_MODE_INNER (mode);

  switch (inner)
    {
    case E_QImode:
    case E_HImode:
    case E_SImode:
    case E_DImode:
    case E_TImode:
    case E_SFmode:
    case E_DFmode:
    case E_TFmode:
      return true;
    default:
      return false;
    }
}

/* Set the has_landing_pad_p flag in struct machine_function to VALUE.  */

void
s390_set_has_landing_pad_p (bool value)
{
  cfun->machine->has_landing_pad_p = value;
}

/* If two condition code modes are compatible, return a condition code
   mode which is compatible with both.  Otherwise, return
   VOIDmode.  */

static machine_mode
s390_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
  if (m1 == m2)
    return m1;

  switch (m1)
    {
    case E_CCZmode:
      if (m2 == CCUmode || m2 == CCTmode || m2 == CCZ1mode
          || m2 == CCSmode || m2 == CCSRmode || m2 == CCURmode)
        return m2;
      return VOIDmode;

    case E_CCSmode:
    case E_CCUmode:
    case E_CCTmode:
    case E_CCSRmode:
    case E_CCURmode:
    case E_CCZ1mode:
      if (m2 == CCZmode)
        return m1;

      return VOIDmode;

    default:
      return VOIDmode;
    }
  return VOIDmode;
}

/* Return true if SET either doesn't set the CC register, or else
   the source and destination have matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.  */

static bool
s390_match_ccmode_set (rtx set, machine_mode req_mode)
{
  machine_mode set_mode;

  gcc_assert (GET_CODE (set) == SET);

  /* These modes are supposed to be used only in CC consumer
     patterns.  */
  gcc_assert (req_mode != CCVIALLmode && req_mode != CCVIANYmode
              && req_mode != CCVFALLmode && req_mode != CCVFANYmode);

  if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
    return 1;

  set_mode = GET_MODE (SET_DEST (set));
  switch (set_mode)
    {
    case E_CCZ1mode:
    case E_CCSmode:
    case E_CCSRmode:
    case E_CCUmode:
    case E_CCURmode:
    case E_CCLmode:
    case E_CCL1mode:
    case E_CCL2mode:
    case E_CCL3mode:
    case E_CCT1mode:
    case E_CCT2mode:
    case E_CCT3mode:
    case E_CCVEQmode:
    case E_CCVIHmode:
    case E_CCVIHUmode:
    case E_CCVFHmode:
    case E_CCVFHEmode:
      if (req_mode != set_mode)
        return 0;
      break;

    case E_CCZmode:
      if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
          && req_mode != CCSRmode && req_mode != CCURmode
          && req_mode != CCZ1mode)
        return 0;
      break;

    case E_CCAPmode:
    case E_CCANmode:
      if (req_mode != CCAmode)
        return 0;
      break;

    default:
      gcc_unreachable ();
    }

  return (GET_MODE (SET_SRC (set)) == set_mode);
}

/* Return true if every SET in INSN that sets the CC register
   has source and destination with matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.
   If REQ_MODE is VOIDmode, always return false.  */

bool
s390_match_ccmode (rtx_insn *insn, machine_mode req_mode)
{
  int i;

  /* s390_tm_ccmode returns VOIDmode to indicate failure.  */
  if (req_mode == VOIDmode)
    return false;

  if (GET_CODE (PATTERN (insn)) == SET)
    return s390_match_ccmode_set (PATTERN (insn), req_mode);

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      {
        rtx set = XVECEXP (PATTERN (insn), 0, i);
        if (GET_CODE (set) == SET)
          if (!s390_match_ccmode_set (set, req_mode))
            return false;
      }

  return true;
}

/* If a test-under-mask instruction can be used to implement
   (compare (and ... OP1) OP2), return the CC mode required
   to do that.  Otherwise, return VOIDmode.
   MIXED is true if the instruction can distinguish between
   CC1 and CC2 for mixed selected bits (TMxx), it is false
   if the instruction cannot (TM).  */

machine_mode
s390_tm_ccmode (rtx op1, rtx op2, bool mixed)
{
  int bit0, bit1;

  /* ??? Fixme: should work on CONST_WIDE_INT as well.  */
  if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
    return VOIDmode;

  /* Selected bits all zero: CC0.
     e.g.: int a; if ((a & (16 + 128)) == 0) */
  if (INTVAL (op2) == 0)
    return CCTmode;

  /* Selected bits all one: CC3.
     e.g.: int a; if ((a & (16 + 128)) == 16 + 128) */
  if (INTVAL (op2) == INTVAL (op1))
    return CCT3mode;

  /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. e.g.:
     int a;
     if ((a & (16 + 128)) == 16)  -> CCT1
     if ((a & (16 + 128)) == 128) -> CCT2  */
  if (mixed)
    {
      bit1 = exact_log2 (INTVAL (op2));
      bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
      if (bit0 != -1 && bit1 != -1)
        return bit0 > bit1 ? CCT1mode : CCT2mode;
    }

  return VOIDmode;
}

/* Given a comparison code OP (EQ, NE, etc.) and the operands
   OP0 and OP1 of a COMPARE, return the mode to be used for the
   comparison.  */

machine_mode
s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
{
  switch (code)
    {
    case EQ:
    case NE:
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
        return CCAPmode;
      if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
           || GET_CODE (op1) == NEG)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCLmode;

      if (GET_CODE (op0) == AND)
        {
          /* Check whether we can potentially do it via TM.  */
          machine_mode ccmode;
          ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
          if (ccmode != VOIDmode)
            {
              /* Relax CCTmode to CCZmode to allow fall-back to AND
                 if that turns out to be beneficial.  */
              return ccmode == CCTmode ? CCZmode : ccmode;
            }
        }

      if (register_operand (op0, HImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
        return CCT3mode;
      if (register_operand (op0, QImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
        return CCT3mode;

      return CCZmode;

    case LE:
    case LT:
    case GE:
    case GT:
      /* The only overflow condition of NEG and ABS happens when
         INT_MIN is used as the parameter; the result stays negative.
         So we have an overflow from a positive value to a negative.
         Using CCAP mode the resulting cc can be used for
         comparisons.  */
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;

      /* If constants are involved in an add instruction it is possible to use
         the resulting cc for comparisons with zero.  Knowing the sign of the
         constant the overflow behavior gets predictable.  e.g.:
           int a, b; if ((b = a + c) > 0)
         with c as a constant value: c < 0 -> CCAN and c >= 0 -> CCAP  */
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && (CONST_OK_FOR_K (INTVAL (XEXP (op0, 1)))
              || (CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'O', "Os")
                  /* Avoid INT32_MIN on 32 bit.  */
                  && (!TARGET_ZARCH || INTVAL (XEXP (op0, 1)) != -0x7fffffff - 1))))
        {
          if (INTVAL (XEXP (op0, 1)) < 0)
            return CCANmode;
          else
            return CCAPmode;
        }
      /* Fall through.  */
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case UNLE:
    case UNLT:
    case UNGE:
    case UNGT:
    case LTGT:
      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCSRmode;
      return CCSmode;

    case LTU:
    case GEU:
      if (GET_CODE (op0) == PLUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL1mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    case LEU:
    case GTU:
      if (GET_CODE (op0) == MINUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL2mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    default:
      gcc_unreachable ();
    }
}

68f9c5e2
UW
1565/* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
1566 that we can implement more efficiently. */
1567
c354951b
AK
1568static void
1569s390_canonicalize_comparison (int *code, rtx *op0, rtx *op1,
1570 bool op0_preserve_value)
68f9c5e2 1571{
c354951b
AK
1572 if (op0_preserve_value)
1573 return;
1574
68f9c5e2
UW
1575 /* Convert ZERO_EXTRACT back to AND to enable TM patterns. */
1576 if ((*code == EQ || *code == NE)
1577 && *op1 == const0_rtx
1578 && GET_CODE (*op0) == ZERO_EXTRACT
1579 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
1580 && GET_CODE (XEXP (*op0, 2)) == CONST_INT
1581 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
1582 {
1583 rtx inner = XEXP (*op0, 0);
1584 HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
1585 HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
1586 HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));
1587
1588 if (len > 0 && len < modesize
1589 && pos >= 0 && pos + len <= modesize
1590 && modesize <= HOST_BITS_PER_WIDE_INT)
1591 {
1592 unsigned HOST_WIDE_INT block;
406fde6e 1593 block = (HOST_WIDE_INT_1U << len) - 1;
68f9c5e2
UW
1594 block <<= modesize - pos - len;
1595
1596 *op0 = gen_rtx_AND (GET_MODE (inner), inner,
1597 gen_int_mode (block, GET_MODE (inner)));
1598 }
1599 }
1600
1601 /* Narrow AND of memory against immediate to enable TM. */
1602 if ((*code == EQ || *code == NE)
1603 && *op1 == const0_rtx
1604 && GET_CODE (*op0) == AND
1605 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
1606 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
1607 {
1608 rtx inner = XEXP (*op0, 0);
1609 rtx mask = XEXP (*op0, 1);
1610
1611 /* Ignore paradoxical SUBREGs if all extra bits are masked out. */
1612 if (GET_CODE (inner) == SUBREG
1613 && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
1614 && (GET_MODE_SIZE (GET_MODE (inner))
1615 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
1616 && ((INTVAL (mask)
1617 & GET_MODE_MASK (GET_MODE (inner))
1618 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
1619 == 0))
1620 inner = SUBREG_REG (inner);
1621
1622 /* Do not change volatile MEMs. */
1623 if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
1624 {
1625 int part = s390_single_part (XEXP (*op0, 1),
1626 GET_MODE (inner), QImode, 0);
1627 if (part >= 0)
1628 {
1629 mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
1630 inner = adjust_address_nv (inner, QImode, part);
1631 *op0 = gen_rtx_AND (QImode, inner, mask);
1632 }
1633 }
1634 }
1635
1636 /* Narrow comparisons against 0xffff to HImode if possible. */
68f9c5e2
UW
1637 if ((*code == EQ || *code == NE)
1638 && GET_CODE (*op1) == CONST_INT
1639 && INTVAL (*op1) == 0xffff
1640 && SCALAR_INT_MODE_P (GET_MODE (*op0))
f4aa3848 1641 && (nonzero_bits (*op0, GET_MODE (*op0))
406fde6e 1642 & ~HOST_WIDE_INT_UC (0xffff)) == 0)
68f9c5e2
UW
1643 {
1644 *op0 = gen_lowpart (HImode, *op0);
1645 *op1 = constm1_rtx;
1646 }
5b022de5 1647
5a3fe9b6 1648 /* Remove redundant UNSPEC_STRCMPCC_TO_INT conversions if possible. */
5b022de5 1649 if (GET_CODE (*op0) == UNSPEC
5a3fe9b6 1650 && XINT (*op0, 1) == UNSPEC_STRCMPCC_TO_INT
5b022de5
UW
1651 && XVECLEN (*op0, 0) == 1
1652 && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
1653 && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
1654 && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
1655 && *op1 == const0_rtx)
1656 {
1657 enum rtx_code new_code = UNKNOWN;
1658 switch (*code)
1659 {
1660 case EQ: new_code = EQ; break;
1661 case NE: new_code = NE; break;
02887425
UW
1662 case LT: new_code = GTU; break;
1663 case GT: new_code = LTU; break;
1664 case LE: new_code = GEU; break;
1665 case GE: new_code = LEU; break;
5b022de5
UW
1666 default: break;
1667 }
1668
1669 if (new_code != UNKNOWN)
1670 {
1671 *op0 = XVECEXP (*op0, 0, 0);
1672 *code = new_code;
1673 }
1674 }
69950452 1675
5a3fe9b6 1676 /* Remove redundant UNSPEC_CC_TO_INT conversions if possible. */
638e37c2 1677 if (GET_CODE (*op0) == UNSPEC
5a3fe9b6 1678 && XINT (*op0, 1) == UNSPEC_CC_TO_INT
638e37c2 1679 && XVECLEN (*op0, 0) == 1
638e37c2
WG
1680 && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
1681 && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
5a3fe9b6 1682 && CONST_INT_P (*op1))
638e37c2
WG
1683 {
1684 enum rtx_code new_code = UNKNOWN;
5a3fe9b6 1685 switch (GET_MODE (XVECEXP (*op0, 0, 0)))
638e37c2 1686 {
4e10a5a7
RS
1687 case E_CCZmode:
1688 case E_CCRAWmode:
5a3fe9b6
AK
1689 switch (*code)
1690 {
1691 case EQ: new_code = EQ; break;
1692 case NE: new_code = NE; break;
1693 default: break;
1694 }
1695 break;
1696 default: break;
638e37c2
WG
1697 }
1698
1699 if (new_code != UNKNOWN)
1700 {
5a3fe9b6
AK
1701 /* For CCRAWmode put the required cc mask into the second
1702 operand. */
2561451d
AK
1703 if (GET_MODE (XVECEXP (*op0, 0, 0)) == CCRAWmode
1704 && INTVAL (*op1) >= 0 && INTVAL (*op1) <= 3)
5a3fe9b6 1705 *op1 = gen_rtx_CONST_INT (VOIDmode, 1 << (3 - INTVAL (*op1)));
638e37c2
WG
1706 *op0 = XVECEXP (*op0, 0, 0);
1707 *code = new_code;
1708 }
1709 }
1710
69950452
AS
1711 /* Simplify cascaded EQ, NE with const0_rtx. */
1712 if ((*code == NE || *code == EQ)
1713 && (GET_CODE (*op0) == EQ || GET_CODE (*op0) == NE)
1714 && GET_MODE (*op0) == SImode
1715 && GET_MODE (XEXP (*op0, 0)) == CCZ1mode
1716 && REG_P (XEXP (*op0, 0))
1717 && XEXP (*op0, 1) == const0_rtx
1718 && *op1 == const0_rtx)
1719 {
1720 if ((*code == EQ && GET_CODE (*op0) == NE)
1721 || (*code == NE && GET_CODE (*op0) == EQ))
1722 *code = EQ;
1723 else
1724 *code = NE;
1725 *op0 = XEXP (*op0, 0);
1726 }
c5b2a111
UW
1727
1728 /* Prefer register over memory as first operand. */
1729 if (MEM_P (*op0) && REG_P (*op1))
1730 {
1731 rtx tem = *op0; *op0 = *op1; *op1 = tem;
c354951b 1732 *code = (int)swap_condition ((enum rtx_code)*code);
c5b2a111 1733 }
6e5b5de8 1734
eca98038
AK
1735 /* A comparison result is compared against zero. Replace it with
1736 the (perhaps inverted) original comparison.
1737 This probably should be done by simplify_relational_operation. */
1738 if ((*code == EQ || *code == NE)
1739 && *op1 == const0_rtx
1740 && COMPARISON_P (*op0)
1741 && CC_REG_P (XEXP (*op0, 0)))
1742 {
1743 enum rtx_code new_code;
1744
1745 if (*code == EQ)
1746 new_code = reversed_comparison_code_parts (GET_CODE (*op0),
1747 XEXP (*op0, 0),
1748 XEXP (*op0, 1), NULL);
1749 else
1750 new_code = GET_CODE (*op0);
1751
1752 if (new_code != UNKNOWN)
1753 {
1754 *code = new_code;
1755 *op1 = XEXP (*op0, 1);
1756 *op0 = XEXP (*op0, 0);
1757 }
1758 }
68f9c5e2
UW
1759}
1760
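/* Editorial worked example (not part of the original source) for the
   "narrow AND of memory" transformation above: for
     (eq (and:SI (mem:SI X) (const_int 0x00ff0000)) (const_int 0))
   s390_single_part returns byte index 1 and s390_extract_part yields
   0xff, so the comparison is rewritten as a QImode AND of the byte at
   offset 1 with mask 0xff, which matches the TM instruction pattern.  */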
6e5b5de8 1761
6590e19a
UW
1762/* Emit a compare instruction suitable to implement the comparison
1763 OP0 CODE OP1. Return the correct condition RTL to be placed in
1764 the IF_THEN_ELSE of the conditional branch testing the result. */
1765
1766rtx
1767s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
1768{
ef4bddc2 1769 machine_mode mode = s390_select_ccmode (code, op0, op1);
4a77c72b 1770 rtx cc;
6590e19a 1771
77c585ca 1772 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4a77c72b 1773 {
6e5b5de8
AK
1774 /* Do not output a redundant compare instruction if a
1775 compare_and_swap pattern already computed the result and the
1776 machine modes are compatible. */
4a77c72b
PB
1777 gcc_assert (s390_cc_modes_compatible (GET_MODE (op0), mode)
1778 == GET_MODE (op0));
1779 cc = op0;
1780 }
e0374221
AS
1781 else
1782 {
4a77c72b 1783 cc = gen_rtx_REG (mode, CC_REGNUM);
f7df4a84 1784 emit_insn (gen_rtx_SET (cc, gen_rtx_COMPARE (mode, op0, op1)));
e0374221 1785 }
4a77c72b 1786
f4aa3848 1787 return gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
6590e19a
UW
1788}
1789
0a2aaacc 1790/* Emit a SImode compare and swap instruction setting MEM to NEW_RTX if OLD
8bb501bb
AK
1791 matches CMP.
1792 Return the correct condition RTL to be placed in the IF_THEN_ELSE of the
1793 conditional branch testing the result. */
1794
1795static rtx
78ce265b 1796s390_emit_compare_and_swap (enum rtx_code code, rtx old, rtx mem,
03db9ab5 1797 rtx cmp, rtx new_rtx, machine_mode ccmode)
8bb501bb 1798{
03db9ab5
DV
1799 rtx cc;
1800
1801 cc = gen_rtx_REG (ccmode, CC_REGNUM);
1802 switch (GET_MODE (mem))
1803 {
4e10a5a7 1804 case E_SImode:
03db9ab5
DV
1805 emit_insn (gen_atomic_compare_and_swapsi_internal (old, mem, cmp,
1806 new_rtx, cc));
1807 break;
4e10a5a7 1808 case E_DImode:
03db9ab5
DV
1809 emit_insn (gen_atomic_compare_and_swapdi_internal (old, mem, cmp,
1810 new_rtx, cc));
1811 break;
4e10a5a7 1812 case E_TImode:
03db9ab5
DV
1813 emit_insn (gen_atomic_compare_and_swapti_internal (old, mem, cmp,
1814 new_rtx, cc));
1815 break;
4e10a5a7
RS
1816 case E_QImode:
1817 case E_HImode:
03db9ab5
DV
1818 default:
1819 gcc_unreachable ();
1820 }
1821 return s390_emit_compare (code, cc, const0_rtx);
8bb501bb
AK
1822}
1823
5a3fe9b6
AK
1824/* Emit a jump instruction to TARGET and return it. If COND is
1825 NULL_RTX, emit an unconditional jump, else a conditional jump under
1826 condition COND. */
6590e19a 1827
775c43d3 1828rtx_insn *
6590e19a
UW
1829s390_emit_jump (rtx target, rtx cond)
1830{
1831 rtx insn;
1832
1833 target = gen_rtx_LABEL_REF (VOIDmode, target);
1834 if (cond)
1835 target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);
1836
f7df4a84 1837 insn = gen_rtx_SET (pc_rtx, target);
5a3fe9b6 1838 return emit_jump_insn (insn);
6590e19a
UW
1839}
1840
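/* Editorial sketch (not in the original source; the function name is
   hypothetical): the two helpers above combine to emit a compare
   followed by a conditional branch, i.e. "if (OP0 > OP1) goto LABEL".  */

static rtx_insn *
s390_emit_compare_and_jump_sketch (rtx op0, rtx op1, rtx label)
{
  /* Select the CC mode, emit the compare, and build the cond rtx.  */
  rtx cond = s390_emit_compare (GT, op0, op1);
  /* Emit the branch conditional on COND.  */
  return s390_emit_jump (label, cond);
}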
c7453384 1841/* Return branch condition mask to implement a branch
5b022de5 1842 specified by CODE. Return -1 for invalid comparisons. */
ba956982 1843
0bfc3f69 1844int
9c808aad 1845s390_branch_condition_mask (rtx code)
c7453384 1846{
ba956982
UW
1847 const int CC0 = 1 << 3;
1848 const int CC1 = 1 << 2;
1849 const int CC2 = 1 << 1;
1850 const int CC3 = 1 << 0;
1851
8d933e31
AS
1852 gcc_assert (GET_CODE (XEXP (code, 0)) == REG);
1853 gcc_assert (REGNO (XEXP (code, 0)) == CC_REGNUM);
5a3fe9b6
AK
1854 gcc_assert (XEXP (code, 1) == const0_rtx
1855 || (GET_MODE (XEXP (code, 0)) == CCRAWmode
1856 && CONST_INT_P (XEXP (code, 1))));
1857
ba956982
UW
1858
1859 switch (GET_MODE (XEXP (code, 0)))
1860 {
4e10a5a7
RS
1861 case E_CCZmode:
1862 case E_CCZ1mode:
ba956982
UW
1863 switch (GET_CODE (code))
1864 {
1865 case EQ: return CC0;
1866 case NE: return CC1 | CC2 | CC3;
5b022de5 1867 default: return -1;
ba956982
UW
1868 }
1869 break;
1870
4e10a5a7 1871 case E_CCT1mode:
07893d4f
UW
1872 switch (GET_CODE (code))
1873 {
1874 case EQ: return CC1;
1875 case NE: return CC0 | CC2 | CC3;
5b022de5 1876 default: return -1;
07893d4f
UW
1877 }
1878 break;
1879
4e10a5a7 1880 case E_CCT2mode:
07893d4f
UW
1881 switch (GET_CODE (code))
1882 {
1883 case EQ: return CC2;
1884 case NE: return CC0 | CC1 | CC3;
5b022de5 1885 default: return -1;
07893d4f
UW
1886 }
1887 break;
1888
4e10a5a7 1889 case E_CCT3mode:
07893d4f
UW
1890 switch (GET_CODE (code))
1891 {
1892 case EQ: return CC3;
1893 case NE: return CC0 | CC1 | CC2;
5b022de5 1894 default: return -1;
07893d4f
UW
1895 }
1896 break;
1897
4e10a5a7 1898 case E_CCLmode:
ba956982
UW
1899 switch (GET_CODE (code))
1900 {
1901 case EQ: return CC0 | CC2;
1902 case NE: return CC1 | CC3;
5b022de5 1903 default: return -1;
07893d4f
UW
1904 }
1905 break;
1906
4e10a5a7 1907 case E_CCL1mode:
07893d4f
UW
1908 switch (GET_CODE (code))
1909 {
1910 case LTU: return CC2 | CC3; /* carry */
1911 case GEU: return CC0 | CC1; /* no carry */
5b022de5 1912 default: return -1;
07893d4f
UW
1913 }
1914 break;
1915
4e10a5a7 1916 case E_CCL2mode:
07893d4f
UW
1917 switch (GET_CODE (code))
1918 {
1919 case GTU: return CC0 | CC1; /* borrow */
1920 case LEU: return CC2 | CC3; /* no borrow */
5b022de5 1921 default: return -1;
ba956982
UW
1922 }
1923 break;
1924
4e10a5a7 1925 case E_CCL3mode:
5d880bd2
UW
1926 switch (GET_CODE (code))
1927 {
1928 case EQ: return CC0 | CC2;
1929 case NE: return CC1 | CC3;
1930 case LTU: return CC1;
1931 case GTU: return CC3;
1932 case LEU: return CC1 | CC2;
1933 case GEU: return CC2 | CC3;
5b022de5 1934 default: return -1;
5d880bd2
UW
1935 }
1936
4e10a5a7 1937 case E_CCUmode:
ba956982
UW
1938 switch (GET_CODE (code))
1939 {
1940 case EQ: return CC0;
1941 case NE: return CC1 | CC2 | CC3;
1942 case LTU: return CC1;
1943 case GTU: return CC2;
1944 case LEU: return CC0 | CC1;
1945 case GEU: return CC0 | CC2;
5b022de5 1946 default: return -1;
ba956982
UW
1947 }
1948 break;
1949
4e10a5a7 1950 case E_CCURmode:
07893d4f
UW
1951 switch (GET_CODE (code))
1952 {
1953 case EQ: return CC0;
1954 case NE: return CC2 | CC1 | CC3;
1955 case LTU: return CC2;
1956 case GTU: return CC1;
1957 case LEU: return CC0 | CC2;
1958 case GEU: return CC0 | CC1;
5b022de5 1959 default: return -1;
07893d4f
UW
1960 }
1961 break;
1962
4e10a5a7 1963 case E_CCAPmode:
0a3bdf9d
UW
1964 switch (GET_CODE (code))
1965 {
1966 case EQ: return CC0;
1967 case NE: return CC1 | CC2 | CC3;
1968 case LT: return CC1 | CC3;
1969 case GT: return CC2;
1970 case LE: return CC0 | CC1 | CC3;
1971 case GE: return CC0 | CC2;
5b022de5 1972 default: return -1;
0a3bdf9d
UW
1973 }
1974 break;
1975
4e10a5a7 1976 case E_CCANmode:
0a3bdf9d
UW
1977 switch (GET_CODE (code))
1978 {
1979 case EQ: return CC0;
1980 case NE: return CC1 | CC2 | CC3;
1981 case LT: return CC1;
1982 case GT: return CC2 | CC3;
1983 case LE: return CC0 | CC1;
1984 case GE: return CC0 | CC2 | CC3;
5b022de5 1985 default: return -1;
0a3bdf9d
UW
1986 }
1987 break;
1988
4e10a5a7 1989 case E_CCSmode:
ba956982
UW
1990 switch (GET_CODE (code))
1991 {
1992 case EQ: return CC0;
1993 case NE: return CC1 | CC2 | CC3;
1994 case LT: return CC1;
1995 case GT: return CC2;
1996 case LE: return CC0 | CC1;
1997 case GE: return CC0 | CC2;
1998 case UNORDERED: return CC3;
1999 case ORDERED: return CC0 | CC1 | CC2;
2000 case UNEQ: return CC0 | CC3;
2001 case UNLT: return CC1 | CC3;
2002 case UNGT: return CC2 | CC3;
2003 case UNLE: return CC0 | CC1 | CC3;
2004 case UNGE: return CC0 | CC2 | CC3;
2005 case LTGT: return CC1 | CC2;
5b022de5 2006 default: return -1;
ba956982 2007 }
07893d4f
UW
2008 break;
2009
4e10a5a7 2010 case E_CCSRmode:
07893d4f
UW
2011 switch (GET_CODE (code))
2012 {
2013 case EQ: return CC0;
2014 case NE: return CC2 | CC1 | CC3;
2015 case LT: return CC2;
2016 case GT: return CC1;
2017 case LE: return CC0 | CC2;
2018 case GE: return CC0 | CC1;
2019 case UNORDERED: return CC3;
2020 case ORDERED: return CC0 | CC2 | CC1;
2021 case UNEQ: return CC0 | CC3;
2022 case UNLT: return CC2 | CC3;
2023 case UNGT: return CC1 | CC3;
2024 case UNLE: return CC0 | CC2 | CC3;
2025 case UNGE: return CC0 | CC1 | CC3;
2026 case LTGT: return CC2 | CC1;
5b022de5 2027 default: return -1;
07893d4f
UW
2028 }
2029 break;
ba956982 2030
6e5b5de8 2031 /* Vector comparison modes. */
a6a2b532
AK
2032 /* CC2 will never be set. It is, however, part of the negated
2033 masks. */
4e10a5a7 2034 case E_CCVIALLmode:
6e5b5de8
AK
2035 switch (GET_CODE (code))
2036 {
a6a2b532
AK
2037 case EQ:
2038 case GTU:
2039 case GT:
2040 case GE: return CC0;
2041 /* The inverted modes are in fact *any* modes. */
2042 case NE:
2043 case LEU:
2044 case LE:
2045 case LT: return CC3 | CC1 | CC2;
6e5b5de8
AK
2046 default: return -1;
2047 }
3af82a61 2048
4e10a5a7 2049 case E_CCVIANYmode:
3af82a61
AK
2050 switch (GET_CODE (code))
2051 {
a6a2b532
AK
2052 case EQ:
2053 case GTU:
2054 case GT:
2055 case GE: return CC0 | CC1;
2056 /* The inverted modes are in fact *all* modes. */
2057 case NE:
2058 case LEU:
2059 case LE:
2060 case LT: return CC3 | CC2;
3af82a61
AK
2061 default: return -1;
2062 }
4e10a5a7 2063 case E_CCVFALLmode:
6e5b5de8
AK
2064 switch (GET_CODE (code))
2065 {
a6a2b532
AK
2066 case EQ:
2067 case GT:
6e5b5de8 2068 case GE: return CC0;
a6a2b532
AK
2069 /* The inverted modes are in fact *any* modes. */
2070 case NE:
2071 case UNLE:
2072 case UNLT: return CC3 | CC1 | CC2;
6e5b5de8
AK
2073 default: return -1;
2074 }
3af82a61 2075
4e10a5a7 2076 case E_CCVFANYmode:
3af82a61
AK
2077 switch (GET_CODE (code))
2078 {
a6a2b532
AK
2079 case EQ:
2080 case GT:
3af82a61 2081 case GE: return CC0 | CC1;
a6a2b532
AK
2082 /* The inverted modes are in fact *all* modes. */
2083 case NE:
2084 case UNLE:
2085 case UNLT: return CC3 | CC2;
3af82a61
AK
2086 default: return -1;
2087 }
2088
4e10a5a7 2089 case E_CCRAWmode:
5a3fe9b6
AK
2090 switch (GET_CODE (code))
2091 {
2092 case EQ:
2093 return INTVAL (XEXP (code, 1));
2094 case NE:
2095 return (INTVAL (XEXP (code, 1))) ^ 0xf;
2096 default:
2097 gcc_unreachable ();
2098 }
2099
ba956982 2100 default:
5b022de5 2101 return -1;
ba956982
UW
2102 }
2103}
2104
963fc8d0
AK
2105
2106/* Return branch condition mask to implement a compare and branch
2107 specified by CODE. Return -1 for invalid comparisons. */
2108
2109int
2110s390_compare_and_branch_condition_mask (rtx code)
2111{
2112 const int CC0 = 1 << 3;
2113 const int CC1 = 1 << 2;
2114 const int CC2 = 1 << 1;
2115
2116 switch (GET_CODE (code))
2117 {
2118 case EQ:
2119 return CC0;
2120 case NE:
2121 return CC1 | CC2;
2122 case LT:
2123 case LTU:
2124 return CC1;
2125 case GT:
2126 case GTU:
2127 return CC2;
2128 case LE:
2129 case LEU:
2130 return CC0 | CC1;
2131 case GE:
2132 case GEU:
2133 return CC0 | CC2;
2134 default:
2135 gcc_unreachable ();
2136 }
2137 return -1;
2138}
2139
c7453384
EC
2140/* If INV is false, return assembler mnemonic string to implement
2141 a branch specified by CODE. If INV is true, return mnemonic
ba956982
UW
2142 for the corresponding inverted branch. */
2143
2144static const char *
9c808aad 2145s390_branch_condition_mnemonic (rtx code, int inv)
ba956982 2146{
963fc8d0
AK
2147 int mask;
2148
0139adca 2149 static const char *const mnemonic[16] =
ba956982
UW
2150 {
2151 NULL, "o", "h", "nle",
2152 "l", "nhe", "lh", "ne",
2153 "e", "nlh", "he", "nl",
2154 "le", "nh", "no", NULL
2155 };
2156
963fc8d0
AK
2157 if (GET_CODE (XEXP (code, 0)) == REG
2158 && REGNO (XEXP (code, 0)) == CC_REGNUM
5a3fe9b6
AK
2159 && (XEXP (code, 1) == const0_rtx
2160 || (GET_MODE (XEXP (code, 0)) == CCRAWmode
2161 && CONST_INT_P (XEXP (code, 1)))))
963fc8d0
AK
2162 mask = s390_branch_condition_mask (code);
2163 else
2164 mask = s390_compare_and_branch_condition_mask (code);
2165
5b022de5 2166 gcc_assert (mask >= 0);
ba956982
UW
2167
2168 if (inv)
2169 mask ^= 15;
2170
8d933e31 2171 gcc_assert (mask >= 1 && mask <= 14);
ba956982
UW
2172
2173 return mnemonic[mask];
2174}
2175
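/* Editorial worked example (not part of the original source): for
   (eq (reg:CCU 33) (const_int 0)), s390_branch_condition_mask returns
   CC0 == 8 and s390_branch_condition_mnemonic selects mnemonic[8],
   i.e. "e"; with INV set the mask becomes 8 ^ 15 == 7, giving "ne".  */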
f19a9af7
AK
2176/* Return the part of op which has a value different from def.
2177 The size of the part is determined by mode.
38899e29 2178 Use this function only if you already know that op really
f19a9af7 2179 contains such a part. */
4023fb28 2180
f19a9af7 2181unsigned HOST_WIDE_INT
ef4bddc2 2182s390_extract_part (rtx op, machine_mode mode, int def)
4023fb28 2183{
f19a9af7
AK
2184 unsigned HOST_WIDE_INT value = 0;
2185 int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
2186 int part_bits = GET_MODE_BITSIZE (mode);
406fde6e 2187 unsigned HOST_WIDE_INT part_mask = (HOST_WIDE_INT_1U << part_bits) - 1;
f19a9af7 2188 int i;
38899e29 2189
f19a9af7 2190 for (i = 0; i < max_parts; i++)
4023fb28 2191 {
f19a9af7 2192 if (i == 0)
406fde6e 2193 value = UINTVAL (op);
4023fb28 2194 else
f19a9af7 2195 value >>= part_bits;
38899e29 2196
f19a9af7
AK
2197 if ((value & part_mask) != (def & part_mask))
2198 return value & part_mask;
4023fb28 2199 }
38899e29 2200
8d933e31 2201 gcc_unreachable ();
4023fb28
UW
2202}
2203
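/* Editorial worked example (not part of the original source):
   s390_extract_part (GEN_INT (0x12340000), HImode, 0) scans the four
   HImode parts from least to most significant; the lowest part equals
   DEF (0), so the first differing part, 0x1234, is returned.  */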
2204/* If OP is an integer constant of mode MODE with exactly one
f19a9af7
AK
2205 part of mode PART_MODE unequal to DEF, return the number of that
2206 part. Otherwise, return -1. */
4023fb28
UW
2207
2208int
38899e29 2209s390_single_part (rtx op,
ef4bddc2
RS
2210 machine_mode mode,
2211 machine_mode part_mode,
f19a9af7
AK
2212 int def)
2213{
2214 unsigned HOST_WIDE_INT value = 0;
2215 int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
c4d50129 2216 unsigned HOST_WIDE_INT part_mask
406fde6e 2217 = (HOST_WIDE_INT_1U << GET_MODE_BITSIZE (part_mode)) - 1;
f19a9af7
AK
2218 int i, part = -1;
2219
2220 if (GET_CODE (op) != CONST_INT)
2221 return -1;
38899e29 2222
f19a9af7
AK
2223 for (i = 0; i < n_parts; i++)
2224 {
2225 if (i == 0)
406fde6e 2226 value = UINTVAL (op);
4023fb28 2227 else
f19a9af7 2228 value >>= GET_MODE_BITSIZE (part_mode);
38899e29 2229
f19a9af7
AK
2230 if ((value & part_mask) != (def & part_mask))
2231 {
2232 if (part != -1)
2233 return -1;
2234 else
2235 part = i;
2236 }
4023fb28 2237 }
f19a9af7 2238 return part == -1 ? -1 : n_parts - 1 - part;
4023fb28
UW
2239}
2240
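/* Editorial worked example (not part of the original source):
   s390_single_part (GEN_INT (0xff0000), SImode, QImode, 0) finds
   exactly one QImode part unequal to 0 and returns its number counted
   from the most significant part: 4 - 1 - 2 == 1.  For 0xff00ff two
   parts differ, so -1 is returned.  */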
963fc8d0 2241/* Return true if IN contains a contiguous bitfield in the lower SIZE
c2586c82 2242 bits and no other bits are set in (the lower SIZE bits of) IN.
963fc8d0 2243
c2586c82
DV
2244 PSTART and PEND can be used to obtain the start and end
2245 position (inclusive) of the bitfield relative to 64
2246 bits. *PSTART / *PEND gives the position of the first/last bit
2247 of the bitfield counting from the highest order bit starting
2248 with zero. */
963fc8d0
AK
2249
2250bool
c2586c82
DV
2251s390_contiguous_bitmask_nowrap_p (unsigned HOST_WIDE_INT in, int size,
2252 int *pstart, int *pend)
963fc8d0 2253{
c2586c82
DV
2254 int start;
2255 int end = -1;
406fde6e
DV
2256 int lowbit = HOST_BITS_PER_WIDE_INT - 1;
2257 int highbit = HOST_BITS_PER_WIDE_INT - size;
2258 unsigned HOST_WIDE_INT bitmask = HOST_WIDE_INT_1U;
c2586c82
DV
2259
2260 gcc_assert (!!pstart == !!pend);
2261 for (start = lowbit; start >= highbit; bitmask <<= 1, start--)
2262 if (end == -1)
2263 {
2264 /* Look for the rightmost bit of a contiguous range of ones. */
2265 if (bitmask & in)
2266 /* Found it. */
2267 end = start;
2268 }
2269 else
2270 {
2271 /* Look for the first zero bit after the range of ones. */
2272 if (! (bitmask & in))
2273 /* Found it. */
2274 break;
2275 }
2276 /* We're one past the last one-bit. */
2277 start++;
963fc8d0 2278
c2586c82
DV
2279 if (end == -1)
2280 /* No one bits found. */
2281 return false;
2282
2283 if (start > highbit)
963fc8d0 2284 {
c2586c82
DV
2285 unsigned HOST_WIDE_INT mask;
2286
2287 /* Calculate a mask for all bits beyond the contiguous bits. */
406fde6e
DV
2288 mask = ((~HOST_WIDE_INT_0U >> highbit)
2289 & (~HOST_WIDE_INT_0U << (lowbit - start + 1)));
c2586c82
DV
2290 if (mask & in)
2291 /* There are more bits set beyond the first range of one bits. */
2292 return false;
963fc8d0
AK
2293 }
2294
c2586c82
DV
2295 if (pstart)
2296 {
2297 *pstart = start;
2298 *pend = end;
2299 }
963fc8d0 2300
c2586c82
DV
2301 return true;
2302}
963fc8d0 2303
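/* Editorial self-check sketch (not in the original source; the
   function name is hypothetical).  0xf0 in the lower 16 bits is the
   contiguous range covering positions 56..59, counted from the
   high-order bit of the 64-bit value; 0x90 contains two one-bit runs
   within 8 bits and is rejected.  */

static void
s390_contiguous_bitmask_nowrap_sketch (void)
{
  int start, end;

  gcc_assert (s390_contiguous_bitmask_nowrap_p (0xf0, 16, &start, &end));
  gcc_assert (start == 56 && end == 59);
  gcc_assert (!s390_contiguous_bitmask_nowrap_p (0x90, 8, NULL, NULL));
}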
c2586c82
DV
2304/* Same as s390_contiguous_bitmask_nowrap_p but also returns true
2305 if ~IN contains a contiguous bitfield. In that case, *END is <
2306 *START.
085261c8 2307
c2586c82
DV
2308 If WRAP_P is true, a bitmask that wraps around is also tested.
2309 When a wraparound occurs *START is greater than *END (in
2310 non-null pointers), and the uppermost (64 - SIZE) bits are thus
2311 part of the range. If WRAP_P is false, no wraparound is
2312 tested. */
963fc8d0 2313
c2586c82
DV
2314bool
2315s390_contiguous_bitmask_p (unsigned HOST_WIDE_INT in, bool wrap_p,
2316 int size, int *start, int *end)
2317{
406fde6e 2318 int bs = HOST_BITS_PER_WIDE_INT;
c2586c82
DV
2319 bool b;
2320
2321 gcc_assert (!!start == !!end);
406fde6e 2322 if ((in & ((~HOST_WIDE_INT_0U) >> (bs - size))) == 0)
c2586c82
DV
2323 /* This cannot be expressed as a contiguous bitmask. Exit early because
2324 the second call of s390_contiguous_bitmask_nowrap_p would accept this as
2325 a valid bitmask. */
963fc8d0 2326 return false;
c2586c82
DV
2327 b = s390_contiguous_bitmask_nowrap_p (in, size, start, end);
2328 if (b)
2329 return true;
2330 if (! wrap_p)
2331 return false;
2332 b = s390_contiguous_bitmask_nowrap_p (~in, size, start, end);
2333 if (b && start)
2334 {
2335 int s = *start;
2336 int e = *end;
963fc8d0 2337
c2586c82
DV
2338 gcc_assert (s >= 1);
2339 *start = ((e + 1) & (bs - 1));
2340 *end = ((s - 1 + bs) & (bs - 1));
2341 }
963fc8d0 2342
c2586c82 2343 return b;
963fc8d0
AK
2344}
2345
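/* Editorial worked example (not part of the original source): with
   WRAP_P true, 0xc3 within 8 bits is accepted because ~0xc3 contains
   the contiguous range 0x3c.  After the wraparound adjustment *START
   is 62 and *END is 57; *START > *END marks a range wrapping around
   the word boundary, with the uppermost 56 bits counting as part of
   the range.  */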
085261c8
AK
2346/* Return true if OP contains the same contiguous bitfield in *all*
2347 its elements. START and END can be used to obtain the start and
2348 end position of the bitfield.
2349
2350 START/END give the position of the first/last bit of the bitfield
2351 counting from the lowest order bit starting with zero. In order to
2352 use these values for S/390 instructions this has to be converted to
2353 "bits big endian" style. */
2354
2355bool
2356s390_contiguous_bitmask_vector_p (rtx op, int *start, int *end)
2357{
2358 unsigned HOST_WIDE_INT mask;
c2586c82 2359 int size;
92695fbb 2360 rtx elt;
c2586c82 2361 bool b;
085261c8 2362
c2586c82 2363 gcc_assert (!!start == !!end);
92695fbb
RS
2364 if (!const_vec_duplicate_p (op, &elt)
2365 || !CONST_INT_P (elt))
085261c8
AK
2366 return false;
2367
085261c8 2368 size = GET_MODE_UNIT_BITSIZE (GET_MODE (op));
1ce8ee74
AK
2369
2370 /* We cannot deal with V1TI/V1TF. This would require a vgmq. */
2371 if (size > 64)
2372 return false;
2373
92695fbb 2374 mask = UINTVAL (elt);
c2586c82
DV
2375
2376 b = s390_contiguous_bitmask_p (mask, true, size, start, end);
2377 if (b)
085261c8 2378 {
c2586c82
DV
2379 if (start)
2380 {
406fde6e
DV
2381 *start -= (HOST_BITS_PER_WIDE_INT - size);
2382 *end -= (HOST_BITS_PER_WIDE_INT - size);
c2586c82 2383 }
085261c8
AK
2384 return true;
2385 }
c2586c82
DV
2386 else
2387 return false;
085261c8
AK
2388}
2389
2390/* Return true if C consists only of byte chunks being either 0 or
2391 0xff. If MASK is !=NULL a byte mask is generated which is
2392 appropriate for the vector generate byte mask instruction. */
2393
2394bool
2395s390_bytemask_vector_p (rtx op, unsigned *mask)
2396{
2397 int i;
2398 unsigned tmp_mask = 0;
2399 int nunit, unit_size;
2400
2401 if (!VECTOR_MODE_P (GET_MODE (op))
2402 || GET_CODE (op) != CONST_VECTOR
2403 || !CONST_INT_P (XVECEXP (op, 0, 0)))
2404 return false;
2405
2406 nunit = GET_MODE_NUNITS (GET_MODE (op));
2407 unit_size = GET_MODE_UNIT_SIZE (GET_MODE (op));
2408
2409 for (i = 0; i < nunit; i++)
2410 {
2411 unsigned HOST_WIDE_INT c;
2412 int j;
2413
2414 if (!CONST_INT_P (XVECEXP (op, 0, i)))
2415 return false;
2416
2417 c = UINTVAL (XVECEXP (op, 0, i));
2418 for (j = 0; j < unit_size; j++)
2419 {
2420 if ((c & 0xff) != 0 && (c & 0xff) != 0xff)
2421 return false;
2422 tmp_mask |= (c & 1) << ((nunit - 1 - i) * unit_size + j);
2423 c = c >> BITS_PER_UNIT;
2424 }
2425 }
2426
2427 if (mask != NULL)
2428 *mask = tmp_mask;
2429
2430 return true;
2431}
2432
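/* Editorial worked example (not part of the original source): the
   V4SI constant with all elements 0x000000ff consists only of 0x00
   and 0xff bytes, so the function succeeds and produces the byte mask
   0x1111 -- one mask bit per byte, here selecting the least
   significant byte of each element.  */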
1a2e356e
RH
2433/* Check whether a rotate of ROTL followed by an AND of CONTIG is
2434 equivalent to a shift followed by the AND. In particular, CONTIG
2435 should not overlap the (rotated) bit 0/bit 63 gap. Negative values
2436 for ROTL indicate a rotate to the right. */
2437
2438bool
2439s390_extzv_shift_ok (int bitsize, int rotl, unsigned HOST_WIDE_INT contig)
2440{
c2586c82 2441 int start, end;
1a2e356e
RH
2442 bool ok;
2443
c2586c82 2444 ok = s390_contiguous_bitmask_nowrap_p (contig, bitsize, &start, &end);
1a2e356e
RH
2445 gcc_assert (ok);
2446
c2586c82
DV
2447 if (rotl >= 0)
2448 return (64 - end >= rotl);
2449 else
2450 {
2451 /* Translate "- rotate right" in BITSIZE mode to "rotate left" in
2452 DImode. */
2453 rotl = -rotl + (64 - bitsize);
2454 return (start >= rotl);
2455 }
1a2e356e
RH
2456}
2457
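/* Editorial worked example (not part of the original source): for
   BITSIZE 32, a rotate left by 8 followed by AND with 0xff00 passes
   the check (64 - end == 9 >= 8): the bits wrapping in from the top
   stay below the mask, so a shift gives the same result.  With
   CONTIG == 0xff the check fails, since the wrapped bits would land
   inside the mask.  */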
c7453384
EC
2458/* Check whether we can (and want to) split a double-word
2459 move in mode MODE from SRC to DST into two single-word
dc65c307
UW
2460 moves, moving the subword FIRST_SUBWORD first. */
2461
2462bool
ef4bddc2 2463s390_split_ok_p (rtx dst, rtx src, machine_mode mode, int first_subword)
dc65c307 2464{
085261c8
AK
2465 /* Floating point and vector registers cannot be split. */
2466 if (FP_REG_P (src) || FP_REG_P (dst) || VECTOR_REG_P (src) || VECTOR_REG_P (dst))
dc65c307
UW
2467 return false;
2468
dc65c307
UW
2469 /* Non-offsettable memory references cannot be split. */
2470 if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
2471 || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
2472 return false;
2473
2474 /* Moving the first subword must not clobber a register
2475 needed to move the second subword. */
2476 if (register_operand (dst, mode))
2477 {
2478 rtx subreg = operand_subword (dst, first_subword, 0, mode);
2479 if (reg_overlap_mentioned_p (subreg, src))
2480 return false;
2481 }
2482
2483 return true;
2484}
2485
bcf8c1cc
AK
2486/* Return true if it can be proven that [MEM1, MEM1 + SIZE]
2487 and [MEM2, MEM2 + SIZE] do overlap and false
2488 otherwise. */
2489
2490bool
2491s390_overlap_p (rtx mem1, rtx mem2, HOST_WIDE_INT size)
2492{
2493 rtx addr1, addr2, addr_delta;
2494 HOST_WIDE_INT delta;
2495
2496 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
2497 return true;
2498
2499 if (size == 0)
2500 return false;
2501
2502 addr1 = XEXP (mem1, 0);
2503 addr2 = XEXP (mem2, 0);
2504
2505 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
2506
2507 /* This overlapping check is used by peepholes merging memory block operations.
2508 Overlapping operations would otherwise be recognized by the S/390 hardware
f4aa3848 2509 and would fall back to a slower implementation. Allowing overlapping
bcf8c1cc 2510 operations would lead to slow code but not to wrong code. Therefore we are
f4aa3848 2511 somewhat optimistic if we cannot prove that the memory blocks are
bcf8c1cc
AK
2512 overlapping.
2513 That's why we return false here although this may accept operations on
2514 overlapping memory areas. */
2515 if (!addr_delta || GET_CODE (addr_delta) != CONST_INT)
2516 return false;
2517
2518 delta = INTVAL (addr_delta);
2519
2520 if (delta == 0
2521 || (delta > 0 && delta < size)
2522 || (delta < 0 && -delta < size))
2523 return true;
2524
2525 return false;
2526}
2527
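/* Editorial worked example (not part of the original source): for
   MEM1 with address R and MEM2 with address (plus R (const_int 8)),
   the address delta folds to the constant 8.  With SIZE == 8 the
   blocks are disjoint and s390_overlap_p returns false; with
   SIZE == 16 the blocks overlap and it returns true.  */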
19b63d8e
UW
2528/* Check whether the address of memory reference MEM2 equals exactly
2529 the address of memory reference MEM1 plus DELTA. Return true if
2530 we can prove this to be the case, false otherwise. */
2531
2532bool
2533s390_offset_p (rtx mem1, rtx mem2, rtx delta)
2534{
2535 rtx addr1, addr2, addr_delta;
2536
2537 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
2538 return false;
2539
2540 addr1 = XEXP (mem1, 0);
2541 addr2 = XEXP (mem2, 0);
2542
2543 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
2544 if (!addr_delta || !rtx_equal_p (addr_delta, delta))
2545 return false;
2546
2547 return true;
2548}
2549
8cb66696
UW
2550/* Expand logical operator CODE in mode MODE with operands OPERANDS. */
2551
2552void
ef4bddc2 2553s390_expand_logical_operator (enum rtx_code code, machine_mode mode,
8cb66696
UW
2554 rtx *operands)
2555{
ef4bddc2 2556 machine_mode wmode = mode;
8cb66696
UW
2557 rtx dst = operands[0];
2558 rtx src1 = operands[1];
2559 rtx src2 = operands[2];
2560 rtx op, clob, tem;
2561
2562 /* If we cannot handle the operation directly, use a temp register. */
2563 if (!s390_logical_operator_ok_p (operands))
2564 dst = gen_reg_rtx (mode);
2565
2566 /* QImode and HImode patterns make sense only if we have a destination
2567 in memory. Otherwise perform the operation in SImode. */
2568 if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
2569 wmode = SImode;
2570
2571 /* Widen operands if required. */
2572 if (mode != wmode)
2573 {
2574 if (GET_CODE (dst) == SUBREG
2575 && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
2576 dst = tem;
2577 else if (REG_P (dst))
2578 dst = gen_rtx_SUBREG (wmode, dst, 0);
2579 else
2580 dst = gen_reg_rtx (wmode);
2581
2582 if (GET_CODE (src1) == SUBREG
2583 && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
2584 src1 = tem;
2585 else if (GET_MODE (src1) != VOIDmode)
2586 src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);
2587
2588 if (GET_CODE (src2) == SUBREG
2589 && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
2590 src2 = tem;
2591 else if (GET_MODE (src2) != VOIDmode)
2592 src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
2593 }
2594
2595 /* Emit the instruction. */
f7df4a84 2596 op = gen_rtx_SET (dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
8cb66696
UW
2597 clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
2598 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));
2599
2600 /* Fix up the destination if needed. */
2601 if (dst != operands[0])
2602 emit_move_insn (operands[0], gen_lowpart (mode, dst));
2603}
2604
2605/* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR). */
2606
2607bool
2608s390_logical_operator_ok_p (rtx *operands)
2609{
2610 /* If the destination operand is in memory, it needs to coincide
2611 with one of the source operands. After reload, it has to be
2612 the first source operand. */
2613 if (GET_CODE (operands[0]) == MEM)
2614 return rtx_equal_p (operands[0], operands[1])
2615 || (!reload_completed && rtx_equal_p (operands[0], operands[2]));
2616
2617 return true;
2618}
2619
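/* Editorial sketch (not in the original source; the function name is
   hypothetical): expanding a QImode XOR whose destination is a
   register.  Since the destination is not in memory, the expander
   above performs the operation in SImode and fixes up the result.  */

static void
s390_expand_qi_xor_sketch (rtx dst, rtx src1, rtx src2)
{
  rtx ops[3] = { dst, src1, src2 };

  /* Emits the widened XOR pattern plus a CC clobber and, if needed,
     a move back into the QImode destination.  */
  s390_expand_logical_operator (XOR, QImode, ops);
}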
0dfa6c5e
UW
2620/* Narrow logical operation CODE of memory operand MEMOP with immediate
2621 operand IMMOP to switch from SS to SI type instructions. */
2622
2623void
2624s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
2625{
2626 int def = code == AND ? -1 : 0;
2627 HOST_WIDE_INT mask;
2628 int part;
2629
2630 gcc_assert (GET_CODE (*memop) == MEM);
2631 gcc_assert (!MEM_VOLATILE_P (*memop));
2632
2633 mask = s390_extract_part (*immop, QImode, def);
2634 part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
2635 gcc_assert (part >= 0);
2636
2637 *memop = adjust_address (*memop, QImode, part);
2638 *immop = gen_int_mode (mask, QImode);
2639}
2640
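/* Editorial worked example (not part of the original source):
   narrowing (and:SI (mem:SI X) (const_int 0xffffff00)) with DEF == -1.
   s390_extract_part yields the only byte unequal to 0xff, namely 0x00,
   and s390_single_part returns 4 - 1 - 0 == 3, so the operation
   becomes a QImode AND of the byte at offset 3 (the least significant
   byte) with mask 0x00 -- a single NI instruction.  */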
ba956982 2641
ab96de7e
AS
2642/* How to allocate a 'struct machine_function'. */
2643
2644static struct machine_function *
2645s390_init_machine_status (void)
2646{
766090c2 2647 return ggc_cleared_alloc<machine_function> ();
ab96de7e
AS
2648}
2649
9db1d521
HP
2650/* Map for smallest class containing reg regno. */
2651
0139adca 2652const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
085261c8
AK
2653{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS, /* 0 */
2654 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS, /* 4 */
2655 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS, /* 8 */
2656 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS, /* 12 */
2657 FP_REGS, FP_REGS, FP_REGS, FP_REGS, /* 16 */
2658 FP_REGS, FP_REGS, FP_REGS, FP_REGS, /* 20 */
2659 FP_REGS, FP_REGS, FP_REGS, FP_REGS, /* 24 */
2660 FP_REGS, FP_REGS, FP_REGS, FP_REGS, /* 28 */
2661 ADDR_REGS, CC_REGS, ADDR_REGS, ADDR_REGS, /* 32 */
2662 ACCESS_REGS, ACCESS_REGS, VEC_REGS, VEC_REGS, /* 36 */
2663 VEC_REGS, VEC_REGS, VEC_REGS, VEC_REGS, /* 40 */
2664 VEC_REGS, VEC_REGS, VEC_REGS, VEC_REGS, /* 44 */
2665 VEC_REGS, VEC_REGS, VEC_REGS, VEC_REGS, /* 48 */
2666 VEC_REGS, VEC_REGS /* 52 */
9db1d521
HP
2667};
2668
077dab3b
HP
2669/* Return attribute type of insn. */
2670
2671static enum attr_type
647d790d 2672s390_safe_attr_type (rtx_insn *insn)
077dab3b
HP
2673{
2674 if (recog_memoized (insn) >= 0)
2675 return get_attr_type (insn);
2676 else
2677 return TYPE_NONE;
2678}
9db1d521 2679
d3632d41
UW
2680/* Return true if DISP is a valid short displacement. */
2681
3ed99cc9 2682static bool
9c808aad 2683s390_short_displacement (rtx disp)
d3632d41
UW
2684{
2685 /* No displacement is OK. */
2686 if (!disp)
3ed99cc9 2687 return true;
d3632d41 2688
4fe6dea8
AK
2689 /* Without the long displacement facility we don't need to
2690 distinguish between long and short displacements. */
2691 if (!TARGET_LONG_DISPLACEMENT)
2692 return true;
2693
d3632d41
UW
2694 /* Integer displacement in range. */
2695 if (GET_CODE (disp) == CONST_INT)
2696 return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;
2697
2698 /* GOT offset is not OK, the GOT can be large. */
2699 if (GET_CODE (disp) == CONST
2700 && GET_CODE (XEXP (disp, 0)) == UNSPEC
227a39fa
UW
2701 && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
2702 || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
3ed99cc9 2703 return false;
d3632d41
UW
2704
2705 /* All other symbolic constants are literal pool references,
2706 which are OK as the literal pool must be small. */
2707 if (GET_CODE (disp) == CONST)
3ed99cc9 2708 return true;
d3632d41 2709
3ed99cc9 2710 return false;
d3632d41
UW
2711}
2712
ab96de7e
AS
2713/* Decompose a RTL expression ADDR for a memory address into
2714 its components, returned in OUT.
ccfc6cc8 2715
3ed99cc9 2716 Returns false if ADDR is not a valid memory address, true
ab96de7e
AS
2717 otherwise. If OUT is NULL, don't return the components,
2718 but check for validity only.
ccfc6cc8 2719
ab96de7e
AS
2720 Note: Only addresses in canonical form are recognized.
2721 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
2722 canonical form so that they will be recognized. */
f19a9af7 2723
ab96de7e 2724static int
5d81b82b 2725s390_decompose_address (rtx addr, struct s390_address *out)
ab96de7e
AS
2726{
2727 HOST_WIDE_INT offset = 0;
2728 rtx base = NULL_RTX;
2729 rtx indx = NULL_RTX;
2730 rtx disp = NULL_RTX;
2731 rtx orig_disp;
3ed99cc9
AS
2732 bool pointer = false;
2733 bool base_ptr = false;
2734 bool indx_ptr = false;
f01cf809
UW
2735 bool literal_pool = false;
2736
2737 /* We may need to substitute the literal pool base register into the address
2738 below. However, at this point we do not know which register is going to
2739 be used as base, so we substitute the arg pointer register. This is going
2740 to be treated as holding a pointer below -- it shouldn't be used for any
2741 other purpose. */
2742 rtx fake_pool_base = gen_rtx_REG (Pmode, ARG_POINTER_REGNUM);
0dfa6c5e 2743
ab96de7e 2744 /* Decompose address into base + index + displacement. */
0dfa6c5e 2745
ab96de7e
AS
2746 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
2747 base = addr;
0dfa6c5e 2748
ab96de7e 2749 else if (GET_CODE (addr) == PLUS)
e221ef54 2750 {
ab96de7e
AS
2751 rtx op0 = XEXP (addr, 0);
2752 rtx op1 = XEXP (addr, 1);
2753 enum rtx_code code0 = GET_CODE (op0);
2754 enum rtx_code code1 = GET_CODE (op1);
e221ef54 2755
ab96de7e
AS
2756 if (code0 == REG || code0 == UNSPEC)
2757 {
2758 if (code1 == REG || code1 == UNSPEC)
2759 {
2760 indx = op0; /* index + base */
2761 base = op1;
2762 }
e221ef54 2763
ab96de7e
AS
2764 else
2765 {
2766 base = op0; /* base + displacement */
2767 disp = op1;
2768 }
2769 }
ccfc6cc8 2770
ab96de7e 2771 else if (code0 == PLUS)
d3632d41 2772 {
ab96de7e
AS
2773 indx = XEXP (op0, 0); /* index + base + disp */
2774 base = XEXP (op0, 1);
2775 disp = op1;
d3632d41 2776 }
d3632d41 2777
ab96de7e 2778 else
d3632d41 2779 {
3ed99cc9 2780 return false;
d3632d41 2781 }
ab96de7e 2782 }
d3632d41 2783
ab96de7e
AS
2784 else
2785 disp = addr; /* displacement */
d3632d41 2786
ab96de7e
AS
2787 /* Extract integer part of displacement. */
2788 orig_disp = disp;
2789 if (disp)
2790 {
2791 if (GET_CODE (disp) == CONST_INT)
d3632d41 2792 {
ab96de7e
AS
2793 offset = INTVAL (disp);
2794 disp = NULL_RTX;
d3632d41 2795 }
ab96de7e
AS
2796 else if (GET_CODE (disp) == CONST
2797 && GET_CODE (XEXP (disp, 0)) == PLUS
2798 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
2799 {
2800 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
2801 disp = XEXP (XEXP (disp, 0), 0);
2802 }
2803 }
d3632d41 2804
ab96de7e
AS
2805 /* Strip off CONST here to avoid special case tests later. */
2806 if (disp && GET_CODE (disp) == CONST)
2807 disp = XEXP (disp, 0);
ac32b25e 2808
ab96de7e
AS
2809 /* We can convert literal pool addresses to
2810 displacements by basing them off the base register. */
2811 if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
2812 {
085db63d
AK
2813 if (base || indx)
2814 return false;
2815
2816 base = fake_pool_base, literal_pool = true;
ab96de7e
AS
2817
2818 /* Mark up the displacement. */
2819 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
2820 UNSPEC_LTREL_OFFSET);
d3632d41 2821 }
ccfc6cc8 2822
ab96de7e
AS
2823 /* Validate base register. */
2824 if (base)
2825 {
2826 if (GET_CODE (base) == UNSPEC)
2827 switch (XINT (base, 1))
2828 {
2829 case UNSPEC_LTREF:
2830 if (!disp)
f4aa3848 2831 disp = gen_rtx_UNSPEC (Pmode,
ab96de7e
AS
2832 gen_rtvec (1, XVECEXP (base, 0, 0)),
2833 UNSPEC_LTREL_OFFSET);
2834 else
3ed99cc9 2835 return false;
ccfc6cc8 2836
f01cf809 2837 base = XVECEXP (base, 0, 1);
ab96de7e 2838 break;
f19a9af7 2839
ab96de7e 2840 case UNSPEC_LTREL_BASE:
f01cf809
UW
2841 if (XVECLEN (base, 0) == 1)
2842 base = fake_pool_base, literal_pool = true;
2843 else
2844 base = XVECEXP (base, 0, 1);
ab96de7e 2845 break;
f19a9af7 2846
ab96de7e 2847 default:
3ed99cc9 2848 return false;
ab96de7e 2849 }
f19a9af7 2850
c64181a8 2851 if (!REG_P (base) || GET_MODE (base) != Pmode)
3ed99cc9 2852 return false;
ab96de7e 2853
f01cf809 2854 if (REGNO (base) == STACK_POINTER_REGNUM
ab96de7e
AS
2855 || REGNO (base) == FRAME_POINTER_REGNUM
2856 || ((reload_completed || reload_in_progress)
2857 && frame_pointer_needed
2858 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
2859 || REGNO (base) == ARG_POINTER_REGNUM
2860 || (flag_pic
2861 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
3ed99cc9 2862 pointer = base_ptr = true;
f01cf809
UW
2863
2864 if ((reload_completed || reload_in_progress)
2865 && base == cfun->machine->base_reg)
2866 pointer = base_ptr = literal_pool = true;
ab96de7e
AS
2867 }
2868
2869 /* Validate index register. */
2870 if (indx)
f19a9af7 2871 {
ab96de7e
AS
2872 if (GET_CODE (indx) == UNSPEC)
2873 switch (XINT (indx, 1))
2874 {
2875 case UNSPEC_LTREF:
2876 if (!disp)
f4aa3848 2877 disp = gen_rtx_UNSPEC (Pmode,
ab96de7e
AS
2878 gen_rtvec (1, XVECEXP (indx, 0, 0)),
2879 UNSPEC_LTREL_OFFSET);
2880 else
3ed99cc9 2881 return false;
f19a9af7 2882
f01cf809 2883 indx = XVECEXP (indx, 0, 1);
ab96de7e 2884 break;
f19a9af7 2885
ab96de7e 2886 case UNSPEC_LTREL_BASE:
f01cf809
UW
2887 if (XVECLEN (indx, 0) == 1)
2888 indx = fake_pool_base, literal_pool = true;
2889 else
2890 indx = XVECEXP (indx, 0, 1);
ab96de7e 2891 break;
f19a9af7 2892
ab96de7e 2893 default:
3ed99cc9 2894 return false;
ab96de7e 2895 }
f19a9af7 2896
c64181a8 2897 if (!REG_P (indx) || GET_MODE (indx) != Pmode)
3ed99cc9 2898 return false;
f19a9af7 2899
f01cf809 2900 if (REGNO (indx) == STACK_POINTER_REGNUM
ab96de7e
AS
2901 || REGNO (indx) == FRAME_POINTER_REGNUM
2902 || ((reload_completed || reload_in_progress)
2903 && frame_pointer_needed
2904 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
2905 || REGNO (indx) == ARG_POINTER_REGNUM
2906 || (flag_pic
2907 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
3ed99cc9 2908 pointer = indx_ptr = true;
f01cf809
UW
2909
2910 if ((reload_completed || reload_in_progress)
2911 && indx == cfun->machine->base_reg)
2912 pointer = indx_ptr = literal_pool = true;
ab96de7e 2913 }
38899e29 2914
ab96de7e
AS
2915 /* Prefer to use pointer as base, not index. */
2916 if (base && indx && !base_ptr
2917 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
2918 {
2919 rtx tmp = base;
2920 base = indx;
2921 indx = tmp;
2922 }
f19a9af7 2923
ab96de7e
AS
2924 /* Validate displacement. */
2925 if (!disp)
2926 {
f4aa3848
AK
2927 /* If virtual registers are involved, the displacement will change later
2928 anyway as the virtual registers get eliminated. This could make a
2929 valid displacement invalid, but it is more likely to make an invalid
2930 displacement valid, because we sometimes access the register save area
63296cb1 2931 via negative offsets to one of those registers.
ab96de7e
AS
2932 Thus we don't check the displacement for validity here. If after
2933 elimination the displacement turns out to be invalid after all,
2934 this is fixed up by reload in any case. */
3597e113
VM
2935 /* LRA always keeps displacements up to date, and we need to
2936 know that the displacement is right throughout LRA, not only
2937 at the final elimination. */
2938 if (lra_in_progress
2939 || (base != arg_pointer_rtx
2940 && indx != arg_pointer_rtx
2941 && base != return_address_pointer_rtx
2942 && indx != return_address_pointer_rtx
2943 && base != frame_pointer_rtx
2944 && indx != frame_pointer_rtx
2945 && base != virtual_stack_vars_rtx
2946 && indx != virtual_stack_vars_rtx))
ab96de7e 2947 if (!DISP_IN_RANGE (offset))
3ed99cc9 2948 return false;
ab96de7e
AS
2949 }
2950 else
2951 {
2952 /* All the special cases are pointers. */
3ed99cc9 2953 pointer = true;
f19a9af7 2954
ab96de7e
AS
2955 /* In the small-PIC case, the linker converts @GOT
2956 and @GOTNTPOFF offsets to possible displacements. */
2957 if (GET_CODE (disp) == UNSPEC
2958 && (XINT (disp, 1) == UNSPEC_GOT
2959 || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
ab96de7e
AS
2960 && flag_pic == 1)
2961 {
2962 ;
2963 }
f19a9af7 2964
dc66391d
RS
2965 /* Accept pool label offsets. */
2966 else if (GET_CODE (disp) == UNSPEC
2967 && XINT (disp, 1) == UNSPEC_POOL_OFFSET)
2968 ;
f19a9af7 2969
ab96de7e
AS
2970 /* Accept literal pool references. */
2971 else if (GET_CODE (disp) == UNSPEC
2972 && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
2973 {
bc6ce334
AK
2974 /* In case CSE pulled a non literal pool reference out of
2975 the pool we have to reject the address. This is
2976 especially important when loading the GOT pointer on non
2977 zarch CPUs. In this case the literal pool contains an lt
2978 relative offset to the _GLOBAL_OFFSET_TABLE_ label which
2979 will most likely exceed the displacement. */
2980 if (GET_CODE (XVECEXP (disp, 0, 0)) != SYMBOL_REF
2981 || !CONSTANT_POOL_ADDRESS_P (XVECEXP (disp, 0, 0)))
2982 return false;
2983
ab96de7e
AS
2984 orig_disp = gen_rtx_CONST (Pmode, disp);
2985 if (offset)
2986 {
2987 /* If we have an offset, make sure it does not
2988 exceed the size of the constant pool entry. */
2989 rtx sym = XVECEXP (disp, 0, 0);
2990 if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
3ed99cc9 2991 return false;
f19a9af7 2992
0a81f074 2993 orig_disp = plus_constant (Pmode, orig_disp, offset);
ab96de7e
AS
2994 }
2995 }
2996
2997 else
3ed99cc9 2998 return false;
f19a9af7
AK
2999 }
3000
ab96de7e 3001 if (!base && !indx)
3ed99cc9 3002 pointer = true;
ab96de7e
AS
3003
3004 if (out)
3005 {
3006 out->base = base;
3007 out->indx = indx;
3008 out->disp = orig_disp;
3009 out->pointer = pointer;
f01cf809 3010 out->literal_pool = literal_pool;
ab96de7e
AS
3011 }
3012
3ed99cc9 3013 return true;
f19a9af7
AK
3014}
3015
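/* Editorial worked example (not part of the original source):
   decomposing (plus (plus (reg A) (reg B)) (const_int 100)) yields
   indx == A, base == B and disp == 100 (the registers may still be
   swapped by the pointer preference above); a lone (const_int N)
   becomes a displacement-only address, provided N is in range.  */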
dd95128b
AK
3016/* Decompose a RTL expression OP for an address style operand into its
3017 components, and return the base register in BASE and the offset in
3018 OFFSET. While OP looks like an address, it is never supposed to be
3019 used as such.
d98ad410 3020
dd95128b 3021 Return true if OP is a valid address operand, false if not. */
d98ad410
UW
3022
3023bool
dd95128b
AK
3024s390_decompose_addrstyle_without_index (rtx op, rtx *base,
3025 HOST_WIDE_INT *offset)
d98ad410 3026{
191eb16d 3027 rtx off = NULL_RTX;
d98ad410 3028
d98ad410
UW
3029 /* We can have an integer constant, an address register,
3030 or a sum of the two. */
191eb16d 3031 if (CONST_SCALAR_INT_P (op))
d98ad410 3032 {
191eb16d 3033 off = op;
d98ad410
UW
3034 op = NULL_RTX;
3035 }
191eb16d 3036 if (op && GET_CODE (op) == PLUS && CONST_SCALAR_INT_P (XEXP (op, 1)))
d98ad410 3037 {
191eb16d 3038 off = XEXP (op, 1);
d98ad410
UW
3039 op = XEXP (op, 0);
3040 }
3041 while (op && GET_CODE (op) == SUBREG)
3042 op = SUBREG_REG (op);
3043
3044 if (op && GET_CODE (op) != REG)
3045 return false;
3046
3047 if (offset)
191eb16d
AK
3048 {
3049 if (off == NULL_RTX)
3050 *offset = 0;
3051 else if (CONST_INT_P (off))
3052 *offset = INTVAL (off);
3053 else if (CONST_WIDE_INT_P (off))
3054 /* The offset will anyway be cut down to 12 bits so take just
3055 the lowest order chunk of the wide int. */
3056 *offset = CONST_WIDE_INT_ELT (off, 0);
3057 else
3058 gcc_unreachable ();
3059 }
d98ad410
UW
3060 if (base)
3061 *base = op;
3062
3063 return true;
3064}
3065
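/* Editorial worked example (not part of the original source): for a
   shift-count operand such as (plus (reg R) (const_int 7)) the
   function stores R in *BASE and 7 in *OFFSET; a plain (const_int 63)
   gives a null base and offset 63.  */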
3066
ab96de7e 3067/* Return true if CODE is a valid address without index. */
3c50106f 3068
ab96de7e
AS
3069bool
3070s390_legitimate_address_without_index_p (rtx op)
3071{
3072 struct s390_address addr;
3073
3074 if (!s390_decompose_address (XEXP (op, 0), &addr))
3075 return false;
3076 if (addr.indx)
3077 return false;
3078
3079 return true;
3080}
3081
cd8dc1f9 3082
0ff4390d
AK
3083/* Return TRUE if ADDR is an operand valid for a load/store relative
3084 instruction. Be aware that the alignment of the operand needs to
3085 be checked separately.
3086 Valid addresses are single references or a sum of a reference and a
3087 constant integer. Return these parts in SYMREF and ADDEND. You can
3088 pass NULL in REF and/or ADDEND if you are not interested in these
3089 values. Literal pool references are *not* considered symbol
3090 references. */
ab96de7e 3091
4fe6dea8 3092static bool
0ff4390d 3093s390_loadrelative_operand_p (rtx addr, rtx *symref, HOST_WIDE_INT *addend)
ab96de7e 3094{
4fe6dea8 3095 HOST_WIDE_INT tmpaddend = 0;
ab96de7e 3096
4fe6dea8
AK
3097 if (GET_CODE (addr) == CONST)
3098 addr = XEXP (addr, 0);
3099
3100 if (GET_CODE (addr) == PLUS)
ab96de7e 3101 {
0ff4390d 3102 if (!CONST_INT_P (XEXP (addr, 1)))
4fe6dea8 3103 return false;
ab96de7e 3104
0ff4390d
AK
3105 tmpaddend = INTVAL (XEXP (addr, 1));
3106 addr = XEXP (addr, 0);
3107 }
98412b77 3108
0ff4390d
AK
3109 if ((GET_CODE (addr) == SYMBOL_REF && !CONSTANT_POOL_ADDRESS_P (addr))
3110 || (GET_CODE (addr) == UNSPEC
3111 && (XINT (addr, 1) == UNSPEC_GOTENT
3112 || (TARGET_CPU_ZARCH && XINT (addr, 1) == UNSPEC_PLT))))
3113 {
3114 if (symref)
3115 *symref = addr;
3116 if (addend)
3117 *addend = tmpaddend;
98412b77 3118
0ff4390d
AK
3119 return true;
3120 }
3121 return false;
98412b77 3122}
4fe6dea8
AK
3123
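/* Editorial worked example (not part of the original source):
   (const (plus (symbol_ref "foo") (const_int 16))) is accepted with
   *SYMREF set to the symbol_ref and *ADDEND to 16, provided "foo" is
   not a literal pool entry; "foo" is a hypothetical symbol name.  */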
3124/* Return true if the address in OP is valid for constraint letter C
3125 if wrapped in a MEM rtx. Set LIT_POOL_OK to true if literal
3126 pool MEMs should be accepted. Only the Q, R, S, T constraint
3127 letters are allowed for C. */
ab96de7e 3128
4fe6dea8
AK
3129static int
3130s390_check_qrst_address (char c, rtx op, bool lit_pool_ok)
3131{
3132 struct s390_address addr;
3133 bool decomposed = false;
3134
00e0af8d
AK
3135 if (!address_operand (op, GET_MODE (op)))
3136 return 0;
3137
4fe6dea8
AK
3138 /* This check makes sure that no symbolic address (except literal
3139 pool references) are accepted by the R or T constraints. */
0ff4390d 3140 if (s390_loadrelative_operand_p (op, NULL, NULL))
98635b04
UW
3141 return 0;
3142
3143 /* Ensure literal pool references are only accepted if LIT_POOL_OK. */
3144 if (!lit_pool_ok)
ab96de7e 3145 {
4fe6dea8 3146 if (!s390_decompose_address (op, &addr))
ab96de7e 3147 return 0;
98635b04 3148 if (addr.literal_pool)
ab96de7e 3149 return 0;
4fe6dea8 3150 decomposed = true;
ab96de7e
AS
3151 }
3152
3e4be43f
UW
3153 /* With reload, we sometimes get intermediate address forms that are
3154 actually invalid as-is, but we need to accept them in the most
3155 generic cases below ('R' or 'T'), since reload will in fact fix
3156 them up. LRA behaves differently here; we never see such forms,
3157 but on the other hand, we need to strictly reject every invalid
3158 address form. Perform this check right up front. */
3159 if (lra_in_progress)
3160 {
3161 if (!decomposed && !s390_decompose_address (op, &addr))
3162 return 0;
3163 decomposed = true;
3164 }
3165
ab96de7e
AS
3166 switch (c)
3167 {
4fe6dea8
AK
3168 case 'Q': /* no index short displacement */
3169 if (!decomposed && !s390_decompose_address (op, &addr))
ab96de7e
AS
3170 return 0;
3171 if (addr.indx)
3172 return 0;
4fe6dea8 3173 if (!s390_short_displacement (addr.disp))
ab96de7e 3174 return 0;
4fe6dea8 3175 break;
ab96de7e 3176
4fe6dea8 3177 case 'R': /* with index short displacement */
ab96de7e
AS
3178 if (TARGET_LONG_DISPLACEMENT)
3179 {
4fe6dea8 3180 if (!decomposed && !s390_decompose_address (op, &addr))
ab96de7e
AS
3181 return 0;
3182 if (!s390_short_displacement (addr.disp))
3183 return 0;
3184 }
4fe6dea8
AK
3185 /* Any invalid address here will be fixed up by reload,
3186 so accept it for the most generic constraint. */
ab96de7e
AS
3187 break;
3188
4fe6dea8 3189 case 'S': /* no index long displacement */
4fe6dea8 3190 if (!decomposed && !s390_decompose_address (op, &addr))
ab96de7e
AS
3191 return 0;
3192 if (addr.indx)
3193 return 0;
ab96de7e
AS
3194 break;
3195
4fe6dea8 3196 case 'T': /* with index long displacement */
4fe6dea8
AK
3197 /* Any invalid address here will be fixed up by reload,
3198 so accept it for the most generic constraint. */
ab96de7e 3199 break;
3e4be43f 3200
4fe6dea8
AK
3201 default:
3202 return 0;
3203 }
3204 return 1;
3205}
ab96de7e 3206
ab96de7e 3207
4fe6dea8 3208/* Evaluates constraint strings described by the regular expression
3e4be43f 3209 ([A|B|Z](Q|R|S|T))|Y and returns 1 if OP is a valid operand for
4fe6dea8
AK
3210 the constraint given in STR, or 0 else. */
3211
3212int
3213s390_mem_constraint (const char *str, rtx op)
3214{
3215 char c = str[0];
3216
3217 switch (c)
3218 {
3219 case 'A':
3220 /* Check for offsettable variants of memory constraints. */
3221 if (!MEM_P (op) || MEM_VOLATILE_P (op))
ab96de7e 3222 return 0;
4fe6dea8
AK
3223 if ((reload_completed || reload_in_progress)
3224 ? !offsettable_memref_p (op) : !offsettable_nonstrict_memref_p (op))
963fc8d0 3225 return 0;
4fe6dea8
AK
3226 return s390_check_qrst_address (str[1], XEXP (op, 0), true);
3227 case 'B':
3228 /* Check for non-literal-pool variants of memory constraints. */
3229 if (!MEM_P (op))
ab96de7e 3230 return 0;
4fe6dea8
AK
3231 return s390_check_qrst_address (str[1], XEXP (op, 0), false);
3232 case 'Q':
3233 case 'R':
3234 case 'S':
3235 case 'T':
3236 if (GET_CODE (op) != MEM)
3237 return 0;
3238 return s390_check_qrst_address (c, XEXP (op, 0), true);
ab96de7e 3239 case 'Y':
d98ad410
UW
3240 /* Simply check for the basic form of a shift count. Reload will
3241 take care of making sure we have a proper base register. */
dd95128b 3242 if (!s390_decompose_addrstyle_without_index (op, NULL, NULL))
d98ad410
UW
3243 return 0;
3244 break;
4fe6dea8
AK
3245 case 'Z':
3246 return s390_check_qrst_address (str[1], op, true);
ab96de7e
AS
3247 default:
3248 return 0;
3249 }
ab96de7e
AS
3250 return 1;
3251}
3252
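/* Editorial worked example (not part of the original source):
   constraint "AQ" accepts an offsettable, non-volatile MEM whose
   address has no index register and a short (0..4095) displacement;
   "BQ" additionally rejects literal pool references; "ZQ" applies the
   same address test to an operand that is an address itself rather
   than a MEM.  */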
cd8dc1f9 3253
cd8dc1f9
WG
3254/* Evaluates constraint strings starting with letter O. Input
3255 parameter C is the letter following the "O" in the constraint
3256 string. Returns 1 if VALUE meets the respective constraint and 0
3257 otherwise. */
ab96de7e 3258
d096725d 3259int
cd8dc1f9 3260s390_O_constraint_str (const char c, HOST_WIDE_INT value)
d096725d 3261{
cd8dc1f9
WG
3262 if (!TARGET_EXTIMM)
3263 return 0;
d096725d 3264
cd8dc1f9 3265 switch (c)
d096725d 3266 {
cd8dc1f9
WG
3267 case 's':
3268 return trunc_int_for_mode (value, SImode) == value;
3269
3270 case 'p':
3271 return value == 0
3272 || s390_single_part (GEN_INT (value), DImode, SImode, 0) == 1;
3273
3274 case 'n':
ee3f3449 3275 return s390_single_part (GEN_INT (value - 1), DImode, SImode, -1) == 1;
cd8dc1f9 3276
d096725d 3277 default:
cd8dc1f9 3278 gcc_unreachable ();
d096725d
AS
3279 }
3280}
3281
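/* Editorial worked example (not part of the original source): "Os"
   accepts any VALUE that is unchanged by trunc_int_for_mode (VALUE,
   SImode), e.g. -1; "Op" accepts 0xffffffff because its only nonzero
   SImode part is the low one (s390_single_part returns 1);
   0x1ffffffff matches neither constraint.  */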
cd8dc1f9
WG
3282
3283/* Evaluates constraint strings starting with letter N. Parameter STR
3284 contains the letters following letter "N" in the constraint string.
3285 Returns true if VALUE matches the constraint. */
d096725d 3286
ab96de7e 3287int
cd8dc1f9 3288s390_N_constraint_str (const char *str, HOST_WIDE_INT value)
ab96de7e 3289{
ef4bddc2 3290 machine_mode mode, part_mode;
ab96de7e
AS
3291 int def;
3292 int part, part_goal;
3293
ab96de7e 3294
cd8dc1f9
WG
3295 if (str[0] == 'x')
3296 part_goal = -1;
3297 else
3298 part_goal = str[0] - '0';
ab96de7e 3299
cd8dc1f9
WG
3300 switch (str[1])
3301 {
3302 case 'Q':
3303 part_mode = QImode;
ab96de7e 3304 break;
cd8dc1f9
WG
3305 case 'H':
3306 part_mode = HImode;
ec24698e 3307 break;
cd8dc1f9
WG
3308 case 'S':
3309 part_mode = SImode;
3310 break;
3311 default:
3312 return 0;
3313 }
ec24698e 3314
cd8dc1f9
WG
3315 switch (str[2])
3316 {
3317 case 'H':
3318 mode = HImode;
3319 break;
3320 case 'S':
3321 mode = SImode;
3322 break;
3323 case 'D':
3324 mode = DImode;
3325 break;
3326 default:
3327 return 0;
3328 }
11598938 3329
cd8dc1f9
WG
3330 switch (str[3])
3331 {
3332 case '0':
3333 def = 0;
3334 break;
3335 case 'F':
3336 def = -1;
3337 break;
ab96de7e
AS
3338 default:
3339 return 0;
3340 }
3341
cd8dc1f9
WG
3342 if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
3343 return 0;
3344
3345 part = s390_single_part (GEN_INT (value), mode, part_mode, def);
3346 if (part < 0)
3347 return 0;
3348 if (part_goal != -1 && part_goal != part)
3349 return 0;
3350
ab96de7e
AS
3351 return 1;
3352}
3353
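/* Editorial worked example (not part of the original source): the
   string "0HD0" following the "N" requires VALUE, viewed in DImode,
   to contain exactly one HImode part unequal to 0, located in part 0
   (the most significant).  0x1234000000000000 matches; for
   0x0000123400000000 the differing part is part 1, matching "1HD0".  */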
cd8dc1f9
WG
3354
3355/* Returns true if the input parameter VALUE is a float zero. */
3356
3357int
3358s390_float_const_zero_p (rtx value)
3359{
3360 return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
3361 && value == CONST0_RTX (GET_MODE (value)));
3362}
3363
ccaed3ba
AS
3364/* Implement TARGET_REGISTER_MOVE_COST. */
3365
3366static int
f954fb25 3367s390_register_move_cost (machine_mode mode,
ccaed3ba
AS
3368 reg_class_t from, reg_class_t to)
3369{
f954fb25
AK
3370 /* On s390, copy between fprs and gprs is expensive. */
3371
3372 /* It becomes somewhat faster having ldgr/lgdr. */
3373 if (TARGET_Z10 && GET_MODE_SIZE (mode) == 8)
3374 {
3375 /* ldgr is single cycle. */
3376 if (reg_classes_intersect_p (from, GENERAL_REGS)
3377 && reg_classes_intersect_p (to, FP_REGS))
3378 return 1;
3379 /* lgdr needs 3 cycles. */
3380 if (reg_classes_intersect_p (to, GENERAL_REGS)
3381 && reg_classes_intersect_p (from, FP_REGS))
3382 return 3;
3383 }
3384
3385 /* Otherwise copying is done via memory. */
3386 if ((reg_classes_intersect_p (from, GENERAL_REGS)
3387 && reg_classes_intersect_p (to, FP_REGS))
3388 || (reg_classes_intersect_p (from, FP_REGS)
3389 && reg_classes_intersect_p (to, GENERAL_REGS)))
ccaed3ba
AS
3390 return 10;
3391
3392 return 1;
3393}
3394
3395/* Implement TARGET_MEMORY_MOVE_COST. */
3396
3397static int
ef4bddc2 3398s390_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
ccaed3ba
AS
3399 reg_class_t rclass ATTRIBUTE_UNUSED,
3400 bool in ATTRIBUTE_UNUSED)
3401{
fb1a3f8f 3402 return 2;
ccaed3ba 3403}
cd8dc1f9 3404
ab96de7e
AS
3405/* Compute a (partial) cost for rtx X. Return true if the complete
3406 cost has been computed, and false if subexpressions should be
2c2156a7
DV
3407 scanned. In either case, *TOTAL contains the cost result. The
3408 initial value of *TOTAL is the default value computed by
3409 rtx_cost. It may be left unmodified. OUTER_CODE contains the
3410 code of the superexpression of x. */
ab96de7e
AS
3411
3412static bool
e548c9df
AM
3413s390_rtx_costs (rtx x, machine_mode mode, int outer_code,
3414 int opno ATTRIBUTE_UNUSED,
68f932c4 3415 int *total, bool speed ATTRIBUTE_UNUSED)
3c50106f 3416{
e548c9df 3417 int code = GET_CODE (x);
3c50106f
RH
3418 switch (code)
3419 {
3420 case CONST:
3c50106f 3421 case CONST_INT:
3c50106f
RH
3422 case LABEL_REF:
3423 case SYMBOL_REF:
3424 case CONST_DOUBLE:
089b05b1 3425 case CONST_WIDE_INT:
6fa5b390 3426 case MEM:
3c50106f
RH
3427 *total = 0;
3428 return true;
3429
e5309d95
AK
3430 case SET:
3431 {
3432 /* Without this a conditional move instruction would be
3433 accounted as 3 * COSTS_N_INSNS (set, if_then_else,
3434 comparison operator). That's a bit pessimistic. */
3435
3436 if (!TARGET_Z196 || GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3437 return false;
3438
3439 rtx cond = XEXP (SET_SRC (x), 0);
3440
3441 if (!CC_REG_P (XEXP (cond, 0)) || !CONST_INT_P (XEXP (cond, 1)))
3442 return false;
3443
3444 /* It is going to be a load/store on condition. Make it
3445 slightly more expensive than a normal load. */
3446 *total = COSTS_N_INSNS (1) + 1;
3447
3448 rtx dst = SET_DEST (x);
3449 rtx then = XEXP (SET_SRC (x), 1);
3450 rtx els = XEXP (SET_SRC (x), 2);
3451
3452 /* It is a real IF-THEN-ELSE. An additional move will be
3453 needed to implement that. */
3454 if (reload_completed
3455 && !rtx_equal_p (dst, then)
3456 && !rtx_equal_p (dst, els))
3457 *total += COSTS_N_INSNS (1) / 2;
3458
3459 /* A minor penalty for constants we cannot directly handle. */
3460 if ((CONST_INT_P (then) || CONST_INT_P (els))
3461 && (!TARGET_Z13 || MEM_P (dst)
3462 || (CONST_INT_P (then) && !satisfies_constraint_K (then))
3463 || (CONST_INT_P (els) && !satisfies_constraint_K (els))))
3464 *total += COSTS_N_INSNS (1) / 2;
3465
3466 /* A store on condition can only handle register src operands. */
3467 if (MEM_P (dst) && (!REG_P (then) || !REG_P (els)))
3468 *total += COSTS_N_INSNS (1) / 2;
3469
3470 return true;
3471 }
3d44ff99
AK
3472 case IOR:
3473 /* risbg */
3474 if (GET_CODE (XEXP (x, 0)) == AND
3475 && GET_CODE (XEXP (x, 1)) == ASHIFT
3476 && REG_P (XEXP (XEXP (x, 0), 0))
3477 && REG_P (XEXP (XEXP (x, 1), 0))
3478 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
3479 && CONST_INT_P (XEXP (XEXP (x, 1), 1))
3480 && (UINTVAL (XEXP (XEXP (x, 0), 1)) ==
406fde6e 3481 (HOST_WIDE_INT_1U << UINTVAL (XEXP (XEXP (x, 1), 1))) - 1))
3d44ff99
AK
3482 {
3483 *total = COSTS_N_INSNS (2);
3484 return true;
3485 }
9ec98860
AK
3486
3487 /* ~AND on a 128 bit mode. This can be done using a vector
3488 instruction. */
3489 if (TARGET_VXE
3490 && GET_CODE (XEXP (x, 0)) == NOT
3491 && GET_CODE (XEXP (x, 1)) == NOT
3492 && REG_P (XEXP (XEXP (x, 0), 0))
3493 && REG_P (XEXP (XEXP (x, 1), 0))
3494 && GET_MODE_SIZE (GET_MODE (XEXP (XEXP (x, 0), 0))) == 16
3495 && s390_hard_regno_mode_ok (VR0_REGNUM,
3496 GET_MODE (XEXP (XEXP (x, 0), 0))))
3497 {
3498 *total = COSTS_N_INSNS (1);
3499 return true;
3500 }
1d92cba9 3501 /* fallthrough */
3c50106f
RH
3502 case ASHIFT:
3503 case ASHIFTRT:
3504 case LSHIFTRT:
017e0eb9
MD
3505 case ROTATE:
3506 case ROTATERT:
3c50106f 3507 case AND:
3c50106f 3508 case XOR:
3c50106f
RH
3509 case NEG:
3510 case NOT:
3511 *total = COSTS_N_INSNS (1);
017e0eb9 3512 return false;
3c50106f 3513
2742a1ed
MD
3514 case PLUS:
3515 case MINUS:
2742a1ed
MD
3516 *total = COSTS_N_INSNS (1);
3517 return false;
3518
f4aa3848 3519 case MULT:
e548c9df 3520 switch (mode)
017e0eb9 3521 {
4e10a5a7 3522 case E_SImode:
2742a1ed 3523 {
017e0eb9
MD
3524 rtx left = XEXP (x, 0);
3525 rtx right = XEXP (x, 1);
3526 if (GET_CODE (right) == CONST_INT
b5c67a49 3527 && CONST_OK_FOR_K (INTVAL (right)))
017e0eb9
MD
3528 *total = s390_cost->mhi;
3529 else if (GET_CODE (left) == SIGN_EXTEND)
3530 *total = s390_cost->mh;
3531 else
3532 *total = s390_cost->ms; /* msr, ms, msy */
3533 break;
3534 }
4e10a5a7 3535 case E_DImode:
017e0eb9
MD
3536 {
3537 rtx left = XEXP (x, 0);
3538 rtx right = XEXP (x, 1);
9602b6a1 3539 if (TARGET_ZARCH)
017e0eb9
MD
3540 {
3541 if (GET_CODE (right) == CONST_INT
b5c67a49 3542 && CONST_OK_FOR_K (INTVAL (right)))
017e0eb9
MD
3543 *total = s390_cost->mghi;
3544 else if (GET_CODE (left) == SIGN_EXTEND)
3545 *total = s390_cost->msgf;
3546 else
3547 *total = s390_cost->msg; /* msgr, msg */
3548 }
3549 else /* TARGET_31BIT */
3550 {
3551 if (GET_CODE (left) == SIGN_EXTEND
3552 && GET_CODE (right) == SIGN_EXTEND)
3553 /* mulsidi case: mr, m */
3554 *total = s390_cost->m;
2742a1ed
MD
3555 else if (GET_CODE (left) == ZERO_EXTEND
3556 && GET_CODE (right) == ZERO_EXTEND
3557 && TARGET_CPU_ZARCH)
3558 /* umulsidi case: ml, mlr */
3559 *total = s390_cost->ml;
017e0eb9
MD
3560 else
3561 /* Complex calculation is required. */
3562 *total = COSTS_N_INSNS (40);
3563 }
3564 break;
3565 }
4e10a5a7
RS
3566 case E_SFmode:
3567 case E_DFmode:
017e0eb9
MD
3568 *total = s390_cost->mult_df;
3569 break;
4e10a5a7 3570 case E_TFmode:
f61a2c7d
AK
3571 *total = s390_cost->mxbr;
3572 break;
017e0eb9
MD
3573 default:
3574 return false;
3575 }
3576 return false;
3c50106f 3577
d7ecb504 3578 case FMA:
e548c9df 3579 switch (mode)
d7ecb504 3580 {
4e10a5a7 3581 case E_DFmode:
d7ecb504
RH
3582 *total = s390_cost->madbr;
3583 break;
4e10a5a7 3584 case E_SFmode:
d7ecb504
RH
3585 *total = s390_cost->maebr;
3586 break;
3587 default:
3588 return false;
3589 }
3590 /* Negate in the third argument is free: FMSUB. */
3591 if (GET_CODE (XEXP (x, 2)) == NEG)
3592 {
e548c9df
AM
3593 *total += (rtx_cost (XEXP (x, 0), mode, FMA, 0, speed)
3594 + rtx_cost (XEXP (x, 1), mode, FMA, 1, speed)
3595 + rtx_cost (XEXP (XEXP (x, 2), 0), mode, FMA, 2, speed));
d7ecb504
RH
3596 return true;
3597 }
3598 return false;
3599
6fa5b390
MD
3600 case UDIV:
3601 case UMOD:
e548c9df 3602 if (mode == TImode) /* 128 bit division */
6fa5b390 3603 *total = s390_cost->dlgr;
e548c9df 3604 else if (mode == DImode)
6fa5b390
MD
3605 {
3606 rtx right = XEXP (x, 1);
3607 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
3608 *total = s390_cost->dlr;
3609 else /* 64 by 64 bit division */
3610 *total = s390_cost->dlgr;
3611 }
e548c9df 3612 else if (mode == SImode) /* 32 bit division */
6fa5b390
MD
3613 *total = s390_cost->dlr;
3614 return false;
3615
3c50106f 3616 case DIV:
6fa5b390 3617 case MOD:
e548c9df 3618 if (mode == DImode)
6fa5b390
MD
3619 {
3620 rtx right = XEXP (x, 1);
3621 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
9602b6a1 3622 if (TARGET_ZARCH)
6fa5b390
MD
3623 *total = s390_cost->dsgfr;
3624 else
3625 *total = s390_cost->dr;
3626 else /* 64 by 64 bit division */
3627 *total = s390_cost->dsgr;
3628 }
e548c9df 3629 else if (mode == SImode) /* 32 bit division */
6fa5b390 3630 *total = s390_cost->dlr;
e548c9df 3631 else if (mode == SFmode)
98fd0d70 3632 {
142cd70f 3633 *total = s390_cost->debr;
98fd0d70 3634 }
e548c9df 3635 else if (mode == DFmode)
98fd0d70 3636 {
142cd70f 3637 *total = s390_cost->ddbr;
98fd0d70 3638 }
e548c9df 3639 else if (mode == TFmode)
f61a2c7d 3640 {
142cd70f 3641 *total = s390_cost->dxbr;
f61a2c7d 3642 }
017e0eb9
MD
3643 return false;
3644
2742a1ed 3645 case SQRT:
e548c9df 3646 if (mode == SFmode)
2742a1ed 3647 *total = s390_cost->sqebr;
e548c9df 3648 else if (mode == DFmode)
2742a1ed 3649 *total = s390_cost->sqdbr;
f61a2c7d
AK
3650 else /* TFmode */
3651 *total = s390_cost->sqxbr;
2742a1ed
MD
3652 return false;
3653
017e0eb9 3654 case SIGN_EXTEND:
2742a1ed 3655 case ZERO_EXTEND:
6fa5b390
MD
3656 if (outer_code == MULT || outer_code == DIV || outer_code == MOD
3657 || outer_code == PLUS || outer_code == MINUS
3658 || outer_code == COMPARE)
017e0eb9
MD
3659 *total = 0;
3660 return false;
3c50106f 3661
6fa5b390
MD
3662 case COMPARE:
3663 *total = COSTS_N_INSNS (1);
3664 if (GET_CODE (XEXP (x, 0)) == AND
3665 && GET_CODE (XEXP (x, 1)) == CONST_INT
3666 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
3667 {
3668 rtx op0 = XEXP (XEXP (x, 0), 0);
3669 rtx op1 = XEXP (XEXP (x, 0), 1);
3670 rtx op2 = XEXP (x, 1);
3671
3672 if (memory_operand (op0, GET_MODE (op0))
3673 && s390_tm_ccmode (op1, op2, 0) != VOIDmode)
3674 return true;
3675 if (register_operand (op0, GET_MODE (op0))
3676 && s390_tm_ccmode (op1, op2, 1) != VOIDmode)
3677 return true;
3678 }
3679 return false;
3680
3c50106f
RH
3681 default:
3682 return false;
3683 }
3684}
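
/* For illustration, two sketches of how the cases above combine
   (register numbers are arbitrary): for (mult:SI (reg:SI 2)
   (const_int 3)) the constant satisfies CONST_OK_FOR_K, so *total
   becomes s390_cost->mhi and false is returned so the operands are
   still scanned. For a z196 conditional move
   (set (reg) (if_then_else (eq (reg CC) (const_int 0)) ...)) true is
   returned with *total starting at COSTS_N_INSNS (1) + 1, which stops
   rtx_cost from also charging the IF_THEN_ELSE and COMPARE
   subexpressions. */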
3685
dea09b1b
UW
3686/* Return the cost of an address rtx ADDR. */
3687
dcefdf67 3688static int
ef4bddc2 3689s390_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
b413068c
OE
3690 addr_space_t as ATTRIBUTE_UNUSED,
3691 bool speed ATTRIBUTE_UNUSED)
dea09b1b
UW
3692{
3693 struct s390_address ad;
3694 if (!s390_decompose_address (addr, &ad))
3695 return 1000;
3696
3697 return ad.indx ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
3698}
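
/* For illustration: a base + displacement address such as
   (plus (reg 2) (const_int 96)) decomposes with ad.indx == NULL and
   is charged COSTS_N_INSNS (1), while a base + index + displacement
   address (plus (plus (reg 2) (reg 3)) (const_int 96)) sets ad.indx
   and is charged COSTS_N_INSNS (1) + 1. Addresses rejected by
   s390_decompose_address get the prohibitive cost 1000. */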
3699
7f5fc633
AK
3700/* Implement targetm.vectorize.builtin_vectorization_cost. */
3701static int
3702s390_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
3703 tree vectype,
3704 int misalign ATTRIBUTE_UNUSED)
3705{
3706 switch (type_of_cost)
3707 {
3708 case scalar_stmt:
3709 case scalar_load:
3710 case scalar_store:
3711 case vector_stmt:
3712 case vector_load:
3713 case vector_store:
3714 case vec_to_scalar:
3715 case scalar_to_vec:
3716 case cond_branch_not_taken:
3717 case vec_perm:
3718 case vec_promote_demote:
3719 case unaligned_load:
3720 case unaligned_store:
3721 return 1;
3722
3723 case cond_branch_taken:
3724 return 3;
3725
3726 case vec_construct:
3727 return TYPE_VECTOR_SUBPARTS (vectype) - 1;
3728
3729 default:
3730 gcc_unreachable ();
3731 }
3732}
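
/* For illustration: constructing a V4SI vector from scalar elements
   is charged TYPE_VECTOR_SUBPARTS (vectype) - 1 == 3, matching the
   element inserts needed once the first element is in place; every
   other kind of statement costs 1 except a taken conditional branch,
   which costs 3. */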
3733
fd3cd001
UW
3734/* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
3735 otherwise return 0. */
3736
3737int
5d81b82b 3738tls_symbolic_operand (rtx op)
fd3cd001 3739{
fd3cd001
UW
3740 if (GET_CODE (op) != SYMBOL_REF)
3741 return 0;
114278e7 3742 return SYMBOL_REF_TLS_MODEL (op);
fd3cd001 3743}
9db1d521 3744\f
c5aa1d12
UW
3745/* Split DImode access register reference REG (on 64-bit) into its constituent
3746 low and high parts, and store them into LO and HI. Note that gen_lowpart/
3747 gen_highpart cannot be used as they assume all registers are word-sized,
3748 while our access registers have only half that size. */
3749
3750void
3751s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
3752{
3753 gcc_assert (TARGET_64BIT);
3754 gcc_assert (ACCESS_REG_P (reg));
3755 gcc_assert (GET_MODE (reg) == DImode);
3756 gcc_assert (!(REGNO (reg) & 1));
3757
3758 *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
3759 *hi = gen_rtx_REG (SImode, REGNO (reg));
3760}
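
/* For illustration: if REG is the DImode reference starting at the
   (even-numbered) access register a0, the call
   s390_split_access_reg (reg, &lo, &hi) yields *hi == (reg:SI a0)
   and *lo == (reg:SI a1), i.e. the high part lives in the even and
   the low part in the odd access register. */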
9db1d521 3761
994fe660 3762/* Return true if OP contains a symbol reference. */
9db1d521 3763
3ed99cc9 3764bool
9c808aad 3765symbolic_reference_mentioned_p (rtx op)
9db1d521 3766{
5d81b82b
AS
3767 const char *fmt;
3768 int i;
9db1d521
HP
3769
3770 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
3771 return 1;
3772
3773 fmt = GET_RTX_FORMAT (GET_CODE (op));
3774 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
3775 {
3776 if (fmt[i] == 'E')
3777 {
5d81b82b 3778 int j;
9db1d521
HP
3779
3780 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
3781 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
3782 return 1;
3783 }
3784
3785 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
3786 return 1;
3787 }
3788
3789 return 0;
3790}
3791
fd3cd001
UW
3792/* Return true if OP contains a reference to a thread-local symbol. */
3793
3ed99cc9 3794bool
9c808aad 3795tls_symbolic_reference_mentioned_p (rtx op)
fd3cd001 3796{
5d81b82b
AS
3797 const char *fmt;
3798 int i;
fd3cd001
UW
3799
3800 if (GET_CODE (op) == SYMBOL_REF)
3801 return tls_symbolic_operand (op);
3802
3803 fmt = GET_RTX_FORMAT (GET_CODE (op));
3804 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
3805 {
3806 if (fmt[i] == 'E')
3807 {
5d81b82b 3808 int j;
fd3cd001
UW
3809
3810 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
3811 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
3ed99cc9 3812 return true;
fd3cd001
UW
3813 }
3814
3815 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
3ed99cc9 3816 return true;
fd3cd001
UW
3817 }
3818
3ed99cc9 3819 return false;
fd3cd001
UW
3820}
3821
9db1d521 3822
c7453384
EC
3823/* Return true if OP is a legitimate general operand when
3824 generating PIC code. It is given that flag_pic is on
089b05b1 3825 and that OP satisfies CONSTANT_P. */
994fe660 3826
9db1d521 3827int
5d81b82b 3828legitimate_pic_operand_p (rtx op)
9db1d521 3829{
4023fb28 3830 /* Accept all non-symbolic constants. */
9db1d521
HP
3831 if (!SYMBOLIC_CONST (op))
3832 return 1;
3833
c7453384 3834 /* Reject everything else; must be handled
fd3cd001 3835 via emit_symbolic_move. */
9db1d521
HP
3836 return 0;
3837}
3838
994fe660 3839/* Returns true if the constant value OP is a legitimate general operand.
089b05b1 3840 It is given that OP satisfies CONSTANT_P. */
994fe660 3841
1a627b35 3842static bool
ef4bddc2 3843s390_legitimate_constant_p (machine_mode mode, rtx op)
9db1d521 3844{
0600d22c 3845 if (TARGET_VX && VECTOR_MODE_P (mode) && GET_CODE (op) == CONST_VECTOR)
085261c8
AK
3846 {
3847 if (GET_MODE_SIZE (mode) != 16)
3848 return 0;
3849
b0057efd
AK
3850 if (!satisfies_constraint_j00 (op)
3851 && !satisfies_constraint_jm1 (op)
3852 && !satisfies_constraint_jKK (op)
3853 && !satisfies_constraint_jxx (op)
3854 && !satisfies_constraint_jyy (op))
085261c8
AK
3855 return 0;
3856 }
3857
4023fb28 3858 /* Accept all non-symbolic constants. */
9db1d521
HP
3859 if (!SYMBOLIC_CONST (op))
3860 return 1;
3861
fd3cd001 3862 /* Accept immediate LARL operands. */
1a627b35 3863 if (TARGET_CPU_ZARCH && larl_operand (op, mode))
fd3cd001
UW
3864 return 1;
3865
3866 /* Thread-local symbols are never legal constants. This is
3867 so that emit_call knows that computing such addresses
3868 might require a function call. */
3869 if (TLS_SYMBOLIC_CONST (op))
3870 return 0;
3871
9db1d521
HP
3872 /* In the PIC case, symbolic constants must *not* be
3873 forced into the literal pool. We accept them here,
fd3cd001 3874 so that they will be handled by emit_symbolic_move. */
9db1d521
HP
3875 if (flag_pic)
3876 return 1;
3877
9db1d521
HP
3878 /* All remaining non-PIC symbolic constants are
3879 forced into the literal pool. */
3880 return 0;
3881}
3882
fd3cd001
UW
3883/* Determine if it's legal to put X into the constant pool. This
3884 is not possible if X contains the address of a symbol that is
3885 not constant (TLS) or not known at final link time (PIC). */
3886
3887static bool
ef4bddc2 3888s390_cannot_force_const_mem (machine_mode mode, rtx x)
fd3cd001
UW
3889{
3890 switch (GET_CODE (x))
3891 {
3892 case CONST_INT:
3893 case CONST_DOUBLE:
089b05b1 3894 case CONST_WIDE_INT:
085261c8 3895 case CONST_VECTOR:
fd3cd001
UW
3896 /* Accept all non-symbolic constants. */
3897 return false;
3898
3899 case LABEL_REF:
3900 /* Labels are OK iff we are non-PIC. */
3901 return flag_pic != 0;
3902
3903 case SYMBOL_REF:
3904 /* 'Naked' TLS symbol references are never OK,
3905 non-TLS symbols are OK iff we are non-PIC. */
3906 if (tls_symbolic_operand (x))
3907 return true;
3908 else
3909 return flag_pic != 0;
3910
3911 case CONST:
fbbf66e7 3912 return s390_cannot_force_const_mem (mode, XEXP (x, 0));
fd3cd001
UW
3913 case PLUS:
3914 case MINUS:
fbbf66e7
RS
3915 return s390_cannot_force_const_mem (mode, XEXP (x, 0))
3916 || s390_cannot_force_const_mem (mode, XEXP (x, 1));
fd3cd001
UW
3917
3918 case UNSPEC:
3919 switch (XINT (x, 1))
3920 {
3921 /* Only lt-relative or GOT-relative UNSPECs are OK. */
fd7643fb
UW
3922 case UNSPEC_LTREL_OFFSET:
3923 case UNSPEC_GOT:
3924 case UNSPEC_GOTOFF:
3925 case UNSPEC_PLTOFF:
fd3cd001
UW
3926 case UNSPEC_TLSGD:
3927 case UNSPEC_TLSLDM:
3928 case UNSPEC_NTPOFF:
3929 case UNSPEC_DTPOFF:
3930 case UNSPEC_GOTNTPOFF:
3931 case UNSPEC_INDNTPOFF:
3932 return false;
3933
9bb86f41
UW
 3934 /* If the literal pool shares the code section, we put
 3935 execute template placeholders into the pool as well. */
3936 case UNSPEC_INSN:
3937 return TARGET_CPU_ZARCH;
3938
fd3cd001
UW
3939 default:
3940 return true;
3941 }
3942 break;
3943
3944 default:
8d933e31 3945 gcc_unreachable ();
fd3cd001
UW
3946 }
3947}
3948
4023fb28 3949/* Returns true if the constant value OP is a legitimate general
c7453384 3950 operand during and after reload. The difference to
4023fb28
UW
3951 legitimate_constant_p is that this function will not accept
3952 a constant that would need to be forced to the literal pool
65b1d8ea
AK
3953 before it can be used as operand.
3954 This function accepts all constants which can be loaded directly
3955 into a GPR. */
4023fb28 3956
3ed99cc9 3957bool
5d81b82b 3958legitimate_reload_constant_p (rtx op)
4023fb28 3959{
d3632d41 3960 /* Accept la(y) operands. */
c7453384 3961 if (GET_CODE (op) == CONST_INT
d3632d41 3962 && DISP_IN_RANGE (INTVAL (op)))
3ed99cc9 3963 return true;
d3632d41 3964
ec24698e 3965 /* Accept l(g)hi/l(g)fi operands. */
4023fb28 3966 if (GET_CODE (op) == CONST_INT
ec24698e 3967 && (CONST_OK_FOR_K (INTVAL (op)) || CONST_OK_FOR_Os (INTVAL (op))))
3ed99cc9 3968 return true;
4023fb28
UW
3969
3970 /* Accept lliXX operands. */
9e8327e3 3971 if (TARGET_ZARCH
11598938
UW
3972 && GET_CODE (op) == CONST_INT
3973 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
3974 && s390_single_part (op, word_mode, HImode, 0) >= 0)
3ed99cc9 3975 return true;
4023fb28 3976
ec24698e
UW
3977 if (TARGET_EXTIMM
3978 && GET_CODE (op) == CONST_INT
3979 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
3980 && s390_single_part (op, word_mode, SImode, 0) >= 0)
3981 return true;
3982
4023fb28 3983 /* Accept larl operands. */
9e8327e3 3984 if (TARGET_CPU_ZARCH
4023fb28 3985 && larl_operand (op, VOIDmode))
3ed99cc9 3986 return true;
4023fb28 3987
45e5214c
UW
3988 /* Accept floating-point zero operands that fit into a single GPR. */
3989 if (GET_CODE (op) == CONST_DOUBLE
3990 && s390_float_const_zero_p (op)
3991 && GET_MODE_SIZE (GET_MODE (op)) <= UNITS_PER_WORD)
3992 return true;
3993
11598938 3994 /* Accept double-word operands that can be split. */
089b05b1
DV
3995 if (GET_CODE (op) == CONST_WIDE_INT
3996 || (GET_CODE (op) == CONST_INT
3997 && trunc_int_for_mode (INTVAL (op), word_mode) != INTVAL (op)))
11598938 3998 {
ef4bddc2 3999 machine_mode dword_mode = word_mode == SImode ? DImode : TImode;
11598938
UW
4000 rtx hi = operand_subword (op, 0, 0, dword_mode);
4001 rtx lo = operand_subword (op, 1, 0, dword_mode);
4002 return legitimate_reload_constant_p (hi)
4003 && legitimate_reload_constant_p (lo);
4004 }
4005
4023fb28 4006 /* Everything else cannot be handled without reload. */
3ed99cc9 4007 return false;
4023fb28
UW
4008}
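
/* Worked example: with 31-bit word_mode == SImode, the DImode
   constant 0x100000002 is changed by trunc_int_for_mode, so it is
   split via operand_subword into the halves 1 and 2; both halves are
   themselves legitimate reload constants, hence the double-word
   constant as a whole is accepted. */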
4009
65b1d8ea
AK
4010/* Returns true if the constant value OP is a legitimate fp operand
4011 during and after reload.
4012 This function accepts all constants which can be loaded directly
4013 into an FPR. */
4014
4015static bool
4016legitimate_reload_fp_constant_p (rtx op)
4017{
4018 /* Accept floating-point zero operands if the load zero instruction
22ac2c2f
AK
4019 can be used. Prior to z196 the load fp zero instruction caused a
4020 performance penalty if the result is used as BFP number. */
65b1d8ea
AK
4021 if (TARGET_Z196
4022 && GET_CODE (op) == CONST_DOUBLE
4023 && s390_float_const_zero_p (op))
4024 return true;
4025
4026 return false;
4027}
4028
085261c8
AK
4029/* Returns true if the constant value OP is a legitimate vector operand
4030 during and after reload.
4031 This function accepts all constants which can be loaded directly
 4032 into a VR. */
4033
4034static bool
4035legitimate_reload_vector_constant_p (rtx op)
4036{
085261c8 4037 if (TARGET_VX && GET_MODE_SIZE (GET_MODE (op)) == 16
b0057efd
AK
4038 && (satisfies_constraint_j00 (op)
4039 || satisfies_constraint_jm1 (op)
4040 || satisfies_constraint_jKK (op)
4041 || satisfies_constraint_jxx (op)
4042 || satisfies_constraint_jyy (op)))
085261c8
AK
4043 return true;
4044
4045 return false;
4046}
4047
0a2aaacc 4048/* Given an rtx OP being reloaded into a reg required to be in class RCLASS,
4023fb28
UW
4049 return the class of reg to actually use. */
4050
5df97412
AS
4051static reg_class_t
4052s390_preferred_reload_class (rtx op, reg_class_t rclass)
4023fb28 4053{
4023fb28
UW
4054 switch (GET_CODE (op))
4055 {
45e5214c
UW
4056 /* Constants we cannot reload into general registers
4057 must be forced into the literal pool. */
085261c8 4058 case CONST_VECTOR:
4023fb28
UW
4059 case CONST_DOUBLE:
4060 case CONST_INT:
089b05b1 4061 case CONST_WIDE_INT:
45e5214c
UW
4062 if (reg_class_subset_p (GENERAL_REGS, rclass)
4063 && legitimate_reload_constant_p (op))
4064 return GENERAL_REGS;
4065 else if (reg_class_subset_p (ADDR_REGS, rclass)
4066 && legitimate_reload_constant_p (op))
4067 return ADDR_REGS;
65b1d8ea
AK
4068 else if (reg_class_subset_p (FP_REGS, rclass)
4069 && legitimate_reload_fp_constant_p (op))
4070 return FP_REGS;
085261c8
AK
4071 else if (reg_class_subset_p (VEC_REGS, rclass)
4072 && legitimate_reload_vector_constant_p (op))
4073 return VEC_REGS;
4074
65b1d8ea 4075 return NO_REGS;
4023fb28
UW
4076
4077 /* If a symbolic constant or a PLUS is reloaded,
14b3e8ef
UW
4078 it is most likely being used as an address, so
4079 prefer ADDR_REGS. If 'class' is not a superset
4080 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
1abcd5eb 4081 case CONST:
cb4b6d17
AK
4082 /* Symrefs cannot be pushed into the literal pool with -fPIC
4083 so we *MUST NOT* return NO_REGS for these cases
4084 (s390_cannot_force_const_mem will return true).
4085
4086 On the other hand we MUST return NO_REGS for symrefs with
4087 invalid addend which might have been pushed to the literal
4088 pool (no -fPIC). Usually we would expect them to be
4089 handled via secondary reload but this does not happen if
4090 they are used as literal pool slot replacement in reload
4091 inheritance (see emit_input_reload_insns). */
1abcd5eb
AK
4092 if (TARGET_CPU_ZARCH
4093 && GET_CODE (XEXP (op, 0)) == PLUS
4094 && GET_CODE (XEXP (XEXP(op, 0), 0)) == SYMBOL_REF
4095 && GET_CODE (XEXP (XEXP(op, 0), 1)) == CONST_INT)
4096 {
cb4b6d17 4097 if (flag_pic && reg_class_subset_p (ADDR_REGS, rclass))
1abcd5eb
AK
4098 return ADDR_REGS;
4099 else
4100 return NO_REGS;
4101 }
4102 /* fallthrough */
4023fb28
UW
4103 case LABEL_REF:
4104 case SYMBOL_REF:
212aa74f
AK
4105 if (!legitimate_reload_constant_p (op))
4106 return NO_REGS;
4107 /* fallthrough */
4108 case PLUS:
4109 /* load address will be used. */
0a2aaacc 4110 if (reg_class_subset_p (ADDR_REGS, rclass))
212aa74f 4111 return ADDR_REGS;
14b3e8ef
UW
4112 else
4113 return NO_REGS;
4023fb28
UW
4114
4115 default:
4116 break;
4117 }
4118
0a2aaacc 4119 return rclass;
4023fb28 4120}
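
/* For illustration: reloading (const_int 42) into FP_REGS returns
   NO_REGS, since 42 is not a legitimate fp reload constant and must
   go through the literal pool, while reloading it into GENERAL_REGS
   keeps GENERAL_REGS because l(g)hi can load it directly. */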
9db1d521 4121
963fc8d0
AK
4122/* Return true if ADDR is SYMBOL_REF + addend with addend being a
4123 multiple of ALIGNMENT and the SYMBOL_REF being naturally
4124 aligned. */
4125
4126bool
4127s390_check_symref_alignment (rtx addr, HOST_WIDE_INT alignment)
4128{
4129 HOST_WIDE_INT addend;
4130 rtx symref;
4131
e63d44c2
RD
4132 /* The "required alignment" might be 0 (e.g. for certain structs
4133 accessed via BLKmode). Early abort in this case, as well as when
4134 an alignment > 8 is required. */
4135 if (alignment < 2 || alignment > 8)
4136 return false;
4137
0ff4390d
AK
4138 if (!s390_loadrelative_operand_p (addr, &symref, &addend))
4139 return false;
98412b77 4140
0ff4390d 4141 if (addend & (alignment - 1))
963fc8d0
AK
4142 return false;
4143
e63d44c2
RD
4144 if (GET_CODE (symref) == SYMBOL_REF)
4145 {
4146 /* We have load-relative instructions for 2-byte, 4-byte, and
4147 8-byte alignment so allow only these. */
4148 switch (alignment)
4149 {
4150 case 8: return !SYMBOL_FLAG_NOTALIGN8_P (symref);
4151 case 4: return !SYMBOL_FLAG_NOTALIGN4_P (symref);
4152 case 2: return !SYMBOL_FLAG_NOTALIGN2_P (symref);
4153 default: return false;
4154 }
4155 }
0ff4390d
AK
4156
4157 if (GET_CODE (symref) == UNSPEC
4158 && alignment <= UNITS_PER_LONG)
4159 return true;
4160
4161 return false;
963fc8d0
AK
4162}
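
/* For illustration: with ADDR == (const (plus (symbol_ref "x")
   (const_int 4))) and ALIGNMENT == 4 the addend is a multiple of the
   alignment, so the result depends only on whether "x" carries
   SYMBOL_FLAG_NOTALIGN4_P; with an addend of 2 the same call fails
   the multiple-of-alignment check and returns false. */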
4163
 4164/* ADDR is moved into REG using larl. If ADDR isn't a valid larl
 4165 operand, SCRATCH is used to load the even part of the address and
 4166 one is added afterwards. */
4167
4168void
4169s390_reload_larl_operand (rtx reg, rtx addr, rtx scratch)
4170{
4171 HOST_WIDE_INT addend;
4172 rtx symref;
4173
0ff4390d 4174 if (!s390_loadrelative_operand_p (addr, &symref, &addend))
963fc8d0
AK
4175 gcc_unreachable ();
4176
4177 if (!(addend & 1))
4178 /* Easy case. The addend is even so larl will do fine. */
4179 emit_move_insn (reg, addr);
4180 else
4181 {
4182 /* We can leave the scratch register untouched if the target
4183 register is a valid base register. */
4184 if (REGNO (reg) < FIRST_PSEUDO_REGISTER
4185 && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS)
4186 scratch = reg;
4187
4188 gcc_assert (REGNO (scratch) < FIRST_PSEUDO_REGISTER);
4189 gcc_assert (REGNO_REG_CLASS (REGNO (scratch)) == ADDR_REGS);
4190
4191 if (addend != 1)
4192 emit_move_insn (scratch,
4193 gen_rtx_CONST (Pmode,
4194 gen_rtx_PLUS (Pmode, symref,
4195 GEN_INT (addend - 1))));
4196 else
4197 emit_move_insn (scratch, symref);
4198
4199 /* Increment the address using la in order to avoid clobbering cc. */
1abcd5eb 4200 s390_load_address (reg, gen_rtx_PLUS (Pmode, scratch, const1_rtx));
963fc8d0
AK
4201 }
4202}
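
/* For illustration (register numbers arbitrary): reloading the odd
   address sym + 5 into %r2 with scratch %r1 emits roughly

     larl %r1, sym+4
     la %r2, 1(%r1)

   while an even address such as sym + 4 becomes a single larl. */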
4203
4204/* Generate what is necessary to move between REG and MEM using
4205 SCRATCH. The direction is given by TOMEM. */
4206
4207void
4208s390_reload_symref_address (rtx reg, rtx mem, rtx scratch, bool tomem)
4209{
4210 /* Reload might have pulled a constant out of the literal pool.
4211 Force it back in. */
4212 if (CONST_INT_P (mem) || GET_CODE (mem) == CONST_DOUBLE
089b05b1 4213 || GET_CODE (mem) == CONST_WIDE_INT
085261c8 4214 || GET_CODE (mem) == CONST_VECTOR
963fc8d0
AK
4215 || GET_CODE (mem) == CONST)
4216 mem = force_const_mem (GET_MODE (reg), mem);
4217
4218 gcc_assert (MEM_P (mem));
4219
4220 /* For a load from memory we can leave the scratch register
4221 untouched if the target register is a valid base register. */
4222 if (!tomem
4223 && REGNO (reg) < FIRST_PSEUDO_REGISTER
4224 && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS
4225 && GET_MODE (reg) == GET_MODE (scratch))
4226 scratch = reg;
4227
4228 /* Load address into scratch register. Since we can't have a
4229 secondary reload for a secondary reload we have to cover the case
4230 where larl would need a secondary reload here as well. */
4231 s390_reload_larl_operand (scratch, XEXP (mem, 0), scratch);
4232
4233 /* Now we can use a standard load/store to do the move. */
4234 if (tomem)
4235 emit_move_insn (replace_equiv_address (mem, scratch), reg);
4236 else
4237 emit_move_insn (reg, replace_equiv_address (mem, scratch));
4238}
4239
833cd70a 4240/* Inform reload about cases where moving X with a mode MODE to a register in
0a2aaacc 4241 RCLASS requires an extra scratch or immediate register. Return the class
833cd70a 4242 needed for the immediate register. */
f61a2c7d 4243
a87cf97e
JR
4244static reg_class_t
4245s390_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
ef4bddc2 4246 machine_mode mode, secondary_reload_info *sri)
833cd70a 4247{
a87cf97e
JR
4248 enum reg_class rclass = (enum reg_class) rclass_i;
4249
833cd70a 4250 /* Intermediate register needed. */
0a2aaacc 4251 if (reg_classes_intersect_p (CC_REGS, rclass))
9dc62c00
AK
4252 return GENERAL_REGS;
4253
085261c8
AK
4254 if (TARGET_VX)
4255 {
 4256 /* The vst/vl vector move instructions allow only short
 4257 displacements. */
4258 if (MEM_P (x)
4259 && GET_CODE (XEXP (x, 0)) == PLUS
4260 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4261 && !SHORT_DISP_IN_RANGE(INTVAL (XEXP (XEXP (x, 0), 1)))
4262 && reg_class_subset_p (rclass, VEC_REGS)
4263 && (!reg_class_subset_p (rclass, FP_REGS)
4264 || (GET_MODE_SIZE (mode) > 8
4265 && s390_class_max_nregs (FP_REGS, mode) == 1)))
4266 {
4267 if (in_p)
4268 sri->icode = (TARGET_64BIT ?
4269 CODE_FOR_reloaddi_la_in :
4270 CODE_FOR_reloadsi_la_in);
4271 else
4272 sri->icode = (TARGET_64BIT ?
4273 CODE_FOR_reloaddi_la_out :
4274 CODE_FOR_reloadsi_la_out);
4275 }
4276 }
4277
963fc8d0
AK
4278 if (TARGET_Z10)
4279 {
212aa74f
AK
4280 HOST_WIDE_INT offset;
4281 rtx symref;
4282
963fc8d0
AK
4283 /* On z10 several optimizer steps may generate larl operands with
4284 an odd addend. */
4285 if (in_p
0ff4390d 4286 && s390_loadrelative_operand_p (x, &symref, &offset)
963fc8d0 4287 && mode == Pmode
e63d44c2 4288 && !SYMBOL_FLAG_NOTALIGN2_P (symref)
212aa74f 4289 && (offset & 1) == 1)
963fc8d0
AK
4290 sri->icode = ((mode == DImode) ? CODE_FOR_reloaddi_larl_odd_addend_z10
4291 : CODE_FOR_reloadsi_larl_odd_addend_z10);
4292
05702110
AK
4293 /* Handle all the (mem (symref)) accesses we cannot use the z10
4294 instructions for. */
963fc8d0 4295 if (MEM_P (x)
0ff4390d 4296 && s390_loadrelative_operand_p (XEXP (x, 0), NULL, NULL)
05702110 4297 && (mode == QImode
996fcca1 4298 || !reg_class_subset_p (rclass, GENERAL_REGS)
05702110
AK
4299 || GET_MODE_SIZE (mode) > UNITS_PER_WORD
4300 || !s390_check_symref_alignment (XEXP (x, 0),
4301 GET_MODE_SIZE (mode))))
963fc8d0
AK
4302 {
4303#define __SECONDARY_RELOAD_CASE(M,m) \
4e10a5a7 4304 case E_##M##mode: \
963fc8d0
AK
4305 if (TARGET_64BIT) \
4306 sri->icode = in_p ? CODE_FOR_reload##m##di_toreg_z10 : \
4307 CODE_FOR_reload##m##di_tomem_z10; \
4308 else \
4309 sri->icode = in_p ? CODE_FOR_reload##m##si_toreg_z10 : \
4310 CODE_FOR_reload##m##si_tomem_z10; \
4311 break;
4312
4313 switch (GET_MODE (x))
4314 {
4315 __SECONDARY_RELOAD_CASE (QI, qi);
4316 __SECONDARY_RELOAD_CASE (HI, hi);
4317 __SECONDARY_RELOAD_CASE (SI, si);
4318 __SECONDARY_RELOAD_CASE (DI, di);
4319 __SECONDARY_RELOAD_CASE (TI, ti);
4320 __SECONDARY_RELOAD_CASE (SF, sf);
4321 __SECONDARY_RELOAD_CASE (DF, df);
4322 __SECONDARY_RELOAD_CASE (TF, tf);
4323 __SECONDARY_RELOAD_CASE (SD, sd);
4324 __SECONDARY_RELOAD_CASE (DD, dd);
4325 __SECONDARY_RELOAD_CASE (TD, td);
085261c8
AK
4326 __SECONDARY_RELOAD_CASE (V1QI, v1qi);
4327 __SECONDARY_RELOAD_CASE (V2QI, v2qi);
4328 __SECONDARY_RELOAD_CASE (V4QI, v4qi);
4329 __SECONDARY_RELOAD_CASE (V8QI, v8qi);
4330 __SECONDARY_RELOAD_CASE (V16QI, v16qi);
4331 __SECONDARY_RELOAD_CASE (V1HI, v1hi);
4332 __SECONDARY_RELOAD_CASE (V2HI, v2hi);
4333 __SECONDARY_RELOAD_CASE (V4HI, v4hi);
4334 __SECONDARY_RELOAD_CASE (V8HI, v8hi);
4335 __SECONDARY_RELOAD_CASE (V1SI, v1si);
4336 __SECONDARY_RELOAD_CASE (V2SI, v2si);
4337 __SECONDARY_RELOAD_CASE (V4SI, v4si);
4338 __SECONDARY_RELOAD_CASE (V1DI, v1di);
4339 __SECONDARY_RELOAD_CASE (V2DI, v2di);
4340 __SECONDARY_RELOAD_CASE (V1TI, v1ti);
4341 __SECONDARY_RELOAD_CASE (V1SF, v1sf);
4342 __SECONDARY_RELOAD_CASE (V2SF, v2sf);
4343 __SECONDARY_RELOAD_CASE (V4SF, v4sf);
4344 __SECONDARY_RELOAD_CASE (V1DF, v1df);
4345 __SECONDARY_RELOAD_CASE (V2DF, v2df);
4346 __SECONDARY_RELOAD_CASE (V1TF, v1tf);
963fc8d0
AK
4347 default:
4348 gcc_unreachable ();
4349 }
4350#undef __SECONDARY_RELOAD_CASE
4351 }
4352 }
4353
833cd70a
AK
4354 /* We need a scratch register when loading a PLUS expression which
4355 is not a legitimate operand of the LOAD ADDRESS instruction. */
3597e113
VM
4356 /* LRA can deal with transformation of plus op very well -- so we
4357 don't need to prompt LRA in this case. */
4358 if (! lra_in_progress && in_p && s390_plus_operand (x, mode))
833cd70a
AK
4359 sri->icode = (TARGET_64BIT ?
4360 CODE_FOR_reloaddi_plus : CODE_FOR_reloadsi_plus);
4361
7fa7289d 4362 /* When performing a multiword move from or to memory, we have to make
833cd70a
AK
 4363 sure the second chunk in memory is addressable without causing a
 4364 displacement overflow. If that would be the case, we calculate the
 4365 address in a scratch register. */
4366 if (MEM_P (x)
4367 && GET_CODE (XEXP (x, 0)) == PLUS
4368 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4369 && !DISP_IN_RANGE (INTVAL (XEXP (XEXP (x, 0), 1))
0ca89db7 4370 + GET_MODE_SIZE (mode) - 1))
833cd70a 4371 {
7fa7289d 4372 /* For GENERAL_REGS a displacement overflow is no problem if it occurs
833cd70a
AK
 4373 in an s_operand address, since we may fall back to lm/stm. So we
 4374 only have to care about overflows in the b+i+d case. */
0a2aaacc 4375 if ((reg_classes_intersect_p (GENERAL_REGS, rclass)
833cd70a
AK
4376 && s390_class_max_nregs (GENERAL_REGS, mode) > 1
4377 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS)
4378 /* For FP_REGS no lm/stm is available so this check is triggered
4379 for displacement overflows in b+i+d and b+d like addresses. */
0a2aaacc 4380 || (reg_classes_intersect_p (FP_REGS, rclass)
833cd70a
AK
4381 && s390_class_max_nregs (FP_REGS, mode) > 1))
4382 {
4383 if (in_p)
4384 sri->icode = (TARGET_64BIT ?
085261c8
AK
4385 CODE_FOR_reloaddi_la_in :
4386 CODE_FOR_reloadsi_la_in);
833cd70a
AK
4387 else
4388 sri->icode = (TARGET_64BIT ?
085261c8
AK
4389 CODE_FOR_reloaddi_la_out :
4390 CODE_FOR_reloadsi_la_out);
833cd70a
AK
4391 }
4392 }
9dc62c00 4393
1f9e1fc6
AK
4394 /* A scratch address register is needed when a symbolic constant is
4395 copied to r0 compiling with -fPIC. In other cases the target
4396 register might be used as temporary (see legitimize_pic_address). */
0a2aaacc 4397 if (in_p && SYMBOLIC_CONST (x) && flag_pic == 2 && rclass != ADDR_REGS)
1f9e1fc6
AK
4398 sri->icode = (TARGET_64BIT ?
4399 CODE_FOR_reloaddi_PIC_addr :
4400 CODE_FOR_reloadsi_PIC_addr);
4401
833cd70a 4402 /* Either scratch or no register needed. */
dc65c307
UW
4403 return NO_REGS;
4404}
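
/* For illustration: on a TARGET_VX machine, loading a V16QImode
   value from (mem (plus (reg 2) (const_int 4096))) exceeds the short
   displacement range of vl/vst, so sri->icode is set to one of the
   reload..._la_in patterns above and the address is first computed
   in a scratch register. */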
4405
f3e9edff
UW
4406/* Generate code to load SRC, which is PLUS that is not a
4407 legitimate operand for the LA instruction, into TARGET.
4408 SCRATCH may be used as scratch register. */
4409
4410void
5d81b82b
AS
4411s390_expand_plus_operand (rtx target, rtx src,
4412 rtx scratch)
f3e9edff 4413{
7974fe63 4414 rtx sum1, sum2;
b808c04c 4415 struct s390_address ad;
6a4e49c1 4416
6a4e49c1 4417 /* src must be a PLUS; get its two operands. */
8d933e31
AS
4418 gcc_assert (GET_CODE (src) == PLUS);
4419 gcc_assert (GET_MODE (src) == Pmode);
f3e9edff 4420
7c82a1ed
UW
4421 /* Check if any of the two operands is already scheduled
4422 for replacement by reload. This can happen e.g. when
4423 float registers occur in an address. */
4424 sum1 = find_replacement (&XEXP (src, 0));
4425 sum2 = find_replacement (&XEXP (src, 1));
ccfc6cc8 4426 src = gen_rtx_PLUS (Pmode, sum1, sum2);
ccfc6cc8 4427
7974fe63
UW
4428 /* If the address is already strictly valid, there's nothing to do. */
4429 if (!s390_decompose_address (src, &ad)
93fa8428
AK
4430 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
4431 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
f3e9edff 4432 {
7974fe63
UW
4433 /* Otherwise, one of the operands cannot be an address register;
4434 we reload its value into the scratch register. */
4435 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
4436 {
4437 emit_move_insn (scratch, sum1);
4438 sum1 = scratch;
4439 }
4440 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
4441 {
4442 emit_move_insn (scratch, sum2);
4443 sum2 = scratch;
4444 }
f3e9edff 4445
7974fe63
UW
4446 /* According to the way these invalid addresses are generated
4447 in reload.c, it should never happen (at least on s390) that
4448 *neither* of the PLUS components, after find_replacements
4449 was applied, is an address register. */
4450 if (sum1 == scratch && sum2 == scratch)
4451 {
4452 debug_rtx (src);
8d933e31 4453 gcc_unreachable ();
7974fe63 4454 }
f3e9edff 4455
7974fe63 4456 src = gen_rtx_PLUS (Pmode, sum1, sum2);
f3e9edff
UW
4457 }
4458
4459 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
4460 is only ever performed on addresses, so we can mark the
4461 sum as legitimate for LA in any case. */
a41c6c53 4462 s390_load_address (target, src);
f3e9edff
UW
4463}
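
/* For illustration: if SRC is (plus (reg:DI %f0) (reg:DI %r3)), the
   float register cannot serve as an address register, so its value
   is first copied into SCRATCH and the LOAD ADDRESS pattern is
   emitted on (plus scratch %r3). */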
4464
4465
3ed99cc9 4466/* Return true if ADDR is a valid memory address.
ab96de7e 4467 STRICT specifies whether strict register checking applies. */
9db1d521 4468
c6c3dba9 4469static bool
ef4bddc2 4470s390_legitimate_address_p (machine_mode mode, rtx addr, bool strict)
9db1d521 4471{
ab96de7e 4472 struct s390_address ad;
963fc8d0
AK
4473
4474 if (TARGET_Z10
4475 && larl_operand (addr, VOIDmode)
4476 && (mode == VOIDmode
4477 || s390_check_symref_alignment (addr, GET_MODE_SIZE (mode))))
4478 return true;
4479
ab96de7e 4480 if (!s390_decompose_address (addr, &ad))
3ed99cc9 4481 return false;
b808c04c
UW
4482
4483 if (strict)
4484 {
93fa8428 4485 if (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
3ed99cc9 4486 return false;
93fa8428
AK
4487
4488 if (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx)))
3ed99cc9 4489 return false;
b808c04c
UW
4490 }
4491 else
4492 {
f4aa3848 4493 if (ad.base
93fa8428
AK
4494 && !(REGNO (ad.base) >= FIRST_PSEUDO_REGISTER
4495 || REGNO_REG_CLASS (REGNO (ad.base)) == ADDR_REGS))
3ed99cc9 4496 return false;
f4aa3848 4497
93fa8428
AK
4498 if (ad.indx
4499 && !(REGNO (ad.indx) >= FIRST_PSEUDO_REGISTER
4500 || REGNO_REG_CLASS (REGNO (ad.indx)) == ADDR_REGS))
4501 return false;
b808c04c 4502 }
3ed99cc9 4503 return true;
9db1d521
HP
4504}
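
/* For illustration: in strict mode (plus (reg 5) (const_int 4092))
   is accepted (hard base register in ADDR_REGS, displacement in
   range), whereas on a target without long displacement
   (plus (reg 5) (const_int 4096)) is rejected because
   s390_decompose_address fails for the out-of-range offset. */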
4505
3ed99cc9 4506/* Return true if OP is a valid operand for the LA instruction.
ba956982
UW
4507 In 31-bit, we need to prove that the result is used as an
4508 address, as LA performs only a 31-bit addition. */
4509
3ed99cc9 4510bool
5d81b82b 4511legitimate_la_operand_p (rtx op)
ba956982
UW
4512{
4513 struct s390_address addr;
b808c04c 4514 if (!s390_decompose_address (op, &addr))
3ed99cc9 4515 return false;
ba956982 4516
3ed99cc9 4517 return (TARGET_64BIT || addr.pointer);
f3e9edff 4518}
ba956982 4519
3ed99cc9 4520/* Return true if it is valid *and* preferable to use LA to
e1d5ee28 4521 compute the sum of OP1 and OP2. */
c7453384 4522
3ed99cc9 4523bool
e1d5ee28 4524preferred_la_operand_p (rtx op1, rtx op2)
100c7420
UW
4525{
4526 struct s390_address addr;
e1d5ee28
UW
4527
4528 if (op2 != const0_rtx)
4529 op1 = gen_rtx_PLUS (Pmode, op1, op2);
4530
4531 if (!s390_decompose_address (op1, &addr))
3ed99cc9 4532 return false;
93fa8428 4533 if (addr.base && !REGNO_OK_FOR_BASE_P (REGNO (addr.base)))
3ed99cc9 4534 return false;
93fa8428 4535 if (addr.indx && !REGNO_OK_FOR_INDEX_P (REGNO (addr.indx)))
3ed99cc9 4536 return false;
100c7420 4537
65b1d8ea 4538 /* Avoid LA instructions with index register on z196; it is
22ac2c2f
AK
4539 preferable to use regular add instructions when possible.
4540 Starting with zEC12 the la with index register is "uncracked"
4541 again. */
65b1d8ea
AK
4542 if (addr.indx && s390_tune == PROCESSOR_2817_Z196)
4543 return false;
4544
100c7420 4545 if (!TARGET_64BIT && !addr.pointer)
3ed99cc9 4546 return false;
100c7420
UW
4547
4548 if (addr.pointer)
3ed99cc9 4549 return true;
100c7420 4550
4888ec5d
UW
4551 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
4552 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
3ed99cc9 4553 return true;
100c7420 4554
3ed99cc9 4555 return false;
100c7420
UW
4556}
4557
a41c6c53
UW
4558/* Emit a forced load-address operation to load SRC into DST.
4559 This will use the LOAD ADDRESS instruction even in situations
4560 where legitimate_la_operand_p (SRC) returns false. */
ba956982 4561
a41c6c53 4562void
9c808aad 4563s390_load_address (rtx dst, rtx src)
f3e9edff 4564{
a41c6c53
UW
4565 if (TARGET_64BIT)
4566 emit_move_insn (dst, src);
4567 else
4568 emit_insn (gen_force_la_31 (dst, src));
ba956982
UW
4569}
4570
935b5226
AK
 4571/* Return true if it is OK to use SYMBOL_REF in a relative address. */
4572
4573bool
4574s390_rel_address_ok_p (rtx symbol_ref)
4575{
4576 tree decl;
4577
4578 if (symbol_ref == s390_got_symbol () || CONSTANT_POOL_ADDRESS_P (symbol_ref))
4579 return true;
4580
4581 decl = SYMBOL_REF_DECL (symbol_ref);
4582
4583 if (!flag_pic || SYMBOL_REF_LOCAL_P (symbol_ref))
4584 return (s390_pic_data_is_text_relative
4585 || (decl
4586 && TREE_CODE (decl) == FUNCTION_DECL));
4587
4588 return false;
4589}
4590
9db1d521
HP
4591/* Return a legitimate reference for ORIG (an address) using the
4592 register REG. If REG is 0, a new pseudo is generated.
4593
4594 There are two types of references that must be handled:
4595
4596 1. Global data references must load the address from the GOT, via
4597 the PIC reg. An insn is emitted to do this load, and the reg is
4598 returned.
4599
4600 2. Static data references, constant pool addresses, and code labels
4601 compute the address as an offset from the GOT, whose base is in
114278e7 4602 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
9db1d521
HP
4603 differentiate them from global data objects. The returned
4604 address is the PIC reg + an unspec constant.
4605
331d9186 4606 TARGET_LEGITIMIZE_ADDRESS_P rejects symbolic references unless the PIC
9db1d521
HP
4607 reg also appears in the address. */
4608
4609rtx
9c808aad 4610legitimize_pic_address (rtx orig, rtx reg)
9db1d521
HP
4611{
4612 rtx addr = orig;
0ff4390d 4613 rtx addend = const0_rtx;
0a2aaacc 4614 rtx new_rtx = orig;
9db1d521 4615
cf9d7618
ANM
4616 gcc_assert (!TLS_SYMBOLIC_CONST (addr));
4617
0ff4390d
AK
4618 if (GET_CODE (addr) == CONST)
4619 addr = XEXP (addr, 0);
4620
4621 if (GET_CODE (addr) == PLUS)
9db1d521 4622 {
0ff4390d
AK
4623 addend = XEXP (addr, 1);
4624 addr = XEXP (addr, 0);
4625 }
4626
4627 if ((GET_CODE (addr) == LABEL_REF
935b5226 4628 || (SYMBOL_REF_P (addr) && s390_rel_address_ok_p (addr))
0ff4390d
AK
4629 || (GET_CODE (addr) == UNSPEC &&
4630 (XINT (addr, 1) == UNSPEC_GOTENT
4631 || (TARGET_CPU_ZARCH && XINT (addr, 1) == UNSPEC_PLT))))
4632 && GET_CODE (addend) == CONST_INT)
4633 {
4634 /* This can be locally addressed. */
4635
4636 /* larl_operand requires UNSPECs to be wrapped in a const rtx. */
4637 rtx const_addr = (GET_CODE (addr) == UNSPEC ?
4638 gen_rtx_CONST (Pmode, addr) : addr);
4639
4640 if (TARGET_CPU_ZARCH
4641 && larl_operand (const_addr, VOIDmode)
406fde6e
DV
4642 && INTVAL (addend) < HOST_WIDE_INT_1 << 31
4643 && INTVAL (addend) >= -(HOST_WIDE_INT_1 << 31))
0ff4390d
AK
4644 {
4645 if (INTVAL (addend) & 1)
4646 {
4647 /* LARL can't handle odd offsets, so emit a pair of LARL
4648 and LA. */
4649 rtx temp = reg? reg : gen_reg_rtx (Pmode);
4650
4651 if (!DISP_IN_RANGE (INTVAL (addend)))
4652 {
4653 HOST_WIDE_INT even = INTVAL (addend) - 1;
4654 addr = gen_rtx_PLUS (Pmode, addr, GEN_INT (even));
4655 addr = gen_rtx_CONST (Pmode, addr);
4656 addend = const1_rtx;
4657 }
4658
4659 emit_move_insn (temp, addr);
4660 new_rtx = gen_rtx_PLUS (Pmode, temp, addend);
4661
4662 if (reg != 0)
4663 {
4664 s390_load_address (reg, new_rtx);
4665 new_rtx = reg;
4666 }
4667 }
4668 else
4669 {
4670 /* If the offset is even, we can just use LARL. This
4671 will happen automatically. */
4672 }
4673 }
9db1d521 4674 else
0ff4390d
AK
4675 {
4676 /* No larl - Access local symbols relative to the GOT. */
9db1d521 4677
0ff4390d 4678 rtx temp = reg? reg : gen_reg_rtx (Pmode);
9db1d521 4679
fd7643fb 4680 if (reload_in_progress || reload_completed)
6fb5fa3c 4681 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
fd7643fb 4682
0ff4390d
AK
4683 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
4684 if (addend != const0_rtx)
4685 addr = gen_rtx_PLUS (Pmode, addr, addend);
4686 addr = gen_rtx_CONST (Pmode, addr);
4687 addr = force_const_mem (Pmode, addr);
9db1d521
HP
4688 emit_move_insn (temp, addr);
4689
0ff4390d
AK
4690 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
4691 if (reg != 0)
4692 {
4693 s390_load_address (reg, new_rtx);
4694 new_rtx = reg;
4695 }
4696 }
9db1d521 4697 }
0ff4390d 4698 else if (GET_CODE (addr) == SYMBOL_REF && addend == const0_rtx)
9db1d521 4699 {
0ff4390d
AK
4700 /* A non-local symbol reference without addend.
4701
4702 The symbol ref is wrapped into an UNSPEC to make sure the
4703 proper operand modifier (@GOT or @GOTENT) will be emitted.
4704 This will tell the linker to put the symbol into the GOT.
4705
4706 Additionally the code dereferencing the GOT slot is emitted here.
4707
4708 An addend to the symref needs to be added afterwards.
4709 legitimize_pic_address calls itself recursively to handle
4710 that case. So no need to do it here. */
4711
9db1d521
HP
4712 if (reg == 0)
4713 reg = gen_reg_rtx (Pmode);
4714
0ff4390d
AK
4715 if (TARGET_Z10)
4716 {
4717 /* Use load relative if possible.
4718 lgrl <target>, sym@GOTENT */
4719 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
4720 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
4721 new_rtx = gen_const_mem (GET_MODE (reg), new_rtx);
4722
4723 emit_move_insn (reg, new_rtx);
4724 new_rtx = reg;
4725 }
4726 else if (flag_pic == 1)
9db1d521 4727 {
0ff4390d
AK
4728 /* Assume GOT offset is a valid displacement operand (< 4k
4729 or < 512k with z990). This is handled the same way in
4730 both 31- and 64-bit code (@GOT).
4731 lg <target>, sym@GOT(r12) */
9db1d521 4732
c3cc6b78 4733 if (reload_in_progress || reload_completed)
6fb5fa3c 4734 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
9db1d521 4735
0a2aaacc
KG
4736 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
4737 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
4738 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new_rtx);
4739 new_rtx = gen_const_mem (Pmode, new_rtx);
4740 emit_move_insn (reg, new_rtx);
4741 new_rtx = reg;
9db1d521 4742 }
9e8327e3 4743 else if (TARGET_CPU_ZARCH)
9db1d521
HP
4744 {
4745 /* If the GOT offset might be >= 4k, we determine the position
0ff4390d
AK
4746 of the GOT entry via a PC-relative LARL (@GOTENT).
4747 larl temp, sym@GOTENT
4748 lg <target>, 0(temp) */
9db1d521 4749
1f9e1fc6
AK
4750 rtx temp = reg ? reg : gen_reg_rtx (Pmode);
4751
4752 gcc_assert (REGNO (temp) >= FIRST_PSEUDO_REGISTER
4753 || REGNO_REG_CLASS (REGNO (temp)) == ADDR_REGS);
9db1d521 4754
0a2aaacc
KG
4755 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
4756 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
0ff4390d 4757 emit_move_insn (temp, new_rtx);
9db1d521 4758
0ff4390d 4759 new_rtx = gen_const_mem (Pmode, temp);
0a2aaacc 4760 emit_move_insn (reg, new_rtx);
0ff4390d 4761
0a2aaacc 4762 new_rtx = reg;
9db1d521
HP
4763 }
4764 else
4765 {
c7453384 4766 /* If the GOT offset might be >= 4k, we have to load it
0ff4390d
AK
4767 from the literal pool (@GOT).
4768
4769 lg temp, lit-litbase(r13)
4770 lg <target>, 0(temp)
4771 lit: .long sym@GOT */
9db1d521 4772
1f9e1fc6
AK
4773 rtx temp = reg ? reg : gen_reg_rtx (Pmode);
4774
4775 gcc_assert (REGNO (temp) >= FIRST_PSEUDO_REGISTER
4776 || REGNO_REG_CLASS (REGNO (temp)) == ADDR_REGS);
9db1d521 4777
c3cc6b78 4778 if (reload_in_progress || reload_completed)
6fb5fa3c 4779 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
9db1d521 4780
fd7643fb 4781 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
e23795ea
UW
4782 addr = gen_rtx_CONST (Pmode, addr);
4783 addr = force_const_mem (Pmode, addr);
9db1d521
HP
4784 emit_move_insn (temp, addr);
4785
0a2aaacc
KG
4786 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
4787 new_rtx = gen_const_mem (Pmode, new_rtx);
4788 emit_move_insn (reg, new_rtx);
4789 new_rtx = reg;
9db1d521 4790 }
c7453384 4791 }
0ff4390d 4792 else if (GET_CODE (addr) == UNSPEC && GET_CODE (addend) == CONST_INT)
9db1d521 4793 {
0ff4390d
AK
4794 gcc_assert (XVECLEN (addr, 0) == 1);
4795 switch (XINT (addr, 1))
9db1d521 4796 {
0ff4390d
AK
 4797 /* These UNSPECs address symbols (or PLT slots) relative to the
 4798 GOT (not GOT slots!). In general this will exceed the
 4799 displacement range, so these values belong in the literal
 4800 pool. */
4801 case UNSPEC_GOTOFF:
4802 case UNSPEC_PLTOFF:
4803 new_rtx = force_const_mem (Pmode, orig);
4804 break;
9db1d521 4805
0ff4390d
AK
4806 /* For -fPIC the GOT size might exceed the displacement
4807 range so make sure the value is in the literal pool. */
4808 case UNSPEC_GOT:
4809 if (flag_pic == 2)
4810 new_rtx = force_const_mem (Pmode, orig);
4811 break;
9db1d521 4812
0ff4390d
AK
4813 /* For @GOTENT larl is used. This is handled like local
4814 symbol refs. */
4815 case UNSPEC_GOTENT:
4816 gcc_unreachable ();
4817 break;
9db1d521 4818
0ff4390d
AK
4819 /* @PLT is OK as is on 64-bit, must be converted to
4820 GOT-relative @PLTOFF on 31-bit. */
4821 case UNSPEC_PLT:
4822 if (!TARGET_CPU_ZARCH)
9db1d521 4823 {
0ff4390d
AK
4824 rtx temp = reg? reg : gen_reg_rtx (Pmode);
4825
4826 if (reload_in_progress || reload_completed)
4827 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
4828
4829 addr = XVECEXP (addr, 0, 0);
4830 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
4831 UNSPEC_PLTOFF);
4832 if (addend != const0_rtx)
4833 addr = gen_rtx_PLUS (Pmode, addr, addend);
4834 addr = gen_rtx_CONST (Pmode, addr);
4835 addr = force_const_mem (Pmode, addr);
4836 emit_move_insn (temp, addr);
4837
4838 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
4839 if (reg != 0)
9db1d521 4840 {
0ff4390d
AK
4841 s390_load_address (reg, new_rtx);
4842 new_rtx = reg;
9db1d521 4843 }
0ff4390d
AK
4844 }
4845 else
4846 /* On 64 bit larl can be used. This case is handled like
4847 local symbol refs. */
4848 gcc_unreachable ();
4849 break;
4850
4851 /* Everything else cannot happen. */
4852 default:
4853 gcc_unreachable ();
4854 }
4855 }
4856 else if (addend != const0_rtx)
4857 {
4858 /* Otherwise, compute the sum. */
9db1d521 4859
0ff4390d
AK
4860 rtx base = legitimize_pic_address (addr, reg);
4861 new_rtx = legitimize_pic_address (addend,
4862 base == reg ? NULL_RTX : reg);
4863 if (GET_CODE (new_rtx) == CONST_INT)
4864 new_rtx = plus_constant (Pmode, base, INTVAL (new_rtx));
4865 else
4866 {
4867 if (GET_CODE (new_rtx) == PLUS && CONSTANT_P (XEXP (new_rtx, 1)))
4868 {
4869 base = gen_rtx_PLUS (Pmode, base, XEXP (new_rtx, 0));
4870 new_rtx = XEXP (new_rtx, 1);
9db1d521 4871 }
0ff4390d 4872 new_rtx = gen_rtx_PLUS (Pmode, base, new_rtx);
9db1d521 4873 }
0ff4390d
AK
4874
4875 if (GET_CODE (new_rtx) == CONST)
4876 new_rtx = XEXP (new_rtx, 0);
4877 new_rtx = force_operand (new_rtx, 0);
9db1d521 4878 }
0ff4390d 4879
0a2aaacc 4880 return new_rtx;
9db1d521
HP
4881}
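
/* For illustration (register numbers arbitrary): with flag_pic on a
   z10 target, a global symbol "foo" is reached through its GOT slot
   via

     lgrl %r2, foo@GOTENT

   while a local symbol with an odd addend, say bar + 3, is split
   into

     larl %r1, bar
     la %r2, 3(%r1)

   since larl itself can only generate even addresses. */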
4882
fd3cd001
UW
4883/* Load the thread pointer into a register. */
4884
7b8acc34
AK
4885rtx
4886s390_get_thread_pointer (void)
fd3cd001 4887{
c5aa1d12 4888 rtx tp = gen_reg_rtx (Pmode);
fd3cd001 4889
c5aa1d12 4890 emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
fd3cd001
UW
4891 mark_reg_pointer (tp, BITS_PER_WORD);
4892
4893 return tp;
4894}
4895
ed9676cf
AK
4896/* Emit a tls call insn. The call target is the SYMBOL_REF stored
4897 in s390_tls_symbol which always refers to __tls_get_offset.
 4898 The returned offset is written to RESULT_REG and a USE rtx is
4899 generated for TLS_CALL. */
fd3cd001
UW
4900
4901static GTY(()) rtx s390_tls_symbol;
ed9676cf
AK
4902
4903static void
4904s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
fd3cd001 4905{
ed9676cf 4906 rtx insn;
38899e29 4907
68c0ef75
AK
4908 if (!flag_pic)
4909 emit_insn (s390_load_got ());
ed9676cf 4910
fd3cd001
UW
4911 if (!s390_tls_symbol)
4912 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
4913
38899e29
EC
4914 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
4915 gen_rtx_REG (Pmode, RETURN_REGNUM));
ed9676cf
AK
4916
4917 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
becfd6e5 4918 RTL_CONST_CALL_P (insn) = 1;
fd3cd001
UW
4919}
4920
4921/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
4922 this (thread-local) address. REG may be used as temporary. */
4923
4924static rtx
9c808aad 4925legitimize_tls_address (rtx addr, rtx reg)
fd3cd001 4926{
9b2ea071
TS
4927 rtx new_rtx, tls_call, temp, base, r2;
4928 rtx_insn *insn;
fd3cd001
UW
4929
4930 if (GET_CODE (addr) == SYMBOL_REF)
4931 switch (tls_symbolic_operand (addr))
4932 {
4933 case TLS_MODEL_GLOBAL_DYNAMIC:
4934 start_sequence ();
4935 r2 = gen_rtx_REG (Pmode, 2);
4936 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
0a2aaacc
KG
4937 new_rtx = gen_rtx_CONST (Pmode, tls_call);
4938 new_rtx = force_const_mem (Pmode, new_rtx);
4939 emit_move_insn (r2, new_rtx);
ed9676cf 4940 s390_emit_tls_call_insn (r2, tls_call);
fd3cd001
UW
4941 insn = get_insns ();
4942 end_sequence ();
4943
0a2aaacc 4944 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
fd3cd001 4945 temp = gen_reg_rtx (Pmode);
0a2aaacc 4946 emit_libcall_block (insn, temp, r2, new_rtx);
fd3cd001 4947
0a2aaacc 4948 new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
fd3cd001
UW
4949 if (reg != 0)
4950 {
0a2aaacc
KG
4951 s390_load_address (reg, new_rtx);
4952 new_rtx = reg;
fd3cd001
UW
4953 }
4954 break;
4955
4956 case TLS_MODEL_LOCAL_DYNAMIC:
4957 start_sequence ();
4958 r2 = gen_rtx_REG (Pmode, 2);
4959 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
0a2aaacc
KG
4960 new_rtx = gen_rtx_CONST (Pmode, tls_call);
4961 new_rtx = force_const_mem (Pmode, new_rtx);
4962 emit_move_insn (r2, new_rtx);
ed9676cf 4963 s390_emit_tls_call_insn (r2, tls_call);
fd3cd001
UW
4964 insn = get_insns ();
4965 end_sequence ();
4966
0a2aaacc 4967 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
fd3cd001 4968 temp = gen_reg_rtx (Pmode);
0a2aaacc 4969 emit_libcall_block (insn, temp, r2, new_rtx);
fd3cd001 4970
0a2aaacc 4971 new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
fd3cd001 4972 base = gen_reg_rtx (Pmode);
0a2aaacc 4973 s390_load_address (base, new_rtx);
fd3cd001 4974
0a2aaacc
KG
4975 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
4976 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
4977 new_rtx = force_const_mem (Pmode, new_rtx);
fd3cd001 4978 temp = gen_reg_rtx (Pmode);
0a2aaacc 4979 emit_move_insn (temp, new_rtx);
fd3cd001 4980
0a2aaacc 4981 new_rtx = gen_rtx_PLUS (Pmode, base, temp);
fd3cd001
UW
4982 if (reg != 0)
4983 {
0a2aaacc
KG
4984 s390_load_address (reg, new_rtx);
4985 new_rtx = reg;
fd3cd001
UW
4986 }
4987 break;
4988
4989 case TLS_MODEL_INITIAL_EXEC:
4990 if (flag_pic == 1)
4991 {
4992 /* Assume GOT offset < 4k. This is handled the same way
4993 in both 31- and 64-bit code. */
4994
4995 if (reload_in_progress || reload_completed)
6fb5fa3c 4996 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
fd3cd001 4997
0a2aaacc
KG
4998 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
4999 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
5000 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new_rtx);
5001 new_rtx = gen_const_mem (Pmode, new_rtx);
fd3cd001 5002 temp = gen_reg_rtx (Pmode);
0a2aaacc 5003 emit_move_insn (temp, new_rtx);
fd3cd001 5004 }
9e8327e3 5005 else if (TARGET_CPU_ZARCH)
fd3cd001
UW
5006 {
5007 /* If the GOT offset might be >= 4k, we determine the position
5008 of the GOT entry via a PC-relative LARL. */
5009
0a2aaacc
KG
5010 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
5011 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
fd3cd001 5012 temp = gen_reg_rtx (Pmode);
0a2aaacc 5013 emit_move_insn (temp, new_rtx);
fd3cd001 5014
0a2aaacc 5015 new_rtx = gen_const_mem (Pmode, temp);
fd3cd001 5016 temp = gen_reg_rtx (Pmode);
0a2aaacc 5017 emit_move_insn (temp, new_rtx);
fd3cd001
UW
5018 }
5019 else if (flag_pic)
5020 {
c7453384 5021 /* If the GOT offset might be >= 4k, we have to load it
fd3cd001
UW
5022 from the literal pool. */
5023
5024 if (reload_in_progress || reload_completed)
6fb5fa3c 5025 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
fd3cd001 5026
0a2aaacc
KG
5027 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
5028 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
5029 new_rtx = force_const_mem (Pmode, new_rtx);
fd3cd001 5030 temp = gen_reg_rtx (Pmode);
0a2aaacc 5031 emit_move_insn (temp, new_rtx);
fd3cd001 5032
0a2aaacc
KG
5033 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
5034 new_rtx = gen_const_mem (Pmode, new_rtx);
fd3cd001 5035
0a2aaacc 5036 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new_rtx, addr), UNSPEC_TLS_LOAD);
fd3cd001 5037 temp = gen_reg_rtx (Pmode);
f7df4a84 5038 emit_insn (gen_rtx_SET (temp, new_rtx));
fd3cd001
UW
5039 }
5040 else
5041 {
5042 /* In position-dependent code, load the absolute address of
5043 the GOT entry from the literal pool. */
5044
0a2aaacc
KG
5045 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
5046 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
5047 new_rtx = force_const_mem (Pmode, new_rtx);
fd3cd001 5048 temp = gen_reg_rtx (Pmode);
0a2aaacc 5049 emit_move_insn (temp, new_rtx);
fd3cd001 5050
0a2aaacc
KG
5051 new_rtx = temp;
5052 new_rtx = gen_const_mem (Pmode, new_rtx);
5053 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new_rtx, addr), UNSPEC_TLS_LOAD);
fd3cd001 5054 temp = gen_reg_rtx (Pmode);
f7df4a84 5055 emit_insn (gen_rtx_SET (temp, new_rtx));
fd3cd001
UW
5056 }
5057
0a2aaacc 5058 new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
fd3cd001
UW
5059 if (reg != 0)
5060 {
0a2aaacc
KG
5061 s390_load_address (reg, new_rtx);
5062 new_rtx = reg;
fd3cd001
UW
5063 }
5064 break;
5065
5066 case TLS_MODEL_LOCAL_EXEC:
0a2aaacc
KG
5067 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
5068 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
5069 new_rtx = force_const_mem (Pmode, new_rtx);
fd3cd001 5070 temp = gen_reg_rtx (Pmode);
0a2aaacc 5071 emit_move_insn (temp, new_rtx);
fd3cd001 5072
0a2aaacc 5073 new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
fd3cd001
UW
5074 if (reg != 0)
5075 {
0a2aaacc
KG
5076 s390_load_address (reg, new_rtx);
5077 new_rtx = reg;
fd3cd001
UW
5078 }
5079 break;
5080
5081 default:
8d933e31 5082 gcc_unreachable ();
fd3cd001
UW
5083 }
5084
5085 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
5086 {
5087 switch (XINT (XEXP (addr, 0), 1))
5088 {
5089 case UNSPEC_INDNTPOFF:
8d933e31 5090 gcc_assert (TARGET_CPU_ZARCH);
0a2aaacc 5091 new_rtx = addr;
fd3cd001
UW
5092 break;
5093
5094 default:
8d933e31 5095 gcc_unreachable ();
fd3cd001
UW
5096 }
5097 }
5098
578d1468
UW
5099 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
5100 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
5101 {
0a2aaacc
KG
5102 new_rtx = XEXP (XEXP (addr, 0), 0);
5103 if (GET_CODE (new_rtx) != SYMBOL_REF)
5104 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
578d1468 5105
0a2aaacc 5106 new_rtx = legitimize_tls_address (new_rtx, reg);
0a81f074
RS
5107 new_rtx = plus_constant (Pmode, new_rtx,
5108 INTVAL (XEXP (XEXP (addr, 0), 1)));
0a2aaacc 5109 new_rtx = force_operand (new_rtx, 0);
578d1468
UW
5110 }
5111
fd3cd001 5112 else
8d933e31 5113 gcc_unreachable (); /* for now ... */
fd3cd001 5114
0a2aaacc 5115 return new_rtx;
fd3cd001
UW
5116}
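
/* For illustration: for a local-exec variable "v" the code above
   places v@NTPOFF into the literal pool, loads it into a temporary
   and adds the thread pointer, so the returned rtx has the shape
   (plus tp temp); the initial-exec cases differ only in how the
   offset is fetched (GOT slot or literal pool). */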
5117
1f9e1fc6
AK
5118/* Emit insns making the address in operands[1] valid for a standard
5119 move to operands[0]. operands[1] is replaced by an address which
5120 should be used instead of the former RTX to emit the move
5121 pattern. */
9db1d521
HP
5122
5123void
9c808aad 5124emit_symbolic_move (rtx *operands)
9db1d521 5125{
b3a13419 5126 rtx temp = !can_create_pseudo_p () ? operands[0] : gen_reg_rtx (Pmode);
9db1d521 5127
fd3cd001 5128 if (GET_CODE (operands[0]) == MEM)
9db1d521 5129 operands[1] = force_reg (Pmode, operands[1]);
fd3cd001
UW
5130 else if (TLS_SYMBOLIC_CONST (operands[1]))
5131 operands[1] = legitimize_tls_address (operands[1], temp);
5132 else if (flag_pic)
9db1d521
HP
5133 operands[1] = legitimize_pic_address (operands[1], temp);
5134}
5135
994fe660 5136/* Try machine-dependent ways of modifying an illegitimate address X
9db1d521 5137 to be legitimate. If we find one, return the new, valid address.
9db1d521
HP
5138
5139 OLDX is the address as it was before break_out_memory_refs was called.
5140 In some cases it is useful to look at this to decide what needs to be done.
5141
994fe660 5142 MODE is the mode of the operand pointed to by X.
9db1d521
HP
5143
5144 When -fpic is used, special handling is needed for symbolic references.
5145 See comments by legitimize_pic_address for details. */
5146
506d7b68
PB
5147static rtx
5148s390_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
ef4bddc2 5149 machine_mode mode ATTRIBUTE_UNUSED)
9db1d521 5150{
ba956982 5151 rtx constant_term = const0_rtx;
9db1d521 5152
fd3cd001
UW
5153 if (TLS_SYMBOLIC_CONST (x))
5154 {
5155 x = legitimize_tls_address (x, 0);
5156
c6c3dba9 5157 if (s390_legitimate_address_p (mode, x, FALSE))
5158 return x;
5159 }
cf9d7618 5160 else if (GET_CODE (x) == PLUS
f4aa3848 5161 && (TLS_SYMBOLIC_CONST (XEXP (x, 0))
5162 || TLS_SYMBOLIC_CONST (XEXP (x, 1))))
5163 {
5164 return x;
5165 }
fd3cd001 5166 else if (flag_pic)
9db1d521 5167 {
ba956982 5168 if (SYMBOLIC_CONST (x)
5169 || (GET_CODE (x) == PLUS
5170 && (SYMBOLIC_CONST (XEXP (x, 0))
5171 || SYMBOLIC_CONST (XEXP (x, 1)))))
5172 x = legitimize_pic_address (x, 0);
5173
c6c3dba9 5174 if (s390_legitimate_address_p (mode, x, FALSE))
ba956982 5175 return x;
9db1d521 5176 }
9db1d521 5177
ba956982 5178 x = eliminate_constant_term (x, &constant_term);
994fe660 5179
5180 /* Optimize loading of large displacements by splitting them
5181 into the multiple of 4K and the rest; this allows the
c7453384 5182 former to be CSE'd if possible.
5183
5184 Don't do this if the displacement is added to a register
5185 pointing into the stack frame, as the offsets will
5186 change later anyway. */
5187
5188 if (GET_CODE (constant_term) == CONST_INT
5189 && !TARGET_LONG_DISPLACEMENT
5190 && !DISP_IN_RANGE (INTVAL (constant_term))
5191 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
5192 {
5193 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
5194 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
5195
5196 rtx temp = gen_reg_rtx (Pmode);
5197 rtx val = force_operand (GEN_INT (upper), temp);
5198 if (val != temp)
5199 emit_move_insn (temp, val);
5200
5201 x = gen_rtx_PLUS (Pmode, x, temp);
5202 constant_term = GEN_INT (lower);
5203 }
5204
ba956982 5205 if (GET_CODE (x) == PLUS)
9db1d521 5206 {
5207 if (GET_CODE (XEXP (x, 0)) == REG)
5208 {
5209 rtx temp = gen_reg_rtx (Pmode);
5210 rtx val = force_operand (XEXP (x, 1), temp);
5211 if (val != temp)
5212 emit_move_insn (temp, val);
5213
5214 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
5215 }
5216
5217 else if (GET_CODE (XEXP (x, 1)) == REG)
5218 {
5219 rtx temp = gen_reg_rtx (Pmode);
5220 rtx val = force_operand (XEXP (x, 0), temp);
5221 if (val != temp)
5222 emit_move_insn (temp, val);
5223
5224 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
5225 }
9db1d521 5226 }
5227
5228 if (constant_term != const0_rtx)
5229 x = gen_rtx_PLUS (Pmode, x, constant_term);
5230
5231 return x;
5232}
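/* Worked example of the displacement split above (illustrative, not
   part of the original source).  The short displacement field holds
   12 unsigned bits, so for e.g. disp = 0x12345 the code keeps
   lower = disp & 0xfff = 0x345 in the address and loads
   upper = disp ^ lower = 0x12000 into a register; because lower and
   upper occupy disjoint bits, upper + lower == disp, and the
   register holding upper can be CSE'd across nearby accesses.  */
#include <assert.h>
static void
displacement_split_sketch (void)
{
  long disp = 0x12345;
  long lower = disp & 0xfff;    /* fits the 12-bit D field */
  long upper = disp ^ lower;    /* multiple of 4K */
  assert (upper + lower == disp);
}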
5233
0b540f12 5234/* Try a machine-dependent way of reloading an illegitimate address AD
dd5a833e 5235 operand. If we find one, push the reload and return the new address.
5236
5237 MODE is the mode of the enclosing MEM. OPNUM is the operand number
5238 and TYPE is the reload type of the current reload. */
5239
f4aa3848 5240rtx
ef4bddc2 5241legitimize_reload_address (rtx ad, machine_mode mode ATTRIBUTE_UNUSED,
5242 int opnum, int type)
5243{
5244 if (!optimize || TARGET_LONG_DISPLACEMENT)
5245 return NULL_RTX;
5246
5247 if (GET_CODE (ad) == PLUS)
5248 {
5249 rtx tem = simplify_binary_operation (PLUS, Pmode,
5250 XEXP (ad, 0), XEXP (ad, 1));
5251 if (tem)
5252 ad = tem;
5253 }
5254
5255 if (GET_CODE (ad) == PLUS
5256 && GET_CODE (XEXP (ad, 0)) == REG
5257 && GET_CODE (XEXP (ad, 1)) == CONST_INT
5258 && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
5259 {
5260 HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
5261 HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
0a2aaacc 5262 rtx cst, tem, new_rtx;
5263
5264 cst = GEN_INT (upper);
5265 if (!legitimate_reload_constant_p (cst))
5266 cst = force_const_mem (Pmode, cst);
5267
5268 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
0a2aaacc 5269 new_rtx = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));
5270
5271 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
f4aa3848 5272 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
0b540f12 5273 opnum, (enum reload_type) type);
0a2aaacc 5274 return new_rtx;
5275 }
5276
5277 return NULL_RTX;
5278}
5279
5280/* Emit code to move LEN bytes from SRC to DST. */
5281
367d32f3 5282bool
70128ad9 5283s390_expand_movmem (rtx dst, rtx src, rtx len)
a41c6c53 5284{
5285 /* When tuning for z10 or higher we rely on the Glibc functions to
5286 do the right thing. Only for constant lengths below 64k do we
5287 generate inline code. */
5288 if (s390_tune >= PROCESSOR_2097_Z10
5289 && (GET_CODE (len) != CONST_INT || INTVAL (len) > (1<<16)))
5290 return false;
5291
5292 /* Expand memcpy for constant length operands without a loop if it
5293 is shorter that way.
5294
5295 With a constant length argument a
5296 memcpy loop (without pfd) is 36 bytes -> 6 * mvc */
5297 if (GET_CODE (len) == CONST_INT
5298 && INTVAL (len) >= 0
5299 && INTVAL (len) <= 256 * 6
5300 && (!TARGET_MVCLE || INTVAL (len) <= 256))
a41c6c53 5301 {
5302 HOST_WIDE_INT o, l;
5303
5304 for (l = INTVAL (len), o = 0; l > 0; l -= 256, o += 256)
5305 {
5306 rtx newdst = adjust_address (dst, BLKmode, o);
5307 rtx newsrc = adjust_address (src, BLKmode, o);
5308 emit_insn (gen_movmem_short (newdst, newsrc,
5309 GEN_INT (l > 256 ? 255 : l - 1)));
5310 }
5311 }
5312
5313 else if (TARGET_MVCLE)
5314 {
70128ad9 5315 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
5316 }
5317
5318 else
5319 {
5320 rtx dst_addr, src_addr, count, blocks, temp;
5321 rtx_code_label *loop_start_label = gen_label_rtx ();
5322 rtx_code_label *loop_end_label = gen_label_rtx ();
5323 rtx_code_label *end_label = gen_label_rtx ();
ef4bddc2 5324 machine_mode mode;
5325
5326 mode = GET_MODE (len);
5327 if (mode == VOIDmode)
b9404c99 5328 mode = Pmode;
a41c6c53 5329
5330 dst_addr = gen_reg_rtx (Pmode);
5331 src_addr = gen_reg_rtx (Pmode);
5332 count = gen_reg_rtx (mode);
5333 blocks = gen_reg_rtx (mode);
5334
5335 convert_move (count, len, 1);
c7453384 5336 emit_cmp_and_jump_insns (count, const0_rtx,
5337 EQ, NULL_RTX, mode, 1, end_label);
5338
5339 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
5340 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
5341 dst = change_address (dst, VOIDmode, dst_addr);
5342 src = change_address (src, VOIDmode, src_addr);
c7453384 5343
5344 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1,
5345 OPTAB_DIRECT);
5346 if (temp != count)
5347 emit_move_insn (count, temp);
5348
5349 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
5350 OPTAB_DIRECT);
5351 if (temp != blocks)
5352 emit_move_insn (blocks, temp);
5353
5354 emit_cmp_and_jump_insns (blocks, const0_rtx,
5355 EQ, NULL_RTX, mode, 1, loop_end_label);
5356
5357 emit_label (loop_start_label);
a41c6c53 5358
5359 if (TARGET_Z10
5360 && (GET_CODE (len) != CONST_INT || INTVAL (len) > 768))
5361 {
5362 rtx prefetch;
5363
5364 /* Issue a read prefetch for the +3 cache line. */
5365 prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, src_addr, GEN_INT (768)),
5366 const0_rtx, const0_rtx);
5367 PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
5368 emit_insn (prefetch);
5369
5370 /* Issue a write prefetch for the +3 cache line. */
5371 prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (768)),
5372 const1_rtx, const0_rtx);
5373 PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
5374 emit_insn (prefetch);
5375 }
5376
70128ad9 5377 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
c7453384 5378 s390_load_address (dst_addr,
a41c6c53 5379 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
c7453384 5380 s390_load_address (src_addr,
a41c6c53 5381 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
c7453384 5382
5383 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
5384 OPTAB_DIRECT);
5385 if (temp != blocks)
5386 emit_move_insn (blocks, temp);
5387
5388 emit_cmp_and_jump_insns (blocks, const0_rtx,
5389 EQ, NULL_RTX, mode, 1, loop_end_label);
5390
5391 emit_jump (loop_start_label);
6de9cd9a 5392 emit_label (loop_end_label);
a41c6c53 5393
70128ad9 5394 emit_insn (gen_movmem_short (dst, src,
b9404c99 5395 convert_to_mode (Pmode, count, 1)));
5396 emit_label (end_label);
5397 }
367d32f3 5398 return true;
5399}
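/* Plain-C model of the block structure the expander above emits
   (illustrative only; the real output is RTL).  MVC moves at most
   256 bytes with a length-minus-one encoding, so a variable length
   becomes: bail out on len == 0, then count = len - 1, count >> 8
   full 256-byte blocks in a loop, and one final EXECUTEd MVC moving
   the remaining (count & 0xff) + 1 bytes.  */
#include <string.h>
#include <stddef.h>
static void
movmem_sketch (char *dst, const char *src, size_t len)
{
  if (len == 0)
    return;
  size_t count = len - 1;            /* length-minus-one encoding */
  for (size_t blocks = count >> 8; blocks > 0; blocks--)
    {
      memcpy (dst, src, 256);        /* one MVC of 256 bytes */
      dst += 256;
      src += 256;
    }
  memcpy (dst, src, (count & 0xff) + 1);  /* trailing EXECUTEd MVC */
}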
5400
5401/* Emit code to set LEN bytes at DST to VAL.
5402 Make use of clrmem if VAL is zero. */
5403
5404void
6d057022 5405s390_expand_setmem (rtx dst, rtx len, rtx val)
a41c6c53 5406{
8597cd33 5407 if (GET_CODE (len) == CONST_INT && INTVAL (len) <= 0)
5408 return;
5409
6d057022 5410 gcc_assert (GET_CODE (val) == CONST_INT || GET_MODE (val) == QImode);
f4aa3848 5411
5412 /* Expand setmem/clrmem for a constant length operand without a
5413 loop if it will be shorter that way.
5414 With a constant length and without pfd argument a
5415 clrmem loop is 32 bytes -> 5.3 * xc
5416 setmem loop is 36 bytes -> 3.6 * (mvi/stc + mvc) */
5417 if (GET_CODE (len) == CONST_INT
5418 && ((INTVAL (len) <= 256 * 5 && val == const0_rtx)
5419 || INTVAL (len) <= 257 * 3)
5420 && (!TARGET_MVCLE || INTVAL (len) <= 256))
a41c6c53 5421 {
8597cd33 5422 HOST_WIDE_INT o, l;
f4aa3848 5423
5424 if (val == const0_rtx)
5425 /* clrmem: emit 256 byte blockwise XCs. */
5426 for (l = INTVAL (len), o = 0; l > 0; l -= 256, o += 256)
5427 {
5428 rtx newdst = adjust_address (dst, BLKmode, o);
5429 emit_insn (gen_clrmem_short (newdst,
5430 GEN_INT (l > 256 ? 255 : l - 1)));
5431 }
5432 else
5433 /* setmem: emit 1(mvi) + 256(mvc) byte blockwise memsets by
5434 setting first byte to val and using a 256 byte mvc with one
5435 byte overlap to propagate the byte. */
5436 for (l = INTVAL (len), o = 0; l > 0; l -= 257, o += 257)
5437 {
5438 rtx newdst = adjust_address (dst, BLKmode, o);
5439 emit_move_insn (adjust_address (dst, QImode, o), val);
5440 if (l > 1)
5441 {
5442 rtx newdstp1 = adjust_address (dst, BLKmode, o + 1);
5443 emit_insn (gen_movmem_short (newdstp1, newdst,
5444 GEN_INT (l > 257 ? 255 : l - 2)));
5445 }
5446 }
5447 }
5448
5449 else if (TARGET_MVCLE)
5450 {
6d057022 5451 val = force_not_mem (convert_modes (Pmode, QImode, val, 1));
5452 if (TARGET_64BIT)
5453 emit_insn (gen_setmem_long_di (dst, convert_to_mode (Pmode, len, 1),
5454 val));
5455 else
5456 emit_insn (gen_setmem_long_si (dst, convert_to_mode (Pmode, len, 1),
5457 val));
5458 }
5459
5460 else
5461 {
9602b6a1 5462 rtx dst_addr, count, blocks, temp, dstp1 = NULL_RTX;
19f8b229 5463 rtx_code_label *loop_start_label = gen_label_rtx ();
5464 rtx_code_label *onebyte_end_label = gen_label_rtx ();
5465 rtx_code_label *zerobyte_end_label = gen_label_rtx ();
5466 rtx_code_label *restbyte_end_label = gen_label_rtx ();
ef4bddc2 5467 machine_mode mode;
5468
5469 mode = GET_MODE (len);
5470 if (mode == VOIDmode)
587790e6 5471 mode = Pmode;
a41c6c53 5472
a41c6c53 5473 dst_addr = gen_reg_rtx (Pmode);
5474 count = gen_reg_rtx (mode);
5475 blocks = gen_reg_rtx (mode);
5476
5477 convert_move (count, len, 1);
c7453384 5478 emit_cmp_and_jump_insns (count, const0_rtx,
587790e6 5479 EQ, NULL_RTX, mode, 1, zerobyte_end_label,
8f746f8c 5480 profile_probability::very_unlikely ());
a41c6c53 5481
5482 /* We need to make a copy of the target address since memset is
5483 supposed to return it unmodified. We have to make it here
5484 already since the new reg is used at onebyte_end_label. */
5485 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
5486 dst = change_address (dst, VOIDmode, dst_addr);
c7453384 5487
587790e6 5488 if (val != const0_rtx)
6d057022 5489 {
5490 /* When using the overlapping mvc the original target
5491 address is only accessed as single byte entity (even by
5492 the mvc reading this value). */
f5541398 5493 set_mem_size (dst, 1);
5494 dstp1 = adjust_address (dst, VOIDmode, 1);
5495 emit_cmp_and_jump_insns (count,
5496 const1_rtx, EQ, NULL_RTX, mode, 1,
5497 onebyte_end_label,
5498 profile_probability::very_unlikely ());
6d057022 5499 }
5500
5501 /* There is one unconditional (mvi+mvc)/xc after the loop
5502 dealing with the rest of the bytes, subtracting two (mvi+mvc)
5503 or one (xc) here leaves this number of bytes to be handled by
5504 it. */
5505 temp = expand_binop (mode, add_optab, count,
5506 val == const0_rtx ? constm1_rtx : GEN_INT (-2),
5507 count, 1, OPTAB_DIRECT);
a41c6c53 5508 if (temp != count)
587790e6 5509 emit_move_insn (count, temp);
a41c6c53 5510
5511 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
5512 OPTAB_DIRECT);
a41c6c53 5513 if (temp != blocks)
587790e6 5514 emit_move_insn (blocks, temp);
a41c6c53 5515
6de9cd9a 5516 emit_cmp_and_jump_insns (blocks, const0_rtx,
5517 EQ, NULL_RTX, mode, 1, restbyte_end_label);
5518
5519 emit_jump (loop_start_label);
5520
5521 if (val != const0_rtx)
5522 {
5523 /* The 1 byte != 0 special case. Not handled efficiently
5524 since we require two jumps for that. However, this
5525 should be very rare. */
5526 emit_label (onebyte_end_label);
5527 emit_move_insn (adjust_address (dst, QImode, 0), val);
5528 emit_jump (zerobyte_end_label);
5529 }
5530
5531 emit_label (loop_start_label);
a41c6c53 5532
5533 if (TARGET_Z10
5534 && (GET_CODE (len) != CONST_INT || INTVAL (len) > 1024))
5535 {
5536 /* Issue a write prefetch for the +4 cache line. */
5537 rtx prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, dst_addr,
5538 GEN_INT (1024)),
5539 const1_rtx, const0_rtx);
5540 emit_insn (prefetch);
5541 PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
5542 }
5543
5544 if (val == const0_rtx)
5545 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
5546 else
5547 {
5548 /* Set the first byte in the block to the value and use an
5549 overlapping mvc for the block. */
5550 emit_move_insn (adjust_address (dst, QImode, 0), val);
5551 emit_insn (gen_movmem_short (dstp1, dst, GEN_INT (254)));
5552 }
c7453384 5553 s390_load_address (dst_addr,
a41c6c53 5554 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
c7453384 5555
5556 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
5557 OPTAB_DIRECT);
a41c6c53 5558 if (temp != blocks)
587790e6 5559 emit_move_insn (blocks, temp);
a41c6c53 5560
6de9cd9a 5561 emit_cmp_and_jump_insns (blocks, const0_rtx,
587790e6 5562 NE, NULL_RTX, mode, 1, loop_start_label);
70315fcd 5563
587790e6 5564 emit_label (restbyte_end_label);
a41c6c53 5565
6d057022 5566 if (val == const0_rtx)
587790e6 5567 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
6d057022 5568 else
5569 {
5570 /* Set the first byte in the block to the value and use an
5571 overlapping mvc for the block. */
5572 emit_move_insn (adjust_address (dst, QImode, 0), val);
5573 /* execute only uses the lowest 8 bits of count that's
5574 exactly what we need here. */
5575 emit_insn (gen_movmem_short (dstp1, dst,
5576 convert_to_mode (Pmode, count, 1)));
5577 }
5578
5579 emit_label (zerobyte_end_label);
5580 }
5581}
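/* Plain-C model of the overlapping-MVC trick used throughout the
   function above (illustrative).  MVC copies strictly left to
   right, so after storing the fill byte once, copying the block
   onto itself shifted by one byte propagates that byte through the
   whole block: byte i is written before it is read as the source
   for byte i + 1.  */
static void
overlapping_mvc_sketch (unsigned char *dst, unsigned char val, int len)
{
  if (len <= 0)
    return;
  dst[0] = val;                  /* the MVI/STC of the first byte */
  for (int i = 1; i < len; i++)  /* the one-byte-overlap MVC */
    dst[i] = dst[i - 1];
}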
5582
5583/* Emit code to compare LEN bytes at OP0 with those at OP1,
5584 and return the result in TARGET. */
5585
367d32f3 5586bool
9c808aad 5587s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
a41c6c53 5588{
5b022de5 5589 rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
5590 rtx tmp;
5591
5592 /* When tuning for z10 or higher we rely on the Glibc functions to
5593 do the right thing. Only for constant lengths below 64k do we
5594 generate inline code. */
5595 if (s390_tune >= PROCESSOR_2097_Z10
5596 && (GET_CODE (len) != CONST_INT || INTVAL (len) > (1<<16)))
5597 return false;
5598
5599 /* As the result of CMPINT is inverted compared to what we need,
5600 we have to swap the operands. */
5601 tmp = op0; op0 = op1; op1 = tmp;
a41c6c53 5602
5603 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
5604 {
5605 if (INTVAL (len) > 0)
5606 {
b9404c99 5607 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
02887425 5608 emit_insn (gen_cmpint (target, ccreg));
5609 }
5610 else
5611 emit_move_insn (target, const0_rtx);
5612 }
9dc62c00 5613 else if (TARGET_MVCLE)
a41c6c53 5614 {
b9404c99 5615 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
02887425 5616 emit_insn (gen_cmpint (target, ccreg));
a41c6c53 5617 }
5618 else
5619 {
5620 rtx addr0, addr1, count, blocks, temp;
5621 rtx_code_label *loop_start_label = gen_label_rtx ();
5622 rtx_code_label *loop_end_label = gen_label_rtx ();
5623 rtx_code_label *end_label = gen_label_rtx ();
ef4bddc2 5624 machine_mode mode;
5625
5626 mode = GET_MODE (len);
5627 if (mode == VOIDmode)
b9404c99 5628 mode = Pmode;
a41c6c53 5629
5630 addr0 = gen_reg_rtx (Pmode);
5631 addr1 = gen_reg_rtx (Pmode);
5632 count = gen_reg_rtx (mode);
5633 blocks = gen_reg_rtx (mode);
5634
5635 convert_move (count, len, 1);
c7453384 5636 emit_cmp_and_jump_insns (count, const0_rtx,
a41c6c53
UW
5637 EQ, NULL_RTX, mode, 1, end_label);
5638
5639 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
5640 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
5641 op0 = change_address (op0, VOIDmode, addr0);
5642 op1 = change_address (op1, VOIDmode, addr1);
c7453384 5643
5644 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1,
5645 OPTAB_DIRECT);
5646 if (temp != count)
5647 emit_move_insn (count, temp);
5648
5649 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
5650 OPTAB_DIRECT);
5651 if (temp != blocks)
5652 emit_move_insn (blocks, temp);
5653
5654 emit_cmp_and_jump_insns (blocks, const0_rtx,
5655 EQ, NULL_RTX, mode, 1, loop_end_label);
5656
5657 emit_label (loop_start_label);
a41c6c53 5658
5659 if (TARGET_Z10
5660 && (GET_CODE (len) != CONST_INT || INTVAL (len) > 512))
5661 {
5662 rtx prefetch;
5663
5664 /* Issue a read prefetch for the +2 cache line of operand 1. */
5665 prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, addr0, GEN_INT (512)),
5666 const0_rtx, const0_rtx);
5667 emit_insn (prefetch);
5668 PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
5669
5670 /* Issue a read prefetch for the +2 cache line of operand 2. */
5671 prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, addr1, GEN_INT (512)),
5672 const0_rtx, const0_rtx);
5673 emit_insn (prefetch);
5674 PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
5675 }
5676
b9404c99 5677 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
5b022de5 5678 temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
c7453384 5679 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
a41c6c53 5680 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
f7df4a84 5681 temp = gen_rtx_SET (pc_rtx, temp);
5682 emit_jump_insn (temp);
5683
c7453384 5684 s390_load_address (addr0,
a41c6c53 5685 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
c7453384 5686 s390_load_address (addr1,
a41c6c53 5687 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
c7453384 5688
5689 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
5690 OPTAB_DIRECT);
a41c6c53
UW
5691 if (temp != blocks)
5692 emit_move_insn (blocks, temp);
5693
6de9cd9a
DN
5694 emit_cmp_and_jump_insns (blocks, const0_rtx,
5695 EQ, NULL_RTX, mode, 1, loop_end_label);
70315fcd
SB
5696
5697 emit_jump (loop_start_label);
6de9cd9a 5698 emit_label (loop_end_label);
a41c6c53 5699
38899e29 5700 emit_insn (gen_cmpmem_short (op0, op1,
b9404c99 5701 convert_to_mode (Pmode, count, 1)));
5702 emit_label (end_label);
5703
02887425 5704 emit_insn (gen_cmpint (target, ccreg));
a41c6c53 5705 }
367d32f3 5706 return true;
5707}
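/* Plain-C model of the variable-length compare loop above
   (illustrative; the block partitioning follows the same
   length-minus-one encoding as the movmem expander, simplified
   here).  Each CLC compares up to 256 bytes and the emitted
   conditional jump leaves the loop as soon as a block differs, so
   only the CC of the last executed CLC reaches the CC-to-integer
   conversion.  */
#include <string.h>
#include <stddef.h>
static int
cmpmem_sketch (const void *a, const void *b, size_t len)
{
  const char *p = (const char *) a, *q = (const char *) b;
  while (len > 256)
    {
      int d = memcmp (p, q, 256);   /* one CLC */
      if (d != 0)
        return d;                   /* early exit on CC != 0 */
      p += 256;
      q += 256;
      len -= 256;
    }
  return memcmp (p, q, len);        /* final EXECUTEd CLC */
}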
5708
5709/* Emit a conditional jump to LABEL for condition code mask MASK using
5710 comparison operator COMPARISON. Return the emitted jump insn. */
5711
d8485bdb 5712static rtx_insn *
5713s390_emit_ccraw_jump (HOST_WIDE_INT mask, enum rtx_code comparison, rtx label)
5714{
5715 rtx temp;
5716
5717 gcc_assert (comparison == EQ || comparison == NE);
5718 gcc_assert (mask > 0 && mask < 15);
5719
5720 temp = gen_rtx_fmt_ee (comparison, VOIDmode,
5721 gen_rtx_REG (CCRAWmode, CC_REGNUM), GEN_INT (mask));
5722 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
5723 gen_rtx_LABEL_REF (VOIDmode, label), pc_rtx);
5724 temp = gen_rtx_SET (pc_rtx, temp);
5725 return emit_jump_insn (temp);
5726}
5727
5728/* Emit the instructions to implement strlen of STRING and store the
5729 result in TARGET. The string has the known ALIGNMENT. This
5730 version uses vector instructions and is therefore not appropriate
5731 for targets prior to z13. */
5732
5733void
5734s390_expand_vec_strlen (rtx target, rtx string, rtx alignment)
5735{
5736 rtx highest_index_to_load_reg = gen_reg_rtx (Pmode);
5737 rtx str_reg = gen_reg_rtx (V16QImode);
5738 rtx str_addr_base_reg = gen_reg_rtx (Pmode);
5739 rtx str_idx_reg = gen_reg_rtx (Pmode);
5740 rtx result_reg = gen_reg_rtx (V16QImode);
5741 rtx is_aligned_label = gen_label_rtx ();
5742 rtx into_loop_label = NULL_RTX;
5743 rtx loop_start_label = gen_label_rtx ();
5744 rtx temp;
5745 rtx len = gen_reg_rtx (QImode);
5746 rtx cond;
5747
5748 s390_load_address (str_addr_base_reg, XEXP (string, 0));
5749 emit_move_insn (str_idx_reg, const0_rtx);
5750
5751 if (INTVAL (alignment) < 16)
5752 {
5753 /* Check whether the address happens to be aligned properly so
5754 jump directly to the aligned loop. */
5755 emit_cmp_and_jump_insns (gen_rtx_AND (Pmode,
5756 str_addr_base_reg, GEN_INT (15)),
5757 const0_rtx, EQ, NULL_RTX,
5758 Pmode, 1, is_aligned_label);
5759
5760 temp = gen_reg_rtx (Pmode);
5761 temp = expand_binop (Pmode, and_optab, str_addr_base_reg,
5762 GEN_INT (15), temp, 1, OPTAB_DIRECT);
5763 gcc_assert (REG_P (temp));
5764 highest_index_to_load_reg =
5765 expand_binop (Pmode, sub_optab, GEN_INT (15), temp,
5766 highest_index_to_load_reg, 1, OPTAB_DIRECT);
5767 gcc_assert (REG_P (highest_index_to_load_reg));
5768 emit_insn (gen_vllv16qi (str_reg,
5769 convert_to_mode (SImode, highest_index_to_load_reg, 1),
5770 gen_rtx_MEM (BLKmode, str_addr_base_reg)));
5771
5772 into_loop_label = gen_label_rtx ();
5773 s390_emit_jump (into_loop_label, NULL_RTX);
5774 emit_barrier ();
5775 }
5776
5777 emit_label (is_aligned_label);
5778 LABEL_NUSES (is_aligned_label) = INTVAL (alignment) < 16 ? 2 : 1;
5779
5780 /* When we reach this point we are only performing 16-byte aligned
5781 loads. */
5782 emit_move_insn (highest_index_to_load_reg, GEN_INT (15));
5783
5784 emit_label (loop_start_label);
5785 LABEL_NUSES (loop_start_label) = 1;
5786
5787 /* Load 16 bytes of the string into VR. */
5788 emit_move_insn (str_reg,
5789 gen_rtx_MEM (V16QImode,
5790 gen_rtx_PLUS (Pmode, str_idx_reg,
5791 str_addr_base_reg)));
5792 if (into_loop_label != NULL_RTX)
5793 {
5794 emit_label (into_loop_label);
5795 LABEL_NUSES (into_loop_label) = 1;
5796 }
5797
5798 /* Increment string index by 16 bytes. */
5799 expand_binop (Pmode, add_optab, str_idx_reg, GEN_INT (16),
5800 str_idx_reg, 1, OPTAB_DIRECT);
5801
5802 emit_insn (gen_vec_vfenesv16qi (result_reg, str_reg, str_reg,
5803 GEN_INT (VSTRING_FLAG_ZS | VSTRING_FLAG_CS)));
5804
5805 add_int_reg_note (s390_emit_ccraw_jump (8, NE, loop_start_label),
5806 REG_BR_PROB,
5807 profile_probability::very_likely ().to_reg_br_prob_note ());
ff03930a 5808 emit_insn (gen_vec_extractv16qiqi (len, result_reg, GEN_INT (7)));
5809
5810 /* If the string pointer wasn't aligned we have loaded less than 16
5811 bytes and the remaining bytes got filled with zeros (by vll).
5812 Now we have to check whether the resulting index lies within the
5813 bytes actually part of the string. */
5814
5815 cond = s390_emit_compare (GT, convert_to_mode (Pmode, len, 1),
5816 highest_index_to_load_reg);
5817 s390_load_address (highest_index_to_load_reg,
5818 gen_rtx_PLUS (Pmode, highest_index_to_load_reg,
5819 const1_rtx));
5820 if (TARGET_64BIT)
5821 emit_insn (gen_movdicc (str_idx_reg, cond,
5822 highest_index_to_load_reg, str_idx_reg));
5823 else
5824 emit_insn (gen_movsicc (str_idx_reg, cond,
5825 highest_index_to_load_reg, str_idx_reg));
5826
5827 add_reg_br_prob_note (s390_emit_jump (is_aligned_label, cond),
5828 profile_probability::very_unlikely ());
5829
5830 expand_binop (Pmode, add_optab, str_idx_reg,
5831 GEN_INT (-16), str_idx_reg, 1, OPTAB_DIRECT);
5832 /* FIXME: len is already zero extended - so avoid the llgcr emitted
5833 here. */
5834 temp = expand_binop (Pmode, add_optab, str_idx_reg,
5835 convert_to_mode (Pmode, len, 1),
5836 target, 1, OPTAB_DIRECT);
5837 if (temp != target)
5838 emit_move_insn (target, temp);
5839}
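/* Plain-C model of the algorithm above (illustrative).  The
   unaligned head is loaded with VLL, which stops at the next
   16-byte boundary and therefore cannot cross a page; after that
   each iteration does one aligned 16-byte load plus a VFENE that
   reports the index of the first zero byte (or 16 if none).  */
#include <stddef.h>
static size_t
vec_strlen_sketch (const char *s)
{
  size_t idx = 0;
  size_t align = (size_t) s & 15;
  if (align != 0)
    {
      /* Head handled by VLL: only 16 - align bytes are loaded.  */
      for (; idx < 16 - align; idx++)
        if (s[idx] == 0)
          return idx;
    }
  for (;; idx += 16)                /* one VL + VFENE per chunk */
    for (size_t i = 0; i < 16; i++)
      if (s[idx + i] == 0)
        return idx + i;
}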
5d880bd2 5840
5841void
5842s390_expand_vec_movstr (rtx result, rtx dst, rtx src)
5843{
5844 rtx temp = gen_reg_rtx (Pmode);
5845 rtx src_addr = XEXP (src, 0);
5846 rtx dst_addr = XEXP (dst, 0);
5847 rtx src_addr_reg = gen_reg_rtx (Pmode);
5848 rtx dst_addr_reg = gen_reg_rtx (Pmode);
5849 rtx offset = gen_reg_rtx (Pmode);
5850 rtx vsrc = gen_reg_rtx (V16QImode);
5851 rtx vpos = gen_reg_rtx (V16QImode);
5852 rtx loadlen = gen_reg_rtx (SImode);
5854 rtx gpos_qi = gen_reg_rtx (QImode);
5854 rtx gpos = gen_reg_rtx (SImode);
5855 rtx done_label = gen_label_rtx ();
5856 rtx loop_label = gen_label_rtx ();
5857 rtx exit_label = gen_label_rtx ();
5858 rtx full_label = gen_label_rtx ();
5859
5860 /* Perform a quick check for a string end within the first (up to)
5861 16 bytes and exit early if successful. */
5862
5863 emit_insn (gen_vlbb (vsrc, src, GEN_INT (6)));
5864 emit_insn (gen_lcbb (loadlen, src_addr, GEN_INT (6)));
5865 emit_insn (gen_vfenezv16qi (vpos, vsrc, vsrc));
ff03930a 5866 emit_insn (gen_vec_extractv16qiqi (gpos_qi, vpos, GEN_INT (7)));
5867 emit_move_insn (gpos, gen_rtx_SUBREG (SImode, gpos_qi, 0));
5868 /* gpos is the byte index if a zero was found and 16 otherwise.
5869 So if it is lower than the loaded bytes we have a hit. */
5870 emit_cmp_and_jump_insns (gpos, loadlen, GE, NULL_RTX, SImode, 1,
5871 full_label);
5872 emit_insn (gen_vstlv16qi (vsrc, gpos, dst));
5873
5874 force_expand_binop (Pmode, add_optab, dst_addr, gpos, result,
5875 1, OPTAB_DIRECT);
5876 emit_jump (exit_label);
5877 emit_barrier ();
5878
5879 emit_label (full_label);
5880 LABEL_NUSES (full_label) = 1;
5881
5882 /* Calculate `offset' so that src + offset points to the last byte
5883 before 16 byte alignment. */
5884
5885 /* temp = src_addr & 0xf */
5886 force_expand_binop (Pmode, and_optab, src_addr, GEN_INT (15), temp,
5887 1, OPTAB_DIRECT);
5888
5889 /* offset = 0xf - temp */
5890 emit_move_insn (offset, GEN_INT (15));
5891 force_expand_binop (Pmode, sub_optab, offset, temp, offset,
5892 1, OPTAB_DIRECT);
5893
5894 /* Store `offset' bytes in the destination string. The quick check
5895 has loaded at least `offset' bytes into vsrc. */
5896
5897 emit_insn (gen_vstlv16qi (vsrc, gen_lowpart (SImode, offset), dst));
5898
5899 /* Advance to the next byte to be loaded. */
5900 force_expand_binop (Pmode, add_optab, offset, const1_rtx, offset,
5901 1, OPTAB_DIRECT);
5902
5903 /* Make sure the addresses are single regs which can be used as a
5904 base. */
5905 emit_move_insn (src_addr_reg, src_addr);
5906 emit_move_insn (dst_addr_reg, dst_addr);
5907
5908 /* MAIN LOOP */
5909
5910 emit_label (loop_label);
5911 LABEL_NUSES (loop_label) = 1;
5912
5913 emit_move_insn (vsrc,
5914 gen_rtx_MEM (V16QImode,
5915 gen_rtx_PLUS (Pmode, src_addr_reg, offset)));
5916
5917 emit_insn (gen_vec_vfenesv16qi (vpos, vsrc, vsrc,
5918 GEN_INT (VSTRING_FLAG_ZS | VSTRING_FLAG_CS)));
5919 add_int_reg_note (s390_emit_ccraw_jump (8, EQ, done_label),
5920 REG_BR_PROB, profile_probability::very_unlikely ()
5921 .to_reg_br_prob_note ());
5922
5923 emit_move_insn (gen_rtx_MEM (V16QImode,
5924 gen_rtx_PLUS (Pmode, dst_addr_reg, offset)),
5925 vsrc);
5926 /* offset += 16 */
5927 force_expand_binop (Pmode, add_optab, offset, GEN_INT (16),
5928 offset, 1, OPTAB_DIRECT);
5929
5930 emit_jump (loop_label);
5931 emit_barrier ();
5932
5933 /* REGULAR EXIT */
5934
5935 /* We are done. Add the offset of the zero character to the dst_addr
5936 pointer to get the result. */
5937
5938 emit_label (done_label);
5939 LABEL_NUSES (done_label) = 1;
5940
5941 force_expand_binop (Pmode, add_optab, dst_addr_reg, offset, dst_addr_reg,
5942 1, OPTAB_DIRECT);
5943
ff03930a 5944 emit_insn (gen_vec_extractv16qiqi (gpos_qi, vpos, GEN_INT (7)));
5945 emit_move_insn (gpos, gen_rtx_SUBREG (SImode, gpos_qi, 0));
5946
5947 emit_insn (gen_vstlv16qi (vsrc, gpos, gen_rtx_MEM (BLKmode, dst_addr_reg)));
5948
5949 force_expand_binop (Pmode, add_optab, dst_addr_reg, gpos, result,
5950 1, OPTAB_DIRECT);
5951
5952 /* EARLY EXIT */
5953
5954 emit_label (exit_label);
5955 LABEL_NUSES (exit_label) = 1;
5956}
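/* Plain-C model of the expansion above (illustrative): a bounded
   head copy that cannot cross the page (VLBB/VSTL), then whole
   chunks until VFENE spots a zero byte, then a partial store of the
   tail.  The byte-at-a-time loop below stands in for the 16-byte
   vector chunks of the real code; the return value is a pointer to
   the copied terminator, which is what the movstr pattern needs.  */
#include <stddef.h>
static char *
vec_movstr_sketch (char *dst, const char *src)
{
  size_t off = 0;
  size_t head = 16 - ((size_t) src & 15);
  for (; off < head; off++)          /* quick check on the head */
    {
      dst[off] = src[off];
      if (src[off] == 0)
        return dst + off;            /* early exit */
    }
  for (;; off++)                     /* main loop (vectorized in RTL) */
    {
      dst[off] = src[off];
      if (src[off] == 0)
        return dst + off;
    }
}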
5957
5958
5959/* Expand conditional increment or decrement using alc/slb instructions.
5960 Should generate code setting DST to either SRC or SRC + INCREMENT,
5961 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
5962 Returns true if successful, false otherwise.
5963
5964 That makes it possible to implement some if-constructs without jumps e.g.:
5965 (borrow = CC0 | CC1 and carry = CC2 | CC3)
5966 unsigned int a, b, c;
5967 if (a < b) c++; -> CCU b > a -> CC2; c += carry;
5968 if (a < b) c--; -> CCL3 a - b -> borrow; c -= borrow;
5969 if (a <= b) c++; -> CCL3 b - a -> borrow; c += carry;
5970 if (a <= b) c--; -> CCU a <= b -> borrow; c -= borrow;
5971
5972 Checks for EQ and NE with a nonzero value need an additional xor e.g.:
5973 if (a == b) c++; -> CCL3 a ^= b; 0 - a -> borrow; c += carry;
5974 if (a == b) c--; -> CCU a ^= b; a <= 0 -> CC0 | CC1; c -= borrow;
5975 if (a != b) c++; -> CCU a ^= b; a > 0 -> CC2; c += carry;
5976 if (a != b) c--; -> CCL3 a ^= b; 0 - a -> borrow; c -= borrow; */
5977
5978bool
5979s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
5980 rtx dst, rtx src, rtx increment)
5981{
5982 machine_mode cmp_mode;
5983 machine_mode cc_mode;
5984 rtx op_res;
5985 rtx insn;
5986 rtvec p;
8d933e31 5987 int ret;
5988
5989 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
5990 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
5991 cmp_mode = SImode;
5992 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
5993 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
5994 cmp_mode = DImode;
5995 else
5996 return false;
5997
5998 /* Try ADD LOGICAL WITH CARRY. */
5999 if (increment == const1_rtx)
6000 {
6001 /* Determine CC mode to use. */
6002 if (cmp_code == EQ || cmp_code == NE)
6003 {
6004 if (cmp_op1 != const0_rtx)
6005 {
6006 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
6007 NULL_RTX, 0, OPTAB_WIDEN);
6008 cmp_op1 = const0_rtx;
6009 }
6010
6011 cmp_code = cmp_code == EQ ? LEU : GTU;
6012 }
6013
6014 if (cmp_code == LTU || cmp_code == LEU)
6015 {
6016 rtx tem = cmp_op0;
6017 cmp_op0 = cmp_op1;
6018 cmp_op1 = tem;
6019 cmp_code = swap_condition (cmp_code);
6020 }
6021
6022 switch (cmp_code)
6023 {
6024 case GTU:
6025 cc_mode = CCUmode;
6026 break;
6027
6028 case GEU:
6029 cc_mode = CCL3mode;
6030 break;
6031
6032 default:
6033 return false;
6034 }
6035
6036 /* Emit comparison instruction pattern. */
6037 if (!register_operand (cmp_op0, cmp_mode))
6038 cmp_op0 = force_reg (cmp_mode, cmp_op0);
6039
f7df4a84 6040 insn = gen_rtx_SET (gen_rtx_REG (cc_mode, CC_REGNUM),
6041 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
6042 /* We use insn_invalid_p here to add clobbers if required. */
57ac4c34 6043 ret = insn_invalid_p (emit_insn (insn), false);
8d933e31 6044 gcc_assert (!ret);
6045
6046 /* Emit ALC instruction pattern. */
6047 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
6048 gen_rtx_REG (cc_mode, CC_REGNUM),
6049 const0_rtx);
6050
6051 if (src != const0_rtx)
6052 {
6053 if (!register_operand (src, GET_MODE (dst)))
6054 src = force_reg (GET_MODE (dst), src);
6055
6056 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, src);
6057 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, const0_rtx);
6058 }
6059
6060 p = rtvec_alloc (2);
f4aa3848 6061 RTVEC_ELT (p, 0) =
f7df4a84 6062 gen_rtx_SET (dst, op_res);
f4aa3848 6063 RTVEC_ELT (p, 1) =
6064 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
6065 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
6066
6067 return true;
6068 }
6069
6070 /* Try SUBTRACT LOGICAL WITH BORROW. */
6071 if (increment == constm1_rtx)
6072 {
6073 /* Determine CC mode to use. */
6074 if (cmp_code == EQ || cmp_code == NE)
6075 {
6076 if (cmp_op1 != const0_rtx)
6077 {
6078 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
6079 NULL_RTX, 0, OPTAB_WIDEN);
6080 cmp_op1 = const0_rtx;
6081 }
6082
6083 cmp_code = cmp_code == EQ ? LEU : GTU;
6084 }
6085
6086 if (cmp_code == GTU || cmp_code == GEU)
6087 {
6088 rtx tem = cmp_op0;
6089 cmp_op0 = cmp_op1;
6090 cmp_op1 = tem;
6091 cmp_code = swap_condition (cmp_code);
6092 }
6093
6094 switch (cmp_code)
6095 {
6096 case LEU:
6097 cc_mode = CCUmode;
6098 break;
6099
6100 case LTU:
6101 cc_mode = CCL3mode;
6102 break;
6103
6104 default:
6105 return false;
6106 }
6107
6108 /* Emit comparison instruction pattern. */
6109 if (!register_operand (cmp_op0, cmp_mode))
6110 cmp_op0 = force_reg (cmp_mode, cmp_op0);
6111
f7df4a84 6112 insn = gen_rtx_SET (gen_rtx_REG (cc_mode, CC_REGNUM),
6113 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
6114 /* We use insn_invalid_p here to add clobbers if required. */
57ac4c34 6115 ret = insn_invalid_p (emit_insn (insn), false);
8d933e31 6116 gcc_assert (!ret);
6117
6118 /* Emit SLB instruction pattern. */
6119 if (!register_operand (src, GET_MODE (dst)))
6120 src = force_reg (GET_MODE (dst), src);
6121
6122 op_res = gen_rtx_MINUS (GET_MODE (dst),
6123 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
6124 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
6125 gen_rtx_REG (cc_mode, CC_REGNUM),
6126 const0_rtx));
6127 p = rtvec_alloc (2);
f4aa3848 6128 RTVEC_ELT (p, 0) =
f7df4a84 6129 gen_rtx_SET (dst, op_res);
f4aa3848 6130 RTVEC_ELT (p, 1) =
6131 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
6132 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
6133
6134 return true;
6135 }
6136
6137 return false;
6138}
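/* The payoff of the transformation above, in C terms (illustrative):
   a conditional increment compiles to a compare that leaves the
   carry in CC followed by an ALC that folds it in; no branch is
   needed.  */
static unsigned int
addcc_sketch (unsigned int a, unsigned int b, unsigned int c)
{
  /* if (a < b) c++;  ==>  compare, then c = c + 0 + carry.  */
  return c + (a < b ? 1u : 0u);
}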
6139
963fc8d0 6140/* Expand code for the insv template. Return true if successful. */
6fa05db6 6141
963fc8d0 6142bool
6143s390_expand_insv (rtx dest, rtx op1, rtx op2, rtx src)
6144{
6145 int bitsize = INTVAL (op1);
6146 int bitpos = INTVAL (op2);
6147 machine_mode mode = GET_MODE (dest);
6148 machine_mode smode;
6149 int smode_bsize, mode_bsize;
6150 rtx op, clobber;
6fa05db6 6151
a9d46e32 6152 if (bitsize + bitpos > GET_MODE_BITSIZE (mode))
6153 return false;
6154
6155 /* Generate INSERT IMMEDIATE (IILL et al). */
6156 /* (set (ze (reg)) (const_int)). */
6157 if (TARGET_ZARCH
6158 && register_operand (dest, word_mode)
6159 && (bitpos % 16) == 0
6160 && (bitsize % 16) == 0
6161 && const_int_operand (src, VOIDmode))
963fc8d0 6162 {
6163 HOST_WIDE_INT val = INTVAL (src);
6164 int regpos = bitpos + bitsize;
963fc8d0 6165
6166 while (regpos > bitpos)
6167 {
ef4bddc2 6168 machine_mode putmode;
6169 int putsize;
6170
6171 if (TARGET_EXTIMM && (regpos % 32 == 0) && (regpos >= bitpos + 32))
6172 putmode = SImode;
6173 else
6174 putmode = HImode;
963fc8d0 6175
6176 putsize = GET_MODE_BITSIZE (putmode);
6177 regpos -= putsize;
6178 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
6179 GEN_INT (putsize),
6180 GEN_INT (regpos)),
6181 gen_int_mode (val, putmode));
6182 val >>= putsize;
6183 }
6184 gcc_assert (regpos == bitpos);
6185 return true;
6186 }
6187
f67f4dff 6188 smode = smallest_int_mode_for_size (bitsize);
6189 smode_bsize = GET_MODE_BITSIZE (smode);
6190 mode_bsize = GET_MODE_BITSIZE (mode);
6fa05db6 6191
78ce265b 6192 /* Generate STORE CHARACTERS UNDER MASK (STCM et al). */
6fa05db6 6193 if (bitpos == 0
6194 && (bitsize % BITS_PER_UNIT) == 0
6195 && MEM_P (dest)
6196 && (register_operand (src, word_mode)
6197 || const_int_operand (src, VOIDmode)))
6198 {
6199 /* Emit standard pattern if possible. */
78ce265b
RH
6200 if (smode_bsize == bitsize)
6201 {
6202 emit_move_insn (adjust_address (dest, smode, 0),
6203 gen_lowpart (smode, src));
6204 return true;
6205 }
6fa05db6
AS
6206
6207 /* (set (ze (mem)) (const_int)). */
6208 else if (const_int_operand (src, VOIDmode))
6209 {
6210 int size = bitsize / BITS_PER_UNIT;
6211 rtx src_mem = adjust_address (force_const_mem (word_mode, src),
6212 BLKmode,
6213 UNITS_PER_WORD - size);
6214
6215 dest = adjust_address (dest, BLKmode, 0);
f5541398 6216 set_mem_size (dest, size);
6fa05db6 6217 s390_expand_movmem (dest, src_mem, GEN_INT (size));
78ce265b 6218 return true;
6fa05db6 6219 }
f4aa3848 6220
6221 /* (set (ze (mem)) (reg)). */
6222 else if (register_operand (src, word_mode))
6223 {
78ce265b 6224 if (bitsize <= 32)
6225 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, op1,
6226 const0_rtx), src);
6227 else
6228 {
6229 /* Emit st,stcmh sequence. */
78ce265b 6230 int stcmh_width = bitsize - 32;
6231 int size = stcmh_width / BITS_PER_UNIT;
6232
f4aa3848 6233 emit_move_insn (adjust_address (dest, SImode, size),
6fa05db6 6234 gen_lowpart (SImode, src));
f5541398 6235 set_mem_size (dest, size);
6236 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
6237 GEN_INT (stcmh_width),
6238 const0_rtx),
6239 gen_rtx_LSHIFTRT (word_mode, src, GEN_INT (32)));
6fa05db6 6240 }
78ce265b 6241 return true;
6fa05db6 6242 }
78ce265b 6243 }
6fa05db6 6244
6245 /* Generate INSERT CHARACTERS UNDER MASK (IC, ICM et al). */
6246 if ((bitpos % BITS_PER_UNIT) == 0
6247 && (bitsize % BITS_PER_UNIT) == 0
6248 && (bitpos & 32) == ((bitpos + bitsize - 1) & 32)
6249 && MEM_P (src)
6250 && (mode == DImode || mode == SImode)
6251 && register_operand (dest, mode))
6252 {
6253 /* Emit a strict_low_part pattern if possible. */
6254 if (smode_bsize == bitsize && bitpos == mode_bsize - smode_bsize)
6255 {
6256 op = gen_rtx_STRICT_LOW_PART (VOIDmode, gen_lowpart (smode, dest));
f7df4a84 6257 op = gen_rtx_SET (op, gen_lowpart (smode, src));
6258 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
6259 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clobber)));
6260 return true;
6261 }
6262
6263 /* ??? There are more powerful versions of ICM that are not
6264 completely represented in the md file. */
6265 }
6266
6267 /* For z10, generate ROTATE THEN INSERT SELECTED BITS (RISBG et al). */
6268 if (TARGET_Z10 && (mode == DImode || mode == SImode))
6fa05db6 6269 {
ef4bddc2 6270 machine_mode mode_s = GET_MODE (src);
6fa05db6 6271
e7067fcd 6272 if (CONSTANT_P (src))
6fa05db6 6273 {
6274 /* For constant zero values the representation with AND
6275 appears to be folded in more situations than the (set
6276 (zero_extract) ...).
6277 We only do this when the start and end of the bitfield
6278 remain in the same SImode chunk. That way nihf or nilf
6279 can be used.
6280 The AND patterns might still generate a risbg for this. */
6281 if (src == const0_rtx && bitpos / 32 == (bitpos + bitsize - 1) / 32)
6282 return false;
6283 else
6284 src = force_reg (mode, src);
6285 }
6286 else if (mode_s != mode)
6287 {
6288 gcc_assert (GET_MODE_BITSIZE (mode_s) >= bitsize);
6289 src = force_reg (mode_s, src);
6290 src = gen_lowpart (mode, src);
6291 }
6fa05db6 6292
0d8e4dac 6293 op = gen_rtx_ZERO_EXTRACT (mode, dest, op1, op2),
f7df4a84 6294 op = gen_rtx_SET (op, src);
6295
6296 if (!TARGET_ZEC12)
6297 {
6298 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
6299 op = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clobber));
6300 }
6301 emit_insn (op);
6fa05db6 6302
6303 return true;
6304 }
6305
6306 return false;
6307}
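/* Reference semantics of the bit-field insertion the function above
   implements, as plain C (illustrative; bit numbering is LSB-based
   here for readability, while the RTL zero_extract numbering
   depends on BITS_BIG_ENDIAN).  */
#include <stdint.h>
static uint64_t
insv_sketch (uint64_t dest, int bitsize, int bitpos, uint64_t src)
{
  uint64_t field = (bitsize < 64
                    ? (UINT64_C (1) << bitsize) - 1
                    : ~UINT64_C (0));
  return (dest & ~(field << bitpos)) | ((src & field) << bitpos);
}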
5d880bd2 6308
6309/* A subroutine of s390_expand_cs_hqi and s390_expand_atomic which returns a
6310 register that holds VAL of mode MODE shifted by COUNT bits. */
6311
6312static inline rtx
ef4bddc2 6313s390_expand_mask_and_shift (rtx val, machine_mode mode, rtx count)
6314{
6315 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
6316 NULL_RTX, 1, OPTAB_DIRECT);
f4aa3848 6317 return expand_simple_binop (SImode, ASHIFT, val, count,
6318 NULL_RTX, 1, OPTAB_DIRECT);
6319}
6320
6321/* Generate a vector comparison COND of CMP_OP1 and CMP_OP2 and store
6322 the result in TARGET. */
6323
6324void
6325s390_expand_vec_compare (rtx target, enum rtx_code cond,
6326 rtx cmp_op1, rtx cmp_op2)
6327{
6328 machine_mode mode = GET_MODE (target);
6329 bool neg_p = false, swap_p = false;
6330 rtx tmp;
6331
2de2b3f9 6332 if (GET_MODE_CLASS (GET_MODE (cmp_op1)) == MODE_VECTOR_FLOAT)
6333 {
6334 switch (cond)
6335 {
6336 /* NE a != b -> !(a == b) */
6337 case NE: cond = EQ; neg_p = true; break;
6338 /* UNGT a u> b -> !(b >= a) */
6339 case UNGT: cond = GE; neg_p = true; swap_p = true; break;
6340 /* UNGE a u>= b -> !(b > a) */
6341 case UNGE: cond = GT; neg_p = true; swap_p = true; break;
6342 /* LE: a <= b -> b >= a */
6343 case LE: cond = GE; swap_p = true; break;
6344 /* UNLE: a u<= b -> !(a > b) */
6345 case UNLE: cond = GT; neg_p = true; break;
6346 /* LT: a < b -> b > a */
6347 case LT: cond = GT; swap_p = true; break;
6348 /* UNLT: a u< b -> !(a >= b) */
6349 case UNLT: cond = GE; neg_p = true; break;
6350 case UNEQ:
6351 emit_insn (gen_vec_cmpuneqv2df (target, cmp_op1, cmp_op2));
6352 return;
6353 case LTGT:
6354 emit_insn (gen_vec_cmpltgtv2df (target, cmp_op1, cmp_op2));
6355 return;
6356 case ORDERED:
6357 emit_insn (gen_vec_orderedv2df (target, cmp_op1, cmp_op2));
6358 return;
6359 case UNORDERED:
6360 emit_insn (gen_vec_unorderedv2df (target, cmp_op1, cmp_op2));
6361 return;
6362 default: break;
6363 }
6364 }
6365 else
6366 {
6367 switch (cond)
6368 {
6369 /* NE: a != b -> !(a == b) */
6370 case NE: cond = EQ; neg_p = true; break;
6371 /* GE: a >= b -> !(b > a) */
6372 case GE: cond = GT; neg_p = true; swap_p = true; break;
6373 /* GEU: a >= b -> !(b > a) */
6374 case GEU: cond = GTU; neg_p = true; swap_p = true; break;
6375 /* LE: a <= b -> !(a > b) */
6376 case LE: cond = GT; neg_p = true; break;
6377 /* LEU: a <= b -> !(a > b) */
6378 case LEU: cond = GTU; neg_p = true; break;
6379 /* LT: a < b -> b > a */
6380 case LT: cond = GT; swap_p = true; break;
6381 /* LTU: a < b -> b > a */
6382 case LTU: cond = GTU; swap_p = true; break;
6383 default: break;
6384 }
6385 }
6386
6387 if (swap_p)
6388 {
6389 tmp = cmp_op1; cmp_op1 = cmp_op2; cmp_op2 = tmp;
6390 }
6391
6392 emit_insn (gen_rtx_SET (target, gen_rtx_fmt_ee (cond,
6393 mode,
6394 cmp_op1, cmp_op2)));
6395 if (neg_p)
6396 emit_insn (gen_rtx_SET (target, gen_rtx_NOT (mode, target)));
6397}
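/* The rewrites above rest on per-element identities that reduce
   every condition to the native compares (==, signed/unsigned >),
   e.g. a >= b is !(b > a) and a < b is b > a.  For one element
   (illustrative):  */
static int
vec_ge_sketch (int a, int b)
{
  return !(b > a);   /* same truth value as a >= b */
}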
6398
6399/* Expand the comparison CODE of CMP1 and CMP2 and copy 1 or 0 into
6400 TARGET if either all (ALL_P is true) or any (ALL_P is false) of the
6401 elements in CMP1 and CMP2 fulfill the comparison.
6402 This function is only used to emit patterns for the vx builtins and
6403 therefore only handles comparison codes required by the
6404 builtins. */
6405void
6406s390_expand_vec_compare_cc (rtx target, enum rtx_code code,
6407 rtx cmp1, rtx cmp2, bool all_p)
6408{
a6a2b532 6409 machine_mode cc_producer_mode, cc_consumer_mode, scratch_mode;
6410 rtx tmp_reg = gen_reg_rtx (SImode);
6411 bool swap_p = false;
6412
6413 if (GET_MODE_CLASS (GET_MODE (cmp1)) == MODE_VECTOR_INT)
6414 {
6415 switch (code)
6416 {
6417 case EQ:
6418 case NE:
6419 cc_producer_mode = CCVEQmode;
6420 break;
6421 case GE:
6422 case LT:
6423 code = swap_condition (code);
6424 swap_p = true;
6425 /* fallthrough */
6426 case GT:
6427 case LE:
6428 cc_producer_mode = CCVIHmode;
6429 break;
6430 case GEU:
6431 case LTU:
6432 code = swap_condition (code);
6433 swap_p = true;
6434 /* fallthrough */
6435 case GTU:
6436 case LEU:
6437 cc_producer_mode = CCVIHUmode;
6438 break;
6439 default:
6440 gcc_unreachable ();
3af82a61 6441 }
a6a2b532 6442
3af82a61 6443 scratch_mode = GET_MODE (cmp1);
6444 /* These codes represent inverted CC interpretations. Inverting
6445 an ALL CC mode results in an ANY CC mode and the other way
6446 around. Invert the all_p flag here to compensate for
6447 that. */
6448 if (code == NE || code == LE || code == LEU)
6449 all_p = !all_p;
6450
6451 cc_consumer_mode = all_p ? CCVIALLmode : CCVIANYmode;
3af82a61 6452 }
a6a2b532 6453 else if (GET_MODE_CLASS (GET_MODE (cmp1)) == MODE_VECTOR_FLOAT)
3af82a61 6454 {
6455 bool inv_p = false;
6456
6457 switch (code)
6458 {
6459 case EQ: cc_producer_mode = CCVEQmode; break;
6460 case NE: cc_producer_mode = CCVEQmode; inv_p = true; break;
6461 case GT: cc_producer_mode = CCVFHmode; break;
6462 case GE: cc_producer_mode = CCVFHEmode; break;
6463 case UNLE: cc_producer_mode = CCVFHmode; inv_p = true; break;
6464 case UNLT: cc_producer_mode = CCVFHEmode; inv_p = true; break;
6465 case LT: cc_producer_mode = CCVFHmode; code = GT; swap_p = true; break;
6466 case LE: cc_producer_mode = CCVFHEmode; code = GE; swap_p = true; break;
6467 default: gcc_unreachable ();
6468 }
6469 scratch_mode = mode_for_vector
6470 (int_mode_for_mode (GET_MODE_INNER (GET_MODE (cmp1))).require (),
6471 GET_MODE_NUNITS (GET_MODE (cmp1)));
6472 gcc_assert (scratch_mode != BLKmode);
6473
6474 if (inv_p)
6475 all_p = !all_p;
6476
6477 cc_consumer_mode = all_p ? CCVFALLmode : CCVFANYmode;
6478 }
6479 else
6480 gcc_unreachable ();
6481
6482 if (swap_p)
6483 {
6484 rtx tmp = cmp2;
6485 cmp2 = cmp1;
6486 cmp1 = tmp;
6487 }
6488
6489 emit_insn (gen_rtx_PARALLEL (VOIDmode,
6490 gen_rtvec (2, gen_rtx_SET (
6491 gen_rtx_REG (cc_producer_mode, CC_REGNUM),
6492 gen_rtx_COMPARE (cc_producer_mode, cmp1, cmp2)),
6493 gen_rtx_CLOBBER (VOIDmode,
6494 gen_rtx_SCRATCH (scratch_mode)))));
6495 emit_move_insn (target, const0_rtx);
6496 emit_move_insn (tmp_reg, const1_rtx);
6497
6498 emit_move_insn (target,
6499 gen_rtx_IF_THEN_ELSE (SImode,
6500 gen_rtx_fmt_ee (code, VOIDmode,
6501 gen_rtx_REG (cc_consumer_mode, CC_REGNUM),
3af82a61 6502 const0_rtx),
a6a2b532 6503 tmp_reg, target));
6504}
6505
6506/* Invert the comparison CODE applied to a CC mode. This is only safe
6507 if we know whether the result was created by a floating point
6508 compare or not. For the CCV modes this is encoded as part of the
6509 mode. */
6510enum rtx_code
6511s390_reverse_condition (machine_mode mode, enum rtx_code code)
6512{
6513 /* Reversal of FP compares takes care -- an ordered compare
6514 becomes an unordered compare and vice versa. */
6515 if (mode == CCVFALLmode || mode == CCVFANYmode)
6516 return reverse_condition_maybe_unordered (code);
6517 else if (mode == CCVIALLmode || mode == CCVIANYmode)
6518 return reverse_condition (code);
6519 else
6520 gcc_unreachable ();
6521}
6522
6523/* Generate a vector comparison expression loading either elements of
6524 THEN or ELS into TARGET depending on the comparison COND of CMP_OP1
6525 and CMP_OP2. */
6526
6527void
6528s390_expand_vcond (rtx target, rtx then, rtx els,
6529 enum rtx_code cond, rtx cmp_op1, rtx cmp_op2)
6530{
6531 rtx tmp;
6532 machine_mode result_mode;
6533 rtx result_target;
6534
6535 machine_mode target_mode = GET_MODE (target);
6536 machine_mode cmp_mode = GET_MODE (cmp_op1);
6537 rtx op = (cond == LT) ? els : then;
6538
6539 /* Try to optimize x < 0 ? -1 : 0 into (signed) x >> 31
6540 and x < 0 ? 1 : 0 into (unsigned) x >> 31. Likewise
6541 for short and byte (x >> 15 and x >> 7 respectively). */
6542 if ((cond == LT || cond == GE)
6543 && target_mode == cmp_mode
6544 && cmp_op2 == CONST0_RTX (cmp_mode)
6545 && op == CONST0_RTX (target_mode)
6546 && s390_vector_mode_supported_p (target_mode)
6547 && GET_MODE_CLASS (target_mode) == MODE_VECTOR_INT)
6548 {
6549 rtx negop = (cond == LT) ? then : els;
6550
6551 int shift = GET_MODE_BITSIZE (GET_MODE_INNER (target_mode)) - 1;
6552
6553 /* if x < 0 ? 1 : 0 or if x >= 0 ? 0 : 1 */
6554 if (negop == CONST1_RTX (target_mode))
6555 {
6556 rtx res = expand_simple_binop (cmp_mode, LSHIFTRT, cmp_op1,
6557 GEN_INT (shift), target,
6558 1, OPTAB_DIRECT);
6559 if (res != target)
6560 emit_move_insn (target, res);
6561 return;
6562 }
6563
6564 /* if x < 0 ? -1 : 0 or if x >= 0 ? 0 : -1 */
916c0f6a 6565 else if (all_ones_operand (negop, target_mode))
6566 {
6567 rtx res = expand_simple_binop (cmp_mode, ASHIFTRT, cmp_op1,
6568 GEN_INT (shift), target,
6569 0, OPTAB_DIRECT);
6570 if (res != target)
6571 emit_move_insn (target, res);
6572 return;
6573 }
6574 }
6575
6576 /* We always use an integral type vector to hold the comparison
6577 result. */
6578 result_mode = mode_for_vector
6579 (int_mode_for_mode (GET_MODE_INNER (cmp_mode)).require (),
6580 GET_MODE_NUNITS (cmp_mode));
6581 result_target = gen_reg_rtx (result_mode);
6582
6583 /* We allow vector immediates as comparison operands that
6584 can be handled by the optimization above but not by the
6585 following code. Hence, force them into registers here. */
085261c8 6586 if (!REG_P (cmp_op1))
d0a859e6 6587 cmp_op1 = force_reg (GET_MODE (cmp_op1), cmp_op1);
6588
6589 if (!REG_P (cmp_op2))
d0a859e6 6590 cmp_op2 = force_reg (GET_MODE (cmp_op2), cmp_op2);
6591
6592 s390_expand_vec_compare (result_target, cond,
6593 cmp_op1, cmp_op2);
6594
6595 /* If the results are supposed to be either -1 or 0 we are done
6596 since this is what our compare instructions generate anyway. */
916c0f6a 6597 if (all_ones_operand (then, GET_MODE (then))
6598 && const0_operand (els, GET_MODE (els)))
6599 {
d958ae79 6600 emit_move_insn (target, gen_rtx_SUBREG (target_mode,
6601 result_target, 0));
6602 return;
6603 }
6604
6605 /* Otherwise we will do a vsel afterwards. */
6606 /* This gets triggered e.g.
6607 with gcc.c-torture/compile/pr53410-1.c */
6608 if (!REG_P (then))
d958ae79 6609 then = force_reg (target_mode, then);
6610
6611 if (!REG_P (els))
d958ae79 6612 els = force_reg (target_mode, els);
6613
6614 tmp = gen_rtx_fmt_ee (EQ, VOIDmode,
6615 result_target,
6616 CONST0_RTX (result_mode));
6617
6618 /* We compared the result against zero above so we have to swap then
6619 and els here. */
d958ae79 6620 tmp = gen_rtx_IF_THEN_ELSE (target_mode, tmp, els, then);
085261c8 6621
d958ae79 6622 gcc_assert (target_mode == GET_MODE (then));
6623 emit_insn (gen_rtx_SET (target, tmp));
6624}
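/* The special case above in plain C (illustrative): for a signed
   element x the mask x < 0 ? -1 : 0 is just an arithmetic right
   shift by the sign-bit position, and x < 0 ? 1 : 0 a logical one;
   no compare or select is emitted.  Shown for 32-bit elements,
   assuming the usual arithmetic >> for signed values.  */
#include <stdint.h>
static int32_t
signbit_mask_sketch (int32_t x)
{
  return x >> 31;            /* -1 if x < 0, else 0 */
}
static uint32_t
signbit_bit_sketch (uint32_t x)
{
  return x >> 31;            /* 1 if the sign bit was set */
}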
6625
6626/* Emit the RTX necessary to initialize the vector TARGET with values
6627 in VALS. */
6628void
6629s390_expand_vec_init (rtx target, rtx vals)
6630{
6631 machine_mode mode = GET_MODE (target);
6632 machine_mode inner_mode = GET_MODE_INNER (mode);
6633 int n_elts = GET_MODE_NUNITS (mode);
6634 bool all_same = true, all_regs = true, all_const_int = true;
6635 rtx x;
6636 int i;
6637
6638 for (i = 0; i < n_elts; ++i)
6639 {
6640 x = XVECEXP (vals, 0, i);
6641
6642 if (!CONST_INT_P (x))
6643 all_const_int = false;
6644
6645 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
6646 all_same = false;
6647
6648 if (!REG_P (x))
6649 all_regs = false;
6650 }
6651
6652 /* Use vector gen mask or vector gen byte mask if possible. */
6653 if (all_same && all_const_int
6654 && (XVECEXP (vals, 0, 0) == const0_rtx
6655 || s390_contiguous_bitmask_vector_p (XVECEXP (vals, 0, 0),
6656 NULL, NULL)
6657 || s390_bytemask_vector_p (XVECEXP (vals, 0, 0), NULL)))
6658 {
6659 emit_insn (gen_rtx_SET (target,
6660 gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0))));
6661 return;
6662 }
6663
6664 if (all_same)
6665 {
6666 emit_insn (gen_rtx_SET (target,
6667 gen_rtx_VEC_DUPLICATE (mode,
6668 XVECEXP (vals, 0, 0))));
6669 return;
6670 }
6671
6672 if (all_regs
6673 && REG_P (target)
6674 && n_elts == 2
6675 && GET_MODE_SIZE (inner_mode) == 8)
6676 {
6677 /* Use vector load pair. */
6678 emit_insn (gen_rtx_SET (target,
6679 gen_rtx_VEC_CONCAT (mode,
6680 XVECEXP (vals, 0, 0),
6681 XVECEXP (vals, 0, 1))));
6682 return;
6683 }
6684
6685 /* Use vector load logical element and zero. */
6686 if (TARGET_VXE && (mode == V4SImode || mode == V4SFmode))
6687 {
6688 bool found = true;
6689
6690 x = XVECEXP (vals, 0, 0);
6691 if (memory_operand (x, inner_mode))
6692 {
6693 for (i = 1; i < n_elts; ++i)
6694 found = found && XVECEXP (vals, 0, i) == const0_rtx;
6695
6696 if (found)
6697 {
6698 machine_mode half_mode = (inner_mode == SFmode
6699 ? V2SFmode : V2SImode);
6700 emit_insn (gen_rtx_SET (target,
6701 gen_rtx_VEC_CONCAT (mode,
6702 gen_rtx_VEC_CONCAT (half_mode,
6703 x,
6704 const0_rtx),
6705 gen_rtx_VEC_CONCAT (half_mode,
6706 const0_rtx,
6707 const0_rtx))));
6708 return;
6709 }
6710 }
6711 }
6712
6713 /* We are about to set the vector elements one by one. Zero out the
6714 full register first in order to help the data flow framework to
6715 detect it as full VR set. */
6716 emit_insn (gen_rtx_SET (target, CONST0_RTX (mode)));
6717
6718 /* Unfortunately the vec_init expander is not allowed to fail. So
6719 we have to implement the fallback ourselves. */
6720 for (i = 0; i < n_elts; i++)
6721 {
6722 rtx elem = XVECEXP (vals, 0, i);
6723 if (!general_operand (elem, GET_MODE (elem)))
6724 elem = force_reg (inner_mode, elem);
6725
6726 emit_insn (gen_rtx_SET (target,
6727 gen_rtx_UNSPEC (mode,
6728 gen_rtvec (3, elem,
6729 GEN_INT (i), target),
6730 UNSPEC_VEC_SET)));
6731 }
6732}
6733
3093f076 6734/* Structure to hold the initial parameters for a compare_and_swap operation
f4aa3848 6735 in HImode and QImode. */
6736
6737struct alignment_context
6738{
f4aa3848 6739 rtx memsi; /* SI aligned memory location. */
6740 rtx shift; /* Bit offset with regard to lsb. */
6741 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */
6742 rtx modemaski; /* ~modemask */
6416ae7f 6743 bool aligned; /* True if memory is aligned, false else. */
6744};
6745
6746/* A subroutine of s390_expand_cs_hqi and s390_expand_atomic to initialize
6747 structure AC for transparently simplifying the access, if the memory
6748 alignment is known to be at least 32 bit. MEM is the memory location for
6749 the actual operation and MODE its mode. */
6750
6751static void
6752init_alignment_context (struct alignment_context *ac, rtx mem,
ef4bddc2 6753 machine_mode mode)
6754{
6755 ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
6756 ac->aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
6757
6758 if (ac->aligned)
6759 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */
6760 else
6761 {
6762 /* Alignment is unknown. */
6763 rtx byteoffset, addr, align;
6764
6765 /* Force the address into a register. */
6766 addr = force_reg (Pmode, XEXP (mem, 0));
6767
6768 /* Align it to SImode. */
6769 align = expand_simple_binop (Pmode, AND, addr,
6770 GEN_INT (-GET_MODE_SIZE (SImode)),
6771 NULL_RTX, 1, OPTAB_DIRECT);
6772 /* Generate MEM. */
6773 ac->memsi = gen_rtx_MEM (SImode, align);
6774 MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
44d64274 6775 set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
6776 set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));
6777
6778 /* Calculate shiftcount. */
6779 byteoffset = expand_simple_binop (Pmode, AND, addr,
6780 GEN_INT (GET_MODE_SIZE (SImode) - 1),
6781 NULL_RTX, 1, OPTAB_DIRECT);
6782 /* As we already have some offset, evaluate the remaining distance. */
6783 ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
6784 NULL_RTX, 1, OPTAB_DIRECT);
3093f076 6785 }
78ce265b 6786
3093f076 6787 /* Shift is the byte count, but we need the bitcount. */
78ce265b
RH
6788 ac->shift = expand_simple_binop (SImode, ASHIFT, ac->shift, GEN_INT (3),
6789 NULL_RTX, 1, OPTAB_DIRECT);
6790
3093f076 6791 /* Calculate masks. */
f4aa3848 6792 ac->modemask = expand_simple_binop (SImode, ASHIFT,
78ce265b
RH
6793 GEN_INT (GET_MODE_MASK (mode)),
6794 ac->shift, NULL_RTX, 1, OPTAB_DIRECT);
6795 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask,
6796 NULL_RTX, 1);
6797}
6798
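/* A worked example (an editorial illustration, not from the original
   sources): for a QImode MEM whose alignment is unknown, the code above
   computes at runtime

     addr      = XEXP (mem, 0) forced into a register
     align     = addr & -4            .. SImode-aligned word address
     byteoff   = addr & 3             .. byte offset inside that word
     shift     = ((4 - 1) - byteoff) * 8
     modemask  = 0xff << shift
     modemaski = ~modemask

   so a byte at byte offset 1 of the word ends up with shift == 16 and
   modemask == 0x00ff0000, i.e. bits 23..16 of the big-endian SImode
   word.  The numbers assume GET_MODE_SIZE (SImode) == 4.  */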
6799/* A subroutine of s390_expand_cs_hqi. Insert INS into VAL. If possible,
6800 use a single insv insn into SEQ2. Otherwise, put prep insns in SEQ1 and
6801 perform the merge in SEQ2. */
6802
6803static rtx
6804s390_two_part_insv (struct alignment_context *ac, rtx *seq1, rtx *seq2,
ef4bddc2 6805 machine_mode mode, rtx val, rtx ins)
6806{
6807 rtx tmp;
6808
6809 if (ac->aligned)
6810 {
6811 start_sequence ();
6812 tmp = copy_to_mode_reg (SImode, val);
6813 if (s390_expand_insv (tmp, GEN_INT (GET_MODE_BITSIZE (mode)),
6814 const0_rtx, ins))
6815 {
6816 *seq1 = NULL;
6817 *seq2 = get_insns ();
6818 end_sequence ();
6819 return tmp;
6820 }
6821 end_sequence ();
6822 }
6823
6824 /* Failed to use insv. Generate a two part shift and mask. */
6825 start_sequence ();
6826 tmp = s390_expand_mask_and_shift (ins, mode, ac->shift);
6827 *seq1 = get_insns ();
6828 end_sequence ();
6829
6830 start_sequence ();
6831 tmp = expand_simple_binop (SImode, IOR, tmp, val, NULL_RTX, 1, OPTAB_DIRECT);
6832 *seq2 = get_insns ();
6833 end_sequence ();
6834
6835 return tmp;
6836}
6837
6838/* Expand an atomic compare and swap operation for HImode and QImode. MEM is
6839 the memory location, CMP the old value to compare MEM with and NEW_RTX the
6840 value to set if CMP == MEM. */
3093f076 6841
03db9ab5 6842static void
ef4bddc2 6843s390_expand_cs_hqi (machine_mode mode, rtx btarget, rtx vtarget, rtx mem,
78ce265b 6844 rtx cmp, rtx new_rtx, bool is_weak)
6845{
6846 struct alignment_context ac;
4e1ffb63 6847 rtx cmpv, newv, val, cc, seq0, seq1, seq2, seq3;
3093f076 6848 rtx res = gen_reg_rtx (SImode);
19f8b229 6849 rtx_code_label *csloop = NULL, *csend = NULL;
3093f076 6850
6851 gcc_assert (MEM_P (mem));
6852
6853 init_alignment_context (&ac, mem, mode);
6854
6855 /* Load full word. Subsequent loads are performed by CS. */
6856 val = expand_simple_binop (SImode, AND, ac.memsi, ac.modemaski,
6857 NULL_RTX, 1, OPTAB_DIRECT);
6858
6859 /* Prepare insertions of cmp and new_rtx into the loaded value. When
6860 possible, we try to use insv to make this happen efficiently. If
6861 that fails we'll generate code both inside and outside the loop. */
6862 cmpv = s390_two_part_insv (&ac, &seq0, &seq2, mode, val, cmp);
6863 newv = s390_two_part_insv (&ac, &seq1, &seq3, mode, val, new_rtx);
6864
6865 if (seq0)
6866 emit_insn (seq0);
6867 if (seq1)
6868 emit_insn (seq1);
6869
3093f076 6870 /* Start CS loop. */
6871 if (!is_weak)
6872 {
6873 /* Begin assuming success. */
6874 emit_move_insn (btarget, const1_rtx);
6875
6876 csloop = gen_label_rtx ();
6877 csend = gen_label_rtx ();
6878 emit_label (csloop);
6879 }
6880
f4aa3848 6881 /* val = "<mem>00..0<mem>"
3093f076 6882 * cmp = "00..0<cmp>00..0"
f4aa3848 6883 * new = "00..0<new>00..0"
6884 */
6885
6886 emit_insn (seq2);
6887 emit_insn (seq3);
6888
03db9ab5 6889 cc = s390_emit_compare_and_swap (EQ, res, ac.memsi, cmpv, newv, CCZ1mode);
6890 if (is_weak)
6891 emit_insn (gen_cstorecc4 (btarget, cc, XEXP (cc, 0), XEXP (cc, 1)));
3093f076 6892 else
3093f076 6893 {
6894 rtx tmp;
6895
6896 /* Jump to end if we're done (likely?). */
6897 s390_emit_jump (csend, cc);
6898
6899 /* Check for changes outside mode, and loop internal if so.
6900 Arrange the moves so that the compare is adjacent to the
6901 branch so that we can generate CRJ. */
6902 tmp = copy_to_reg (val);
6903 force_expand_binop (SImode, and_optab, res, ac.modemaski, val,
6904 1, OPTAB_DIRECT);
6905 cc = s390_emit_compare (NE, val, tmp);
6906 s390_emit_jump (csloop, cc);
6907
6908 /* Failed. */
6909 emit_move_insn (btarget, const0_rtx);
6910 emit_label (csend);
3093f076 6911 }
f4aa3848 6912
3093f076 6913 /* Return the correct part of the bitfield. */
6914 convert_move (vtarget, expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
6915 NULL_RTX, 1, OPTAB_DIRECT), 1);
6916}
6917
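/* Illustration (not from the original sources): in C-like pseudocode
   the strong variant of the loop above behaves as

     word = load (memsi) & modemaski;        .. field bytes cleared
     for (;;)
       {
         oldw = word | (cmp << shift);       .. cmpv
         neww = word | (new_val << shift);   .. newv
         res  = CS (memsi, oldw, neww);      .. compare-and-swap on word
         if (res == oldw)
           return true;                      .. swap performed
         if ((res & modemaski) != word)
           {                                 .. only bystanders changed
             word = res & modemaski;
             continue;                       .. retry, no failure report
           }
         return false;                       .. the field itself differed
       }

   assuming a CS primitive that returns the previous memory contents.  */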
6918/* Variant of s390_expand_cs for SI, DI and TI modes. */
6919static void
6920s390_expand_cs_tdsi (machine_mode mode, rtx btarget, rtx vtarget, rtx mem,
6921 rtx cmp, rtx new_rtx, bool is_weak)
6922{
6923 rtx output = vtarget;
6924 rtx_code_label *skip_cs_label = NULL;
6925 bool do_const_opt = false;
6926
6927 if (!register_operand (output, mode))
6928 output = gen_reg_rtx (mode);
6929
6930 /* If IS_WEAK is true and the INPUT value is a constant, compare the memory
 6931 with the constant first and skip the compare_and_swap because it's very
 6932 expensive and likely to fail anyway.
 6933 Note 1: This is done only for IS_WEAK. C11 allows optimizations that may
 6934 cause spurious failures in that case.
6935 Note 2: It may be useful to do this also for non-constant INPUT.
6936 Note 3: Currently only targets with "load on condition" are supported
6937 (z196 and newer). */
6938
6939 if (TARGET_Z196
6940 && (mode == SImode || mode == DImode))
6941 do_const_opt = (is_weak && CONST_INT_P (cmp));
6942
6943 if (do_const_opt)
6944 {
6945 rtx cc = gen_rtx_REG (CCZmode, CC_REGNUM);
6946
6947 skip_cs_label = gen_label_rtx ();
6948 emit_move_insn (btarget, const0_rtx);
6949 if (CONST_INT_P (cmp) && INTVAL (cmp) == 0)
6950 {
6951 rtvec lt = rtvec_alloc (2);
6952
6953 /* Load-and-test + conditional jump. */
6954 RTVEC_ELT (lt, 0)
6955 = gen_rtx_SET (cc, gen_rtx_COMPARE (CCZmode, mem, cmp));
6956 RTVEC_ELT (lt, 1) = gen_rtx_SET (output, mem);
6957 emit_insn (gen_rtx_PARALLEL (VOIDmode, lt));
6958 }
6959 else
6960 {
6961 emit_move_insn (output, mem);
6962 emit_insn (gen_rtx_SET (cc, gen_rtx_COMPARE (CCZmode, output, cmp)));
6963 }
6964 s390_emit_jump (skip_cs_label, gen_rtx_NE (VOIDmode, cc, const0_rtx));
6965 add_reg_br_prob_note (get_last_insn (),
6966 profile_probability::very_unlikely ());
6967 /* If the jump is not taken, OUTPUT is the expected value. */
6968 cmp = output;
6969 /* Reload newval to a register manually, *after* the compare and jump
6970 above. Otherwise Reload might place it before the jump. */
6971 }
6972 else
6973 cmp = force_reg (mode, cmp);
6974 new_rtx = force_reg (mode, new_rtx);
6975 s390_emit_compare_and_swap (EQ, output, mem, cmp, new_rtx,
6976 (do_const_opt) ? CCZmode : CCZ1mode);
6977 if (skip_cs_label != NULL)
6978 emit_label (skip_cs_label);
6979
6980 /* We deliberately accept non-register operands in the predicate
6981 to ensure the write back to the output operand happens *before*
6982 the store-flags code below. This makes it easier for combine
6983 to merge the store-flags code with a potential test-and-branch
6984 pattern following (immediately!) afterwards. */
6985 if (output != vtarget)
6986 emit_move_insn (vtarget, output);
6987
6988 if (do_const_opt)
6989 {
6990 rtx cc, cond, ite;
6991
6992 /* Do not use gen_cstorecc4 here because it writes either 1 or 0, but
6993 btarget has already been initialized with 0 above. */
6994 cc = gen_rtx_REG (CCZmode, CC_REGNUM);
6995 cond = gen_rtx_EQ (VOIDmode, cc, const0_rtx);
6996 ite = gen_rtx_IF_THEN_ELSE (SImode, cond, const1_rtx, btarget);
6997 emit_insn (gen_rtx_SET (btarget, ite));
6998 }
6999 else
7000 {
7001 rtx cc, cond;
7002
7003 cc = gen_rtx_REG (CCZ1mode, CC_REGNUM);
7004 cond = gen_rtx_EQ (SImode, cc, const0_rtx);
7005 emit_insn (gen_cstorecc4 (btarget, cond, cc, const0_rtx));
7006 }
7007}
7008
7009/* Expand an atomic compare and swap operation. MEM is the memory location,
7010 CMP the old value to compare MEM with and NEW_RTX the value to set if
7011 CMP == MEM. */
7012
7013void
7014s390_expand_cs (machine_mode mode, rtx btarget, rtx vtarget, rtx mem,
7015 rtx cmp, rtx new_rtx, bool is_weak)
7016{
7017 switch (mode)
7018 {
7019 case E_TImode:
7020 case E_DImode:
7021 case E_SImode:
7022 s390_expand_cs_tdsi (mode, btarget, vtarget, mem, cmp, new_rtx, is_weak);
7023 break;
7024 case E_HImode:
7025 case E_QImode:
7026 s390_expand_cs_hqi (mode, btarget, vtarget, mem, cmp, new_rtx, is_weak);
7027 break;
7028 default:
7029 gcc_unreachable ();
7030 }
7031}
7032
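/* Illustration (not from the original sources): this dispatcher sits
   behind the atomic compare-and-swap expanders, so a source-level

     _Bool
     try_claim (unsigned int *slot, unsigned int expected)
     {
       return __atomic_compare_exchange_n (slot, &expected, 1U,
                                           1, __ATOMIC_SEQ_CST,
                                           __ATOMIC_SEQ_CST);
     }

   ends up in the SImode path above; with weak == 1 and a constant
   expected value, on z196 and newer the memory word is tested first and
   the expensive CS instruction is skipped when the test already fails.  */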
7033/* Expand an atomic_exchange operation simulated with a compare-and-swap loop.
7034 The memory location MEM is set to INPUT. OUTPUT is set to the previous value
7035 of MEM. */
7036
7037void
7038s390_expand_atomic_exchange_tdsi (rtx output, rtx mem, rtx input)
7039{
7040 machine_mode mode = GET_MODE (mem);
7041 rtx_code_label *csloop;
7042
7043 if (TARGET_Z196
7044 && (mode == DImode || mode == SImode)
7045 && CONST_INT_P (input) && INTVAL (input) == 0)
7046 {
7047 emit_move_insn (output, const0_rtx);
7048 if (mode == DImode)
7049 emit_insn (gen_atomic_fetch_anddi (output, mem, const0_rtx, input));
7050 else
7051 emit_insn (gen_atomic_fetch_andsi (output, mem, const0_rtx, input));
7052 return;
7053 }
7054
7055 input = force_reg (mode, input);
7056 emit_move_insn (output, mem);
7057 csloop = gen_label_rtx ();
7058 emit_label (csloop);
7059 s390_emit_jump (csloop, s390_emit_compare_and_swap (NE, output, mem, output,
7060 input, CCZ1mode));
7061}
7062
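/* Illustration (not from the original sources): the z196 shortcut above
   turns an exchange with the constant zero, e.g.

     old = __atomic_exchange_n (&word, 0, __ATOMIC_SEQ_CST);

   into an atomic fetch-and-AND with zero (a single interlocked
   load-and-and instruction), which both returns the old value and
   stores zero, avoiding the CS retry loop used by the general path.  */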
45d18331 7063/* Expand an atomic operation CODE of mode MODE. MEM is the memory location
ea2c620c 7064 and VAL the value to play with. If AFTER is true then store the value
 7065 MEM holds after the operation; if AFTER is false then store the value MEM
7066 holds before the operation. If TARGET is zero then discard that value, else
7067 store it to TARGET. */
7068
7069void
ef4bddc2 7070s390_expand_atomic (machine_mode mode, enum rtx_code code,
7071 rtx target, rtx mem, rtx val, bool after)
7072{
7073 struct alignment_context ac;
7074 rtx cmp;
0a2aaacc 7075 rtx new_rtx = gen_reg_rtx (SImode);
45d18331 7076 rtx orig = gen_reg_rtx (SImode);
19f8b229 7077 rtx_code_label *csloop = gen_label_rtx ();
7078
7079 gcc_assert (!target || register_operand (target, VOIDmode));
7080 gcc_assert (MEM_P (mem));
7081
7082 init_alignment_context (&ac, mem, mode);
7083
7084 /* Shift val to the correct bit positions.
7085 Preserve "icm", but prevent "ex icm". */
7086 if (!(ac.aligned && code == SET && MEM_P (val)))
7087 val = s390_expand_mask_and_shift (val, mode, ac.shift);
7088
7089 /* Further preparation insns. */
7090 if (code == PLUS || code == MINUS)
7091 emit_move_insn (orig, val);
7092 else if (code == MULT || code == AND) /* val = "11..1<val>11..1" */
7093 val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
7094 NULL_RTX, 1, OPTAB_DIRECT);
7095
7096 /* Load full word. Subsequent loads are performed by CS. */
7097 cmp = force_reg (SImode, ac.memsi);
7098
7099 /* Start CS loop. */
7100 emit_label (csloop);
0a2aaacc 7101 emit_move_insn (new_rtx, cmp);
7102
7103 /* Patch new with val at correct position. */
7104 switch (code)
7105 {
7106 case PLUS:
7107 case MINUS:
0a2aaacc 7108 val = expand_simple_binop (SImode, code, new_rtx, orig,
7109 NULL_RTX, 1, OPTAB_DIRECT);
7110 val = expand_simple_binop (SImode, AND, val, ac.modemask,
7111 NULL_RTX, 1, OPTAB_DIRECT);
7112 /* FALLTHRU */
f4aa3848 7113 case SET:
45d18331 7114 if (ac.aligned && MEM_P (val))
44e95206 7115 store_bit_field (new_rtx, GET_MODE_BITSIZE (mode), 0,
ee45a32d 7116 0, 0, SImode, val, false);
7117 else
7118 {
0a2aaacc 7119 new_rtx = expand_simple_binop (SImode, AND, new_rtx, ac.modemaski,
45d18331 7120 NULL_RTX, 1, OPTAB_DIRECT);
0a2aaacc 7121 new_rtx = expand_simple_binop (SImode, IOR, new_rtx, val,
7122 NULL_RTX, 1, OPTAB_DIRECT);
7123 }
7124 break;
7125 case AND:
7126 case IOR:
7127 case XOR:
0a2aaacc 7128 new_rtx = expand_simple_binop (SImode, code, new_rtx, val,
7129 NULL_RTX, 1, OPTAB_DIRECT);
7130 break;
7131 case MULT: /* NAND */
0a2aaacc 7132 new_rtx = expand_simple_binop (SImode, AND, new_rtx, val,
45d18331 7133 NULL_RTX, 1, OPTAB_DIRECT);
7134 new_rtx = expand_simple_binop (SImode, XOR, new_rtx, ac.modemask,
7135 NULL_RTX, 1, OPTAB_DIRECT);
7136 break;
7137 default:
7138 gcc_unreachable ();
7139 }
45d18331 7140
8bb501bb 7141 s390_emit_jump (csloop, s390_emit_compare_and_swap (NE, cmp,
7142 ac.memsi, cmp, new_rtx,
7143 CCZ1mode));
7144
7145 /* Return the correct part of the bitfield. */
7146 if (target)
7147 convert_move (target, expand_simple_binop (SImode, LSHIFTRT,
0a2aaacc 7148 after ? new_rtx : cmp, ac.shift,
7149 NULL_RTX, 1, OPTAB_DIRECT), 1);
7150}
7151
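/* Illustration (not from the original sources): for CODE == AND on a
   QImode field the preparation above produces "11..1<val>11..1", i.e.

     val  = (val << shift) & modemask;    .. field in place, rest zero
     val ^= modemaski;                    .. bystander bits forced to one
     neww = oldw & val;                   .. ANDs the field, keeps the rest

   while PLUS and MINUS instead mask the sum with modemask afterwards so
   that a carry out of the field cannot leak into neighbouring bytes.  */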
fdbe66f2 7152/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
7153 We need to emit DTP-relative relocations. */
7154
7155static void s390_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
7156
7157static void
9c808aad 7158s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
7159{
7160 switch (size)
7161 {
7162 case 4:
7163 fputs ("\t.long\t", file);
7164 break;
7165 case 8:
7166 fputs ("\t.quad\t", file);
7167 break;
7168 default:
8d933e31 7169 gcc_unreachable ();
7170 }
7171 output_addr_const (file, x);
7172 fputs ("@DTPOFF", file);
7173}
7174
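/* For example (an illustration, not from the original sources), for
   SIZE == 8 and a symbol X named foo the hook above emits

     .quad   foo@DTPOFF

   which resolves to foo's offset within the thread-local storage
   block.  */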
7175/* Return the proper mode for REGNO being represented in the dwarf
7176 unwind table. */
7177machine_mode
7178s390_dwarf_frame_reg_mode (int regno)
7179{
7180 machine_mode save_mode = default_dwarf_frame_reg_mode (regno);
7181
7182 /* Make sure not to return DImode for any GPR with -m31 -mzarch. */
7183 if (GENERAL_REGNO_P (regno))
7184 save_mode = Pmode;
7185
7186 /* The rightmost 64 bits of vector registers are call-clobbered. */
7187 if (GET_MODE_SIZE (save_mode) > 8)
7188 save_mode = DImode;
7189
7190 return save_mode;
7191}
7192
7269aee7 7193#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
608063c3 7194/* Implement TARGET_MANGLE_TYPE. */
7195
7196static const char *
3101faab 7197s390_mangle_type (const_tree type)
7269aee7 7198{
7199 type = TYPE_MAIN_VARIANT (type);
7200
7201 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
7202 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
7203 return NULL;
7204
7205 if (type == s390_builtin_types[BT_BV16QI]) return "U6__boolc";
7206 if (type == s390_builtin_types[BT_BV8HI]) return "U6__bools";
7207 if (type == s390_builtin_types[BT_BV4SI]) return "U6__booli";
7208 if (type == s390_builtin_types[BT_BV2DI]) return "U6__booll";
7209
7210 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
7211 && TARGET_LONG_DOUBLE_128)
7212 return "g";
7213
7214 /* For all other types, use normal C++ mangling. */
7215 return NULL;
7216}
7217#endif
7218
4c8c0dec 7219/* In the name of slightly smaller debug output, and to cater to
aabcd309 7220 general assembler lossage, recognize various UNSPEC sequences
7221 and turn them back into a direct symbol reference. */
7222
69bd9368 7223static rtx
9c808aad 7224s390_delegitimize_address (rtx orig_x)
4c8c0dec 7225{
e8d8f497 7226 rtx x, y;
4c8c0dec 7227
7228 orig_x = delegitimize_mem_from_attrs (orig_x);
7229 x = orig_x;
7230
7231 /* Extract the symbol ref from:
7232 (plus:SI (reg:SI 12 %r12)
7233 (const:SI (unspec:SI [(symbol_ref/f:SI ("*.LC0"))]
7234 UNSPEC_GOTOFF/PLTOFF)))
7235 and
7236 (plus:SI (reg:SI 12 %r12)
7237 (const:SI (plus:SI (unspec:SI [(symbol_ref:SI ("L"))]
7238 UNSPEC_GOTOFF/PLTOFF)
7239 (const_int 4 [0x4])))) */
7240 if (GET_CODE (x) == PLUS
7241 && REG_P (XEXP (x, 0))
7242 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM
7243 && GET_CODE (XEXP (x, 1)) == CONST)
7244 {
7245 HOST_WIDE_INT offset = 0;
7246
7247 /* The const operand. */
7248 y = XEXP (XEXP (x, 1), 0);
7249
7250 if (GET_CODE (y) == PLUS
7251 && GET_CODE (XEXP (y, 1)) == CONST_INT)
7252 {
7253 offset = INTVAL (XEXP (y, 1));
7254 y = XEXP (y, 0);
7255 }
01841ac0 7256
e101e12e 7257 if (GET_CODE (y) == UNSPEC
7258 && (XINT (y, 1) == UNSPEC_GOTOFF
7259 || XINT (y, 1) == UNSPEC_PLTOFF))
0a81f074 7260 return plus_constant (Pmode, XVECEXP (y, 0, 0), offset);
7261 }
7262
7263 if (GET_CODE (x) != MEM)
7264 return orig_x;
7265
7266 x = XEXP (x, 0);
7267 if (GET_CODE (x) == PLUS
7268 && GET_CODE (XEXP (x, 1)) == CONST
7269 && GET_CODE (XEXP (x, 0)) == REG
7270 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
7271 {
7272 y = XEXP (XEXP (x, 1), 0);
7273 if (GET_CODE (y) == UNSPEC
fd7643fb 7274 && XINT (y, 1) == UNSPEC_GOT)
7275 y = XVECEXP (y, 0, 0);
7276 else
7277 return orig_x;
4c8c0dec 7278 }
67a2f76d 7279 else if (GET_CODE (x) == CONST)
4c8c0dec 7280 {
7281 /* Extract the symbol ref from:
7282 (mem:QI (const:DI (unspec:DI [(symbol_ref:DI ("foo"))]
7283 UNSPEC_PLT/GOTENT))) */
7284
7285 y = XEXP (x, 0);
7286 if (GET_CODE (y) == UNSPEC
7287 && (XINT (y, 1) == UNSPEC_GOTENT
7288 || XINT (y, 1) == UNSPEC_PLT))
7289 y = XVECEXP (y, 0, 0);
7290 else
7291 return orig_x;
4c8c0dec 7292 }
7293 else
7294 return orig_x;
4c8c0dec 7295
7296 if (GET_MODE (orig_x) != Pmode)
7297 {
7298 if (GET_MODE (orig_x) == BLKmode)
7299 return orig_x;
7300 y = lowpart_subreg (GET_MODE (orig_x), y, Pmode);
7301 if (y == NULL_RTX)
7302 return orig_x;
7303 }
7304 return y;
4c8c0dec 7305}
ba956982 7306
7307/* Output operand OP to stdio stream FILE.
7308 OP is an address (register + offset) which is not used to address data;
7309 instead the rightmost bits are interpreted as the value. */
7310
7311static void
dd95128b 7312print_addrstyle_operand (FILE *file, rtx op)
ac32b25e 7313{
7314 HOST_WIDE_INT offset;
7315 rtx base;
f83a336d 7316
d98ad410 7317 /* Extract base register and offset. */
dd95128b 7318 if (!s390_decompose_addrstyle_without_index (op, &base, &offset))
d98ad410 7319 gcc_unreachable ();
7320
7321 /* Sanity check. */
d98ad410 7322 if (base)
8d933e31 7323 {
7324 gcc_assert (GET_CODE (base) == REG);
7325 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
7326 gcc_assert (REGNO_REG_CLASS (REGNO (base)) == ADDR_REGS);
8d933e31 7327 }
ac32b25e 7328
 7329 /* Offsets are restricted to twelve bits. */
7330 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & ((1 << 12) - 1));
7331 if (base)
7332 fprintf (file, "(%s)", reg_names[REGNO (base)]);
7333}
7334
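/* For example (an illustration, not from the original sources), for an
   operand of the form (plus (reg %r1) (const_int 4097)) the function
   above masks the offset to twelve bits and prints

     1(%r1)

   as used for shift counts and setmem operands.  */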
7335/* Assigns the number of NOP halfwords to be emitted before and after the
7336 function label to *HW_BEFORE and *HW_AFTER. Both pointers must not be NULL.
7337 If hotpatching is disabled for the function, the values are set to zero.
7338*/
d0de9e13 7339
2d38d809 7340static void
7341s390_function_num_hotpatch_hw (tree decl,
7342 int *hw_before,
7343 int *hw_after)
7344{
7345 tree attr;
7346
7347 attr = lookup_attribute ("hotpatch", DECL_ATTRIBUTES (decl));
7348
7349 /* Handle the arguments of the hotpatch attribute. The values
7350 specified via attribute might override the cmdline argument
7351 values. */
7352 if (attr)
d0de9e13 7353 {
7354 tree args = TREE_VALUE (attr);
7355
7356 *hw_before = TREE_INT_CST_LOW (TREE_VALUE (args));
7357 *hw_after = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args)));
d0de9e13 7358 }
94091f43 7359 else
d0de9e13 7360 {
7361 /* Use the values specified by the cmdline arguments. */
7362 *hw_before = s390_hotpatch_hw_before_label;
7363 *hw_after = s390_hotpatch_hw_after_label;
d0de9e13 7364 }
7365}
7366
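/* Illustration (not from the original sources): the attribute read above
   is written as

     void f (void) __attribute__ ((hotpatch (1, 2)));

   requesting one halfword of NOPs before the label of f and two after
   it, which takes precedence over the -mhotpatch= command-line value.  */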
7367/* Write the current .machine and .machinemode specification to the assembler
7368 file. */
7369
7763d972 7370#ifdef HAVE_AS_MACHINE_MACHINEMODE
7371static void
7372s390_asm_output_machine_for_arch (FILE *asm_out_file)
7373{
7374 fprintf (asm_out_file, "\t.machinemode %s\n",
7375 (TARGET_ZARCH) ? "zarch" : "esa");
7376 fprintf (asm_out_file, "\t.machine \"%s",
7377 processor_table[s390_arch].binutils_name);
7378 if (S390_USE_ARCHITECTURE_MODIFIERS)
7379 {
7380 int cpu_flags;
7381
7382 cpu_flags = processor_flags_table[(int) s390_arch];
7383 if (TARGET_HTM && !(cpu_flags & PF_TX))
7384 fprintf (asm_out_file, "+htm");
7385 else if (!TARGET_HTM && (cpu_flags & PF_TX))
7386 fprintf (asm_out_file, "+nohtm");
7387 if (TARGET_VX && !(cpu_flags & PF_VX))
7388 fprintf (asm_out_file, "+vx");
7389 else if (!TARGET_VX && (cpu_flags & PF_VX))
7390 fprintf (asm_out_file, "+novx");
7391 }
7392 fprintf (asm_out_file, "\"\n");
7393}
7394
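/* For example (an illustration, not from the original sources),
   compiling with -march=z196 -mzarch -mhtm on an assembler that
   understands architecture modifiers would make the function above print

     .machinemode zarch
     .machine "z196+htm"

   since the z196 flags themselves lack the transactional-execution
   facility; the exact CPU string is taken from processor_table.  */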
7395/* Write an extra function header before the very start of the function. */
7396
7397void
7398s390_asm_output_function_prefix (FILE *asm_out_file,
7399 const char *fnname ATTRIBUTE_UNUSED)
7400{
7401 if (DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl) == NULL)
7402 return;
7403 /* Since only the function specific options are saved but not the indications
7404 which options are set, it's too much work here to figure out which options
7405 have actually changed. Thus, generate .machine and .machinemode whenever a
7406 function has the target attribute or pragma. */
7407 fprintf (asm_out_file, "\t.machinemode push\n");
7408 fprintf (asm_out_file, "\t.machine push\n");
7409 s390_asm_output_machine_for_arch (asm_out_file);
7410}
7411
7412/* Write an extra function footer after the very end of the function. */
7413
7414void
7415s390_asm_declare_function_size (FILE *asm_out_file,
d45ef29e 7416 const char *fnname, tree decl)
ec47b086 7417{
7418 if (!flag_inhibit_size_directive)
7419 ASM_OUTPUT_MEASURED_SIZE (asm_out_file, fnname);
7420 if (DECL_FUNCTION_SPECIFIC_TARGET (decl) == NULL)
7421 return;
7422 fprintf (asm_out_file, "\t.machine pop\n");
7423 fprintf (asm_out_file, "\t.machinemode pop\n");
7424}
7425#endif
7426
7427/* Write the extra assembler code needed to declare a function properly. */
7428
7429void
7430s390_asm_output_function_label (FILE *asm_out_file, const char *fname,
7431 tree decl)
7432{
94091f43 7433 int hw_before, hw_after;
d0de9e13 7434
7435 s390_function_num_hotpatch_hw (decl, &hw_before, &hw_after);
7436 if (hw_before > 0)
d0de9e13 7437 {
f8af0e30 7438 unsigned int function_alignment;
7439 int i;
7440
7441 /* Add a trampoline code area before the function label and initialize it
7442 with two-byte nop instructions. This area can be overwritten with code
7443 that jumps to a patched version of the function. */
4bbc8970 7444 asm_fprintf (asm_out_file, "\tnopr\t%%r0"
7445 "\t# pre-label NOPs for hotpatch (%d halfwords)\n",
7446 hw_before);
7447 for (i = 1; i < hw_before; i++)
4bbc8970 7448 fputs ("\tnopr\t%r0\n", asm_out_file);
2d38d809 7449
7450 /* Note: The function label must be aligned so that (a) the bytes of the
7451 following nop do not cross a cacheline boundary, and (b) a jump address
 7452 (eight bytes for 64-bit targets, four bytes for 32-bit targets) can be
7453 stored directly before the label without crossing a cacheline
7454 boundary. All this is necessary to make sure the trampoline code can
 7455 be changed atomically.
 7456 This alignment is done automatically using the FUNCTION_BOUNDARY, but
7457 if there are NOPs before the function label, the alignment is placed
7458 before them. So it is necessary to duplicate the alignment after the
7459 NOPs. */
7460 function_alignment = MAX (8, DECL_ALIGN (decl) / BITS_PER_UNIT);
7461 if (! DECL_USER_ALIGN (decl))
7462 function_alignment = MAX (function_alignment,
7463 (unsigned int) align_functions);
2d38d809 7464 fputs ("\t# alignment for hotpatch\n", asm_out_file);
f8af0e30 7465 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (function_alignment));
7466 }
7467
7468 if (S390_USE_TARGET_ATTRIBUTE && TARGET_DEBUG_ARG)
7469 {
7470 asm_fprintf (asm_out_file, "\t# fn:%s ar%d\n", fname, s390_arch);
7471 asm_fprintf (asm_out_file, "\t# fn:%s tu%d\n", fname, s390_tune);
7472 asm_fprintf (asm_out_file, "\t# fn:%s sg%d\n", fname, s390_stack_guard);
7473 asm_fprintf (asm_out_file, "\t# fn:%s ss%d\n", fname, s390_stack_size);
7474 asm_fprintf (asm_out_file, "\t# fn:%s bc%d\n", fname, s390_branch_cost);
7475 asm_fprintf (asm_out_file, "\t# fn:%s wf%d\n", fname,
7476 s390_warn_framesize);
7477 asm_fprintf (asm_out_file, "\t# fn:%s ba%d\n", fname, TARGET_BACKCHAIN);
7478 asm_fprintf (asm_out_file, "\t# fn:%s hd%d\n", fname, TARGET_HARD_DFP);
7479 asm_fprintf (asm_out_file, "\t# fn:%s hf%d\n", fname, !TARGET_SOFT_FLOAT);
7480 asm_fprintf (asm_out_file, "\t# fn:%s ht%d\n", fname, TARGET_OPT_HTM);
7481 asm_fprintf (asm_out_file, "\t# fn:%s vx%d\n", fname, TARGET_OPT_VX);
7482 asm_fprintf (asm_out_file, "\t# fn:%s ps%d\n", fname,
7483 TARGET_PACKED_STACK);
7484 asm_fprintf (asm_out_file, "\t# fn:%s se%d\n", fname, TARGET_SMALL_EXEC);
7485 asm_fprintf (asm_out_file, "\t# fn:%s mv%d\n", fname, TARGET_MVCLE);
7486 asm_fprintf (asm_out_file, "\t# fn:%s zv%d\n", fname, TARGET_ZVECTOR);
7487 asm_fprintf (asm_out_file, "\t# fn:%s wd%d\n", fname,
7488 s390_warn_dynamicstack_p);
7489 }
d0de9e13 7490 ASM_OUTPUT_LABEL (asm_out_file, fname);
7491 if (hw_after > 0)
7492 asm_fprintf (asm_out_file,
7493 "\t# post-label NOPs for hotpatch (%d halfwords)\n",
7494 hw_after);
7495}
7496
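/* Illustration (not from the original sources): with hotpatch (2, 1)
   the assembly around the label of a function foo looks roughly like

     nopr  %r0   # pre-label NOPs for hotpatch (2 halfwords)
     nopr  %r0
     # alignment for hotpatch
     .align  8
   foo:
     # post-label NOPs for hotpatch (1 halfwords)

   the post-label NOPs themselves are emitted as part of the function
   body, not by this hook; only the explanatory comment appears here.  */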
38899e29 7497/* Output machine-dependent UNSPECs occurring in address constant X
7498 in assembler syntax to stdio stream FILE. Returns true if the
7499 constant X could be recognized, false otherwise. */
9db1d521 7500
0f8ab434 7501static bool
faeb9bb6 7502s390_output_addr_const_extra (FILE *file, rtx x)
9db1d521 7503{
7504 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
7505 switch (XINT (x, 1))
7506 {
7507 case UNSPEC_GOTENT:
7508 output_addr_const (file, XVECEXP (x, 0, 0));
7509 fprintf (file, "@GOTENT");
7510 return true;
7511 case UNSPEC_GOT:
7512 output_addr_const (file, XVECEXP (x, 0, 0));
7513 fprintf (file, "@GOT");
7514 return true;
7515 case UNSPEC_GOTOFF:
7516 output_addr_const (file, XVECEXP (x, 0, 0));
7517 fprintf (file, "@GOTOFF");
7518 return true;
7519 case UNSPEC_PLT:
7520 output_addr_const (file, XVECEXP (x, 0, 0));
7521 fprintf (file, "@PLT");
7522 return true;
7523 case UNSPEC_PLTOFF:
7524 output_addr_const (file, XVECEXP (x, 0, 0));
7525 fprintf (file, "@PLTOFF");
7526 return true;
7527 case UNSPEC_TLSGD:
7528 output_addr_const (file, XVECEXP (x, 0, 0));
7529 fprintf (file, "@TLSGD");
7530 return true;
7531 case UNSPEC_TLSLDM:
7532 assemble_name (file, get_some_local_dynamic_name ());
7533 fprintf (file, "@TLSLDM");
7534 return true;
7535 case UNSPEC_DTPOFF:
7536 output_addr_const (file, XVECEXP (x, 0, 0));
7537 fprintf (file, "@DTPOFF");
7538 return true;
7539 case UNSPEC_NTPOFF:
7540 output_addr_const (file, XVECEXP (x, 0, 0));
7541 fprintf (file, "@NTPOFF");
7542 return true;
7543 case UNSPEC_GOTNTPOFF:
7544 output_addr_const (file, XVECEXP (x, 0, 0));
7545 fprintf (file, "@GOTNTPOFF");
7546 return true;
7547 case UNSPEC_INDNTPOFF:
7548 output_addr_const (file, XVECEXP (x, 0, 0));
7549 fprintf (file, "@INDNTPOFF");
7550 return true;
7551 }
9db1d521 7552
7553 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 2)
7554 switch (XINT (x, 1))
7555 {
7556 case UNSPEC_POOL_OFFSET:
7557 x = gen_rtx_MINUS (GET_MODE (x), XVECEXP (x, 0, 0), XVECEXP (x, 0, 1));
7558 output_addr_const (file, x);
7559 return true;
7560 }
faeb9bb6 7561 return false;
7562}
7563
c7453384 7564/* Output address operand ADDR in assembler syntax to
994fe660 7565 stdio stream FILE. */
7566
7567void
9c808aad 7568print_operand_address (FILE *file, rtx addr)
7569{
7570 struct s390_address ad;
e6b07173 7571 memset (&ad, 0, sizeof (s390_address));
9db1d521 7572
0ff4390d 7573 if (s390_loadrelative_operand_p (addr, NULL, NULL))
963fc8d0 7574 {
7575 if (!TARGET_Z10)
7576 {
3f3c098d
AK
7577 output_operand_lossage ("symbolic memory references are "
7578 "only supported on z10 or later");
7579 return;
7580 }
7581 output_addr_const (file, addr);
7582 return;
7583 }
7584
b808c04c 7585 if (!s390_decompose_address (addr, &ad)
7586 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
7587 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
c85ce869 7588 output_operand_lossage ("cannot decompose address");
c7453384 7589
9db1d521 7590 if (ad.disp)
faeb9bb6 7591 output_addr_const (file, ad.disp);
7592 else
7593 fprintf (file, "0");
7594
7595 if (ad.base && ad.indx)
7596 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
7597 reg_names[REGNO (ad.base)]);
7598 else if (ad.base)
7599 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
7600}
7601
7602/* Output operand X in assembler syntax to stdio stream FILE.
7603 CODE specified the format flag. The following format flags
7604 are recognized:
7605
7606 'C': print opcode suffix for branch condition.
7607 'D': print opcode suffix for inverse branch condition.
f1149235 7608 'E': print opcode suffix for branch on index instruction.
7b8acc34 7609 'G': print the size of the operand in bytes.
7610 'J': print tls_load/tls_gdcall/tls_ldcall suffix
7611 'M': print the second word of a TImode operand.
7612 'N': print the second word of a DImode operand.
7613 'O': print only the displacement of a memory reference or address.
7614 'R': print only the base register of a memory reference or address.
fc0ea003 7615 'S': print S-type memory reference (base+displacement).
7616 'Y': print address style operand without index (e.g. shift count or setmem
7617 operand).
994fe660 7618
5519a4f9 7619 'b': print integer X as if it's an unsigned byte.
963fc8d0 7620 'c': print integer X as if it's a signed byte.
7621 'e': "end" contiguous bitmask X in either DImode or vector inner mode.
7622 'f': "end" contiguous bitmask X in SImode.
da48f5ec 7623 'h': print integer X as if it's a signed halfword.
f19a9af7 7624 'i': print the first nonzero HImode part of X.
7625 'j': print the first HImode part unequal to -1 of X.
7626 'k': print the first nonzero SImode part of X.
7627 'm': print the first SImode part unequal to -1 of X.
75ca1b39 7628 'o': print integer X as if it's an unsigned 32bit word.
7629 's': "start" of contiguous bitmask X in either DImode or vector inner mode.
7630 't': CONST_INT: "start" of contiguous bitmask X in SImode.
7631 CONST_VECTOR: Generate a bitmask for vgbm instruction.
75ca1b39 7632 'x': print integer X as if it's an unsigned halfword.
085261c8 7633 'v': print register number as vector register (v1 instead of f1).
75ca1b39 7634*/
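/* For example (an illustration, not from the original sources), a
   machine-description template such as

     "ahi\t%0,%h1"

   routes operand 1 through the 'h' case below and prints it as a
   sign-extended 16-bit value, while "%N0" would name the second word
   of the 64-bit register pair starting at operand 0.  */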
7635
7636void
9c808aad 7637print_operand (FILE *file, rtx x, int code)
9db1d521 7638{
7639 HOST_WIDE_INT ival;
7640
7641 switch (code)
7642 {
7643 case 'C':
ba956982 7644 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
7645 return;
7646
7647 case 'D':
ba956982 7648 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
7649 return;
7650
7651 case 'E':
7652 if (GET_CODE (x) == LE)
7653 fprintf (file, "l");
7654 else if (GET_CODE (x) == GT)
7655 fprintf (file, "h");
7656 else
7657 output_operand_lossage ("invalid comparison operator "
7658 "for 'E' output modifier");
7659 return;
7660
7661 case 'J':
7662 if (GET_CODE (x) == SYMBOL_REF)
7663 {
7664 fprintf (file, "%s", ":tls_load:");
7665 output_addr_const (file, x);
7666 }
7667 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
7668 {
7669 fprintf (file, "%s", ":tls_gdcall:");
7670 output_addr_const (file, XVECEXP (x, 0, 0));
7671 }
7672 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
7673 {
7674 fprintf (file, "%s", ":tls_ldcall:");
7675 const char *name = get_some_local_dynamic_name ();
7676 gcc_assert (name);
7677 assemble_name (file, name);
7678 }
7679 else
3f3c098d 7680 output_operand_lossage ("invalid reference for 'J' output modifier");
fd3cd001
UW
7681 return;
7682
7683 case 'G':
7684 fprintf (file, "%u", GET_MODE_SIZE (GET_MODE (x)));
7685 return;
7686
7687 case 'O':
7688 {
7689 struct s390_address ad;
8d933e31 7690 int ret;
9db1d521 7691
085261c8 7692 ret = s390_decompose_address (MEM_P (x) ? XEXP (x, 0) : x, &ad);
7693
7694 if (!ret
7695 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
7696 || ad.indx)
7697 {
3f3c098d 7698 output_operand_lossage ("invalid address for 'O' output modifier");
7699 return;
7700 }
7701
7702 if (ad.disp)
faeb9bb6 7703 output_addr_const (file, ad.disp);
7704 else
7705 fprintf (file, "0");
7706 }
7707 return;
7708
7709 case 'R':
7710 {
7711 struct s390_address ad;
8d933e31 7712 int ret;
9db1d521 7713
085261c8 7714 ret = s390_decompose_address (MEM_P (x) ? XEXP (x, 0) : x, &ad);
7715
7716 if (!ret
7717 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
7718 || ad.indx)
7719 {
3f3c098d 7720 output_operand_lossage ("invalid address for 'R' output modifier");
7721 return;
7722 }
7723
7724 if (ad.base)
7725 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
7726 else
7727 fprintf (file, "0");
7728 }
7729 return;
7730
7731 case 'S':
7732 {
7733 struct s390_address ad;
8d933e31 7734 int ret;
fc0ea003 7735
7736 if (!MEM_P (x))
7737 {
7738 output_operand_lossage ("memory reference expected for "
7739 "'S' output modifier");
7740 return;
7741 }
8d933e31 7742 ret = s390_decompose_address (XEXP (x, 0), &ad);
7743
7744 if (!ret
7745 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
7746 || ad.indx)
7747 {
3f3c098d 7748 output_operand_lossage ("invalid address for 'S' output modifier");
8395b41e
AK
7749 return;
7750 }
7751
7752 if (ad.disp)
7753 output_addr_const (file, ad.disp);
7754 else
7755 fprintf (file, "0");
7756
7757 if (ad.base)
7758 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
7759 }
7760 return;
7761
7762 case 'N':
7763 if (GET_CODE (x) == REG)
7764 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
7765 else if (GET_CODE (x) == MEM)
7766 x = change_address (x, VOIDmode,
7767 plus_constant (Pmode, XEXP (x, 0), 4));
9db1d521 7768 else
7769 output_operand_lossage ("register or memory expression expected "
7770 "for 'N' output modifier");
7771 break;
7772
7773 case 'M':
7774 if (GET_CODE (x) == REG)
7775 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
7776 else if (GET_CODE (x) == MEM)
7777 x = change_address (x, VOIDmode,
7778 plus_constant (Pmode, XEXP (x, 0), 8));
9db1d521 7779 else
7780 output_operand_lossage ("register or memory expression expected "
7781 "for 'M' output modifier");
9db1d521 7782 break;
7783
7784 case 'Y':
dd95128b 7785 print_addrstyle_operand (file, x);
ac32b25e 7786 return;
7787 }
7788
7789 switch (GET_CODE (x))
7790 {
7791 case REG:
7792 /* Print FP regs as fx instead of vx when they are accessed
7793 through non-vector mode. */
7794 if (code == 'v'
7795 || VECTOR_NOFP_REG_P (x)
7796 || (FP_REG_P (x) && VECTOR_MODE_P (GET_MODE (x)))
7797 || (VECTOR_REG_P (x)
7798 && (GET_MODE_SIZE (GET_MODE (x)) /
7799 s390_class_max_nregs (FP_REGS, GET_MODE (x))) > 8))
7800 fprintf (file, "%%v%s", reg_names[REGNO (x)] + 2);
7801 else
7802 fprintf (file, "%s", reg_names[REGNO (x)]);
7803 break;
7804
7805 case MEM:
cc8ca59e 7806 output_address (GET_MODE (x), XEXP (x, 0));
7807 break;
7808
7809 case CONST:
7810 case CODE_LABEL:
7811 case LABEL_REF:
7812 case SYMBOL_REF:
faeb9bb6 7813 output_addr_const (file, x);
7814 break;
7815
7816 case CONST_INT:
7817 ival = INTVAL (x);
7818 switch (code)
7819 {
7820 case 0:
7821 break;
7822 case 'b':
7823 ival &= 0xff;
7824 break;
7825 case 'c':
7826 ival = ((ival & 0xff) ^ 0x80) - 0x80;
7827 break;
7828 case 'x':
7829 ival &= 0xffff;
7830 break;
7831 case 'h':
7832 ival = ((ival & 0xffff) ^ 0x8000) - 0x8000;
7833 break;
7834 case 'i':
7835 ival = s390_extract_part (x, HImode, 0);
7836 break;
7837 case 'j':
7838 ival = s390_extract_part (x, HImode, -1);
7839 break;
7840 case 'k':
7841 ival = s390_extract_part (x, SImode, 0);
7842 break;
7843 case 'm':
7844 ival = s390_extract_part (x, SImode, -1);
7845 break;
7846 case 'o':
7847 ival &= 0xffffffff;
7848 break;
7849 case 'e': case 'f':
7850 case 's': case 't':
7851 {
7852 int start, end;
7853 int len;
75ca1b39
RH
7854 bool ok;
7855
7856 len = (code == 's' || code == 'e' ? 64 : 32);
c2586c82 7857 ok = s390_contiguous_bitmask_p (ival, true, len, &start, &end);
7858 gcc_assert (ok);
7859 if (code == 's' || code == 't')
c2586c82 7860 ival = start;
75ca1b39 7861 else
c2586c82 7862 ival = end;
7863 }
7864 break;
7865 default:
7866 output_operand_lossage ("invalid constant for output modifier '%c'", code);
7867 }
7868 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
7869 break;
7870
089b05b1 7871 case CONST_WIDE_INT:
4023fb28 7872 if (code == 'b')
7873 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7874 CONST_WIDE_INT_ELT (x, 0) & 0xff);
9db1d521 7875 else if (code == 'x')
7876 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7877 CONST_WIDE_INT_ELT (x, 0) & 0xffff);
9db1d521 7878 else if (code == 'h')
3f3c098d 7879 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
089b05b1 7880 ((CONST_WIDE_INT_ELT (x, 0) & 0xffff) ^ 0x8000) - 0x8000);
9db1d521 7881 else
7882 {
7883 if (code == 0)
7884 output_operand_lossage ("invalid constant - try using "
7885 "an output modifier");
8395b41e 7886 else
7887 output_operand_lossage ("invalid constant for output modifier '%c'",
7888 code);
8395b41e 7889 }
9db1d521 7890 break;
7891 case CONST_VECTOR:
7892 switch (code)
7893 {
7894 case 'h':
7895 gcc_assert (const_vec_duplicate_p (x));
7896 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7897 ((INTVAL (XVECEXP (x, 0, 0)) & 0xffff) ^ 0x8000) - 0x8000);
7898 break;
7899 case 'e':
7900 case 's':
7901 {
c2586c82 7902 int start, end;
7903 bool ok;
7904
c2586c82 7905 ok = s390_contiguous_bitmask_vector_p (x, &start, &end);
085261c8 7906 gcc_assert (ok);
c2586c82 7907 ival = (code == 's') ? start : end;
085261c8
AK
7908 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
7909 }
7910 break;
7911 case 't':
7912 {
7913 unsigned mask;
7914 bool ok = s390_bytemask_vector_p (x, &mask);
7915 gcc_assert (ok);
7916 fprintf (file, "%u", mask);
7917 }
7918 break;
7919
7920 default:
7921 output_operand_lossage ("invalid constant vector for output "
7922 "modifier '%c'", code);
7923 }
7924 break;
7925
7926 default:
8395b41e 7927 if (code == 0)
7928 output_operand_lossage ("invalid expression - try using "
7929 "an output modifier");
8395b41e 7930 else
7931 output_operand_lossage ("invalid expression for output "
7932 "modifier '%c'", code);
7933 break;
7934 }
7935}
7936
7937/* Target hook for assembling integer objects. We need to define it
 7938 here to work around a bug in some versions of GAS, which couldn't
7939 handle values smaller than INT_MIN when printed in decimal. */
7940
7941static bool
9c808aad 7942s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
7943{
7944 if (size == 8 && aligned_p
7945 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
7946 {
7947 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
7948 INTVAL (x));
7949 return true;
7950 }
7951 return default_assemble_integer (x, size, aligned_p);
7952}
7953
c7453384 7954/* Returns true if register REGNO is used for forming
994fe660 7955 a memory address in expression X. */
9db1d521 7956
3ed99cc9 7957static bool
9c808aad 7958reg_used_in_mem_p (int regno, rtx x)
7959{
7960 enum rtx_code code = GET_CODE (x);
7961 int i, j;
7962 const char *fmt;
c7453384 7963
7964 if (code == MEM)
7965 {
c9bd6bcd 7966 if (refers_to_regno_p (regno, XEXP (x, 0)))
3ed99cc9 7967 return true;
9db1d521 7968 }
c7453384 7969 else if (code == SET
4023fb28
UW
7970 && GET_CODE (SET_DEST (x)) == PC)
7971 {
c9bd6bcd 7972 if (refers_to_regno_p (regno, SET_SRC (x)))
3ed99cc9 7973 return true;
4023fb28 7974 }
9db1d521
HP
7975
7976 fmt = GET_RTX_FORMAT (code);
7977 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7978 {
7979 if (fmt[i] == 'e'
7980 && reg_used_in_mem_p (regno, XEXP (x, i)))
3ed99cc9 7981 return true;
c7453384 7982
7983 else if (fmt[i] == 'E')
7984 for (j = 0; j < XVECLEN (x, i); j++)
7985 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
3ed99cc9 7986 return true;
9db1d521 7987 }
3ed99cc9 7988 return false;
7989}
7990
d65f7478 7991/* Returns true if expression DEP_RTX sets an address register
994fe660 7992 used by instruction INSN to address memory. */
9db1d521 7993
3ed99cc9 7994static bool
647d790d 7995addr_generation_dependency_p (rtx dep_rtx, rtx_insn *insn)
9db1d521 7996{
4023fb28 7997 rtx target, pat;
9db1d521 7998
b64925dc 7999 if (NONJUMP_INSN_P (dep_rtx))
34f0d87a 8000 dep_rtx = PATTERN (dep_rtx);
077dab3b 8001
8002 if (GET_CODE (dep_rtx) == SET)
8003 {
8004 target = SET_DEST (dep_rtx);
8005 if (GET_CODE (target) == STRICT_LOW_PART)
8006 target = XEXP (target, 0);
8007 while (GET_CODE (target) == SUBREG)
8008 target = SUBREG_REG (target);
8009
8010 if (GET_CODE (target) == REG)
8011 {
8012 int regno = REGNO (target);
8013
077dab3b 8014 if (s390_safe_attr_type (insn) == TYPE_LA)
8015 {
8016 pat = PATTERN (insn);
8017 if (GET_CODE (pat) == PARALLEL)
8018 {
8d933e31 8019 gcc_assert (XVECLEN (pat, 0) == 2);
4023fb28
UW
8020 pat = XVECEXP (pat, 0, 0);
8021 }
8d933e31 8022 gcc_assert (GET_CODE (pat) == SET);
c9bd6bcd 8023 return refers_to_regno_p (regno, SET_SRC (pat));
4023fb28 8024 }
077dab3b 8025 else if (get_attr_atype (insn) == ATYPE_AGEN)
8026 return reg_used_in_mem_p (regno, PATTERN (insn));
8027 }
9db1d521 8028 }
3ed99cc9 8029 return false;
8030}
8031
8032/* Return 1, if dep_insn sets register used in insn in the agen unit. */
8033
c7453384 8034int
647d790d 8035s390_agen_dep_p (rtx_insn *dep_insn, rtx_insn *insn)
c7453384 8036{
8037 rtx dep_rtx = PATTERN (dep_insn);
8038 int i;
8039
8040 if (GET_CODE (dep_rtx) == SET
8041 && addr_generation_dependency_p (dep_rtx, insn))
8042 return 1;
8043 else if (GET_CODE (dep_rtx) == PARALLEL)
8044 {
8045 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
8046 {
8047 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
8048 return 1;
8049 }
8050 }
8051 return 0;
8052}
8053
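/* Illustration (not from the original sources): the dependency detected
   above is the classic address-generation interlock, e.g.

     la   %r1,8(%r2)        ; dep_insn computes r1
     l    %r3,0(%r1)        ; insn needs r1 in the agen unit

   where the load stalls until r1 is available, so the scheduler models
   the extra latency between the two instructions.  */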
9381e3f1 8054
8055/* A C statement (sans semicolon) to update the integer scheduling priority
8056 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
8057 reduce the priority to execute INSN later. Do not define this macro if
c7453384 8058 you do not need to adjust the scheduling priorities of insns.
52609473 8059
c7453384 8060 A STD instruction should be scheduled earlier,
52609473 8061 in order to use the bypass. */
52609473 8062static int
ac44248e 8063s390_adjust_priority (rtx_insn *insn, int priority)
8064{
8065 if (! INSN_P (insn))
8066 return priority;
8067
fd9c86e0 8068 if (s390_tune <= PROCESSOR_2064_Z900)
8069 return priority;
8070
8071 switch (s390_safe_attr_type (insn))
8072 {
8073 case TYPE_FSTOREDF:
8074 case TYPE_FSTORESF:
8075 priority = priority << 3;
8076 break;
8077 case TYPE_STORE:
ea77e738 8078 case TYPE_STM:
8079 priority = priority << 1;
8080 break;
8081 default:
8082 break;
8083 }
8084 return priority;
8085}
f2d3c02a 8086
2cdece44 8087
077dab3b 8088/* The number of instructions that can be issued per cycle. */
f2d3c02a 8089
077dab3b 8090static int
9c808aad 8091s390_issue_rate (void)
077dab3b 8092{
93538e8e
AK
8093 switch (s390_tune)
8094 {
8095 case PROCESSOR_2084_Z990:
8096 case PROCESSOR_2094_Z9_109:
fd9c86e0 8097 case PROCESSOR_2094_Z9_EC:
65b1d8ea 8098 case PROCESSOR_2817_Z196:
93538e8e
AK
8099 return 3;
8100 case PROCESSOR_2097_Z10:
8101 return 2;
8102 case PROCESSOR_9672_G5:
8103 case PROCESSOR_9672_G6:
8104 case PROCESSOR_2064_Z900:
8105 /* Starting with EC12 we use the sched_reorder hook to take care
8106 of instruction dispatch constraints. The algorithm only
8107 picks the best instruction and assumes only a single
8108 instruction gets issued per cycle. */
8109 case PROCESSOR_2827_ZEC12:
bacf8ec3 8110 case PROCESSOR_2964_Z13:
2731a5b3 8111 case PROCESSOR_3906_Z14:
93538e8e
AK
8112 default:
8113 return 1;
8114 }
077dab3b 8115}
f2d3c02a 8116
52609473 8117static int
9c808aad 8118s390_first_cycle_multipass_dfa_lookahead (void)
52609473 8119{
64e1e4c4 8120 return 4;
52609473
HP
8121}
8122
8123/* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
8124 Fix up MEMs as required. */
8125
8126static void
8127annotate_constant_pool_refs (rtx *x)
8128{
8129 int i, j;
8130 const char *fmt;
8131
8132 gcc_assert (GET_CODE (*x) != SYMBOL_REF
8133 || !CONSTANT_POOL_ADDRESS_P (*x));
8134
8135 /* Literal pool references can only occur inside a MEM ... */
8136 if (GET_CODE (*x) == MEM)
8137 {
8138 rtx memref = XEXP (*x, 0);
8139
8140 if (GET_CODE (memref) == SYMBOL_REF
8141 && CONSTANT_POOL_ADDRESS_P (memref))
8142 {
8143 rtx base = cfun->machine->base_reg;
8144 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
8145 UNSPEC_LTREF);
8146
8147 *x = replace_equiv_address (*x, addr);
8148 return;
8149 }
8150
8151 if (GET_CODE (memref) == CONST
8152 && GET_CODE (XEXP (memref, 0)) == PLUS
8153 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
8154 && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
8155 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
8156 {
8157 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
8158 rtx sym = XEXP (XEXP (memref, 0), 0);
8159 rtx base = cfun->machine->base_reg;
8160 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
8161 UNSPEC_LTREF);
8162
0a81f074 8163 *x = replace_equiv_address (*x, plus_constant (Pmode, addr, off));
585539a1
UW
8164 return;
8165 }
8166 }
8167
8168 /* ... or a load-address type pattern. */
8169 if (GET_CODE (*x) == SET)
8170 {
8171 rtx addrref = SET_SRC (*x);
8172
8173 if (GET_CODE (addrref) == SYMBOL_REF
8174 && CONSTANT_POOL_ADDRESS_P (addrref))
8175 {
8176 rtx base = cfun->machine->base_reg;
8177 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
8178 UNSPEC_LTREF);
8179
8180 SET_SRC (*x) = addr;
8181 return;
8182 }
8183
8184 if (GET_CODE (addrref) == CONST
8185 && GET_CODE (XEXP (addrref, 0)) == PLUS
8186 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
8187 && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
8188 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
8189 {
8190 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
8191 rtx sym = XEXP (XEXP (addrref, 0), 0);
8192 rtx base = cfun->machine->base_reg;
8193 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
8194 UNSPEC_LTREF);
8195
0a81f074 8196 SET_SRC (*x) = plus_constant (Pmode, addr, off);
8197 return;
8198 }
8199 }
8200
8201 /* Annotate LTREL_BASE as well. */
8202 if (GET_CODE (*x) == UNSPEC
8203 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
8204 {
8205 rtx base = cfun->machine->base_reg;
8206 *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
8207 UNSPEC_LTREL_BASE);
8208 return;
8209 }
8210
8211 fmt = GET_RTX_FORMAT (GET_CODE (*x));
8212 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
8213 {
8214 if (fmt[i] == 'e')
8215 {
8216 annotate_constant_pool_refs (&XEXP (*x, i));
8217 }
8218 else if (fmt[i] == 'E')
8219 {
8220 for (j = 0; j < XVECLEN (*x, i); j++)
8221 annotate_constant_pool_refs (&XVECEXP (*x, i, j));
8222 }
8223 }
8224}
8225
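/* Illustration (not from the original sources): the annotation above
   rewrites a literal-pool access

     (mem (symbol_ref ".LC0"))

   into

     (mem (unspec [(symbol_ref ".LC0") (reg base)] UNSPEC_LTREF))

   making the dependency on the literal-pool base register explicit for
   the following passes.  */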
8226/* Split all branches that exceed the maximum distance.
8227 Returns true if this created a new literal pool entry. */
8228
8229static int
8230s390_split_branches (void)
8231{
8232 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
8d933e31 8233 int new_literal = 0, ret;
775c43d3 8234 rtx_insn *insn;
17f385d8 8235 rtx pat, target;
8236 rtx *label;
8237
8238 /* We need correct insn addresses. */
8239
8240 shorten_branches (get_insns ());
8241
8242 /* Find all branches that exceed 64KB, and split them. */
8243
8244 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8245 {
966f97ac 8246 if (! JUMP_P (insn) || tablejump_p (insn, NULL, NULL))
8247 continue;
8248
8249 pat = PATTERN (insn);
966f97ac 8250 if (GET_CODE (pat) == PARALLEL)
8251 pat = XVECEXP (pat, 0, 0);
8252 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
8253 continue;
8254
8255 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
8256 {
8257 label = &SET_SRC (pat);
8258 }
8259 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
8260 {
8261 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
8262 label = &XEXP (SET_SRC (pat), 1);
8263 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
8264 label = &XEXP (SET_SRC (pat), 2);
8265 else
8266 continue;
8267 }
8268 else
8269 continue;
8270
8271 if (get_attr_length (insn) <= 4)
8272 continue;
8273
8274 /* We are going to use the return register as scratch register,
8275 make sure it will be saved/restored by the prologue/epilogue. */
8276 cfun_frame_layout.save_return_addr_p = 1;
8277
8278 if (!flag_pic)
8279 {
8280 new_literal = 1;
17f385d8 8281 rtx mem = force_const_mem (Pmode, *label);
8282 rtx_insn *set_insn = emit_insn_before (gen_rtx_SET (temp_reg, mem),
8283 insn);
8284 INSN_ADDRESSES_NEW (set_insn, -1);
8285 annotate_constant_pool_refs (&PATTERN (set_insn));
8286
8287 target = temp_reg;
8288 }
8289 else
8290 {
8291 new_literal = 1;
8292 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
8293 UNSPEC_LTREL_OFFSET);
8294 target = gen_rtx_CONST (Pmode, target);
8295 target = force_const_mem (Pmode, target);
8296 rtx_insn *set_insn = emit_insn_before (gen_rtx_SET (temp_reg, target),
8297 insn);
8298 INSN_ADDRESSES_NEW (set_insn, -1);
8299 annotate_constant_pool_refs (&PATTERN (set_insn));
8300
8301 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
8302 cfun->machine->base_reg),
8303 UNSPEC_LTREL_BASE);
8304 target = gen_rtx_PLUS (Pmode, temp_reg, target);
8305 }
8306
8307 ret = validate_change (insn, label, target, 0);
8308 gcc_assert (ret);
8309 }
8310
8311 return new_literal;
8312}
8313
b2ccb744 8314
8315/* Find an annotated literal pool symbol referenced in RTX X,
8316 and store it at REF. Will abort if X contains references to
8317 more than one such pool symbol; multiple references to the same
8318 symbol are allowed, however.
b2ccb744 8319
c7453384 8320 The rtx pointed to by REF must be initialized to NULL_RTX
b2ccb744
UW
8321 by the caller before calling this routine. */
8322
8323static void
9c808aad 8324find_constant_pool_ref (rtx x, rtx *ref)
b2ccb744
UW
8325{
8326 int i, j;
8327 const char *fmt;
8328
8329 /* Ignore LTREL_BASE references. */
8330 if (GET_CODE (x) == UNSPEC
8331 && XINT (x, 1) == UNSPEC_LTREL_BASE)
8332 return;
8333 /* Likewise POOL_ENTRY insns. */
8334 if (GET_CODE (x) == UNSPEC_VOLATILE
8335 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
8336 return;
fd7643fb 8337
8338 gcc_assert (GET_CODE (x) != SYMBOL_REF
8339 || !CONSTANT_POOL_ADDRESS_P (x));
8340
8341 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
b2ccb744 8342 {
585539a1 8343 rtx sym = XVECEXP (x, 0, 0);
8344 gcc_assert (GET_CODE (sym) == SYMBOL_REF
8345 && CONSTANT_POOL_ADDRESS_P (sym));
585539a1 8346
b2ccb744 8347 if (*ref == NULL_RTX)
585539a1 8348 *ref = sym;
f4aa3848 8349 else
8d933e31 8350 gcc_assert (*ref == sym);
8351
8352 return;
8353 }
8354
8355 fmt = GET_RTX_FORMAT (GET_CODE (x));
8356 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
8357 {
8358 if (fmt[i] == 'e')
8359 {
8360 find_constant_pool_ref (XEXP (x, i), ref);
8361 }
8362 else if (fmt[i] == 'E')
8363 {
8364 for (j = 0; j < XVECLEN (x, i); j++)
8365 find_constant_pool_ref (XVECEXP (x, i, j), ref);
8366 }
8367 }
8368}
8369
f4aa3848 8370/* Replace every reference to the annotated literal pool
585539a1 8371 symbol REF in X by its base plus OFFSET. */
8372
8373static void
585539a1 8374replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
b2ccb744
UW
8375{
8376 int i, j;
8377 const char *fmt;
8378
8d933e31 8379 gcc_assert (*x != ref);
b2ccb744 8380
585539a1
UW
8381 if (GET_CODE (*x) == UNSPEC
8382 && XINT (*x, 1) == UNSPEC_LTREF
8383 && XVECEXP (*x, 0, 0) == ref)
b2ccb744 8384 {
8385 *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
8386 return;
8387 }
8388
8389 if (GET_CODE (*x) == PLUS
8390 && GET_CODE (XEXP (*x, 1)) == CONST_INT
8391 && GET_CODE (XEXP (*x, 0)) == UNSPEC
8392 && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
8393 && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
b2ccb744 8394 {
585539a1 8395 rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
0a81f074 8396 *x = plus_constant (Pmode, addr, INTVAL (XEXP (*x, 1)));
585539a1 8397 return;
b2ccb744
UW
8398 }
8399
8400 fmt = GET_RTX_FORMAT (GET_CODE (*x));
8401 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
8402 {
8403 if (fmt[i] == 'e')
8404 {
585539a1 8405 replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
b2ccb744
UW
8406 }
8407 else if (fmt[i] == 'E')
8408 {
8409 for (j = 0; j < XVECLEN (*x, i); j++)
585539a1 8410 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
8411 }
8412 }
8413}
8414
c7453384 8415/* Check whether X contains an UNSPEC_LTREL_BASE.
fd7643fb 8416 Return its constant pool symbol if found, NULL_RTX otherwise. */
aee4e0db 8417
fd7643fb 8418static rtx
9c808aad 8419find_ltrel_base (rtx x)
aee4e0db 8420{
aee4e0db
UW
8421 int i, j;
8422 const char *fmt;
8423
fd7643fb
UW
8424 if (GET_CODE (x) == UNSPEC
8425 && XINT (x, 1) == UNSPEC_LTREL_BASE)
8426 return XVECEXP (x, 0, 0);
aee4e0db
UW
8427
8428 fmt = GET_RTX_FORMAT (GET_CODE (x));
8429 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
8430 {
8431 if (fmt[i] == 'e')
8432 {
fd7643fb
UW
8433 rtx fnd = find_ltrel_base (XEXP (x, i));
8434 if (fnd)
8435 return fnd;
aee4e0db
UW
8436 }
8437 else if (fmt[i] == 'E')
8438 {
8439 for (j = 0; j < XVECLEN (x, i); j++)
8440 {
8441 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
8442 if (fnd)
8443 return fnd;
8444 }
aee4e0db
UW
8445 }
8446 }
8447
UW
8449}
8450
585539a1 8451/* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
aee4e0db
UW
8452
8453static void
585539a1 8454replace_ltrel_base (rtx *x)
aee4e0db 8455{
fd7643fb 8456 int i, j;
8457 const char *fmt;
8458
fd7643fb
UW
8459 if (GET_CODE (*x) == UNSPEC
8460 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
aee4e0db 8461 {
585539a1 8462 *x = XVECEXP (*x, 0, 1);
fd7643fb 8463 return;
aee4e0db
UW
8464 }
8465
8466 fmt = GET_RTX_FORMAT (GET_CODE (*x));
8467 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
8468 {
8469 if (fmt[i] == 'e')
8470 {
585539a1 8471 replace_ltrel_base (&XEXP (*x, i));
aee4e0db
UW
8472 }
8473 else if (fmt[i] == 'E')
8474 {
8475 for (j = 0; j < XVECLEN (*x, i); j++)
585539a1 8476 replace_ltrel_base (&XVECEXP (*x, i, j));
aee4e0db
UW
8477 }
8478 }
8479}
8480
8481
fd7643fb 8482/* We keep a list of constants which we have to add to internal
8483 constant tables in the middle of large functions. */
8484
e0654cf2 8485#define NR_C_MODES 32
ef4bddc2 8486machine_mode constant_modes[NR_C_MODES] =
b2ccb744 8487{
4dc19cc0 8488 TFmode, TImode, TDmode,
8489 V16QImode, V8HImode, V4SImode, V2DImode, V1TImode,
8490 V4SFmode, V2DFmode, V1TFmode,
4dc19cc0 8491 DFmode, DImode, DDmode,
085261c8 8492 V8QImode, V4HImode, V2SImode, V1DImode, V2SFmode, V1DFmode,
4dc19cc0 8493 SFmode, SImode, SDmode,
085261c8 8494 V4QImode, V2HImode, V1SImode, V1SFmode,
b2ccb744 8495 HImode,
8496 V2QImode, V1HImode,
8497 QImode,
8498 V1QImode
8499};
8500
8501struct constant
8502{
8503 struct constant *next;
8504 rtx value;
775c43d3 8505 rtx_code_label *label;
b2ccb744
UW
8506};
8507
8508struct constant_pool
8509{
8510 struct constant_pool *next;
8511 rtx_insn *first_insn;
8512 rtx_insn *pool_insn;
aee4e0db 8513 bitmap insns;
775c43d3 8514 rtx_insn *emit_pool_after;
8515
8516 struct constant *constants[NR_C_MODES];
9bb86f41 8517 struct constant *execute;
775c43d3 8518 rtx_code_label *label;
8519 int size;
8520};
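/* Rough sketch of the pool lifecycle as implemented below: a pool is
   created with s390_start_pool (or s390_alloc_pool for the single main
   pool), filled via s390_add_constant / s390_add_execute while the insn
   stream is scanned, queried via s390_find_constant / s390_find_execute
   when pool references are rewritten, materialized into the insn stream
   by s390_dump_pool, and finally released by s390_free_pool.  */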
8521
8522/* Allocate new constant_pool structure. */
8523
8524static struct constant_pool *
8525s390_alloc_pool (void)
8526{
8527 struct constant_pool *pool;
8528 int i;
8529
8530 pool = (struct constant_pool *) xmalloc (sizeof *pool);
8531 pool->next = NULL;
8532 for (i = 0; i < NR_C_MODES; i++)
8533 pool->constants[i] = NULL;
8534
8535 pool->execute = NULL;
8536 pool->label = gen_label_rtx ();
8537 pool->first_insn = NULL;
8538 pool->pool_insn = NULL;
8539 pool->insns = BITMAP_ALLOC (NULL);
8540 pool->size = 0;
775c43d3 8541 pool->emit_pool_after = NULL;
8542
8543 return pool;
8544}
8545
8546/* Create new constant pool covering instructions starting at INSN
8547 and chain it to the end of POOL_LIST. */
8548
8549static struct constant_pool *
775c43d3 8550s390_start_pool (struct constant_pool **pool_list, rtx_insn *insn)
8551{
8552 struct constant_pool *pool, **prev;
b2ccb744 8553
5af2f3d3 8554 pool = s390_alloc_pool ();
b2ccb744 8555 pool->first_insn = insn;
aee4e0db 8556
8557 for (prev = pool_list; *prev; prev = &(*prev)->next)
8558 ;
8559 *prev = pool;
8560
8561 return pool;
8562}
8563
8564/* End range of instructions covered by POOL at INSN and emit
8565 placeholder insn representing the pool. */
8566
8567static void
775c43d3 8568s390_end_pool (struct constant_pool *pool, rtx_insn *insn)
b2ccb744 8569{
8570 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
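  /* The 8 extra bytes appear to be a conservative allowance for the
     padding that the pool_align directives emitted in s390_dump_pool
     may add; worst-case bookkeeping rather than an exact size.  */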
8571
8572 if (!insn)
8573 insn = get_last_insn ();
8574
8575 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
8576 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
8577}
8578
8579/* Add INSN to the list of insns covered by POOL. */
8580
8581static void
9c808aad 8582s390_add_pool_insn (struct constant_pool *pool, rtx insn)
8583{
8584 bitmap_set_bit (pool->insns, INSN_UID (insn));
8585}
8586
8587/* Return pool out of POOL_LIST that covers INSN. */
8588
8589static struct constant_pool *
9c808aad 8590s390_find_pool (struct constant_pool *pool_list, rtx insn)
b2ccb744 8591{
8592 struct constant_pool *pool;
8593
b2ccb744 8594 for (pool = pool_list; pool; pool = pool->next)
aee4e0db 8595 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
8596 break;
8597
8598 return pool;
8599}
8600
aee4e0db 8601/* Add constant VAL of mode MODE to the constant pool POOL. */
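/* For illustration: requesting (const_int 42) in SImode twice creates a
   single struct constant with a fresh label and grows pool->size by 4
   bytes only once; the rtx_equal_p lookup below deduplicates the second
   request.  */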
b2ccb744 8602
aee4e0db 8603static void
ef4bddc2 8604s390_add_constant (struct constant_pool *pool, rtx val, machine_mode mode)
8605{
8606 struct constant *c;
8607 int i;
8608
8609 for (i = 0; i < NR_C_MODES; i++)
8610 if (constant_modes[i] == mode)
8611 break;
8d933e31 8612 gcc_assert (i != NR_C_MODES);
8613
8614 for (c = pool->constants[i]; c != NULL; c = c->next)
8615 if (rtx_equal_p (val, c->value))
8616 break;
8617
8618 if (c == NULL)
8619 {
8620 c = (struct constant *) xmalloc (sizeof *c);
8621 c->value = val;
8622 c->label = gen_label_rtx ();
8623 c->next = pool->constants[i];
8624 pool->constants[i] = c;
8625 pool->size += GET_MODE_SIZE (mode);
8626 }
aee4e0db 8627}
b2ccb744 8628
8629/* Return an rtx that represents the offset of X from the start of
8630 pool POOL. */
8631
8632static rtx
8633s390_pool_offset (struct constant_pool *pool, rtx x)
8634{
8635 rtx label;
8636
8637 label = gen_rtx_LABEL_REF (GET_MODE (x), pool->label);
8638 x = gen_rtx_UNSPEC (GET_MODE (x), gen_rtvec (2, x, label),
8639 UNSPEC_POOL_OFFSET);
8640 return gen_rtx_CONST (GET_MODE (x), x);
8641}
8642
8643/* Find constant VAL of mode MODE in the constant pool POOL.
8644 Return an RTX describing the distance from the start of
8645 the pool to the location of the new constant. */
c7453384 8646
aee4e0db 8647static rtx
9c808aad 8648s390_find_constant (struct constant_pool *pool, rtx val,
ef4bddc2 8649 machine_mode mode)
8650{
8651 struct constant *c;
aee4e0db 8652 int i;
c7453384 8653
8654 for (i = 0; i < NR_C_MODES; i++)
8655 if (constant_modes[i] == mode)
8656 break;
8d933e31 8657 gcc_assert (i != NR_C_MODES);
c7453384 8658
8659 for (c = pool->constants[i]; c != NULL; c = c->next)
8660 if (rtx_equal_p (val, c->value))
8661 break;
c7453384 8662
8d933e31 8663 gcc_assert (c);
c7453384 8664
dc66391d 8665 return s390_pool_offset (pool, gen_rtx_LABEL_REF (Pmode, c->label));
8666}
8667
8668/* Check whether INSN is an execute. Return the label_ref to its
8669 execute target template if so, NULL_RTX otherwise. */
8670
8671static rtx
8672s390_execute_label (rtx insn)
8673{
b64925dc 8674 if (NONJUMP_INSN_P (insn)
8675 && GET_CODE (PATTERN (insn)) == PARALLEL
8676 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
8677 && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
8678 return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);
8679
8680 return NULL_RTX;
8681}
8682
8683/* Add execute target for INSN to the constant pool POOL. */
8684
8685static void
8686s390_add_execute (struct constant_pool *pool, rtx insn)
8687{
8688 struct constant *c;
8689
8690 for (c = pool->execute; c != NULL; c = c->next)
8691 if (INSN_UID (insn) == INSN_UID (c->value))
8692 break;
8693
8694 if (c == NULL)
8695 {
8696 c = (struct constant *) xmalloc (sizeof *c);
8697 c->value = insn;
d24959df 8698 c->label = gen_label_rtx ();
8699 c->next = pool->execute;
8700 pool->execute = c;
d24959df 8701 pool->size += 6;
8702 }
8703}
8704
8705/* Find execute target for INSN in the constant pool POOL.
8706 Return an RTX describing the distance from the start of
8707 the pool to the location of the execute target. */
8708
8709static rtx
8710s390_find_execute (struct constant_pool *pool, rtx insn)
8711{
8712 struct constant *c;
8713
8714 for (c = pool->execute; c != NULL; c = c->next)
8715 if (INSN_UID (insn) == INSN_UID (c->value))
8716 break;
8717
8d933e31 8718 gcc_assert (c);
9bb86f41 8719
dc66391d 8720 return s390_pool_offset (pool, gen_rtx_LABEL_REF (Pmode, c->label));
8721}
8722
ab96de7e 8723/* For an execute INSN, extract the execute target template. */
8724
8725static rtx
ab96de7e 8726s390_execute_target (rtx insn)
9bb86f41 8727{
8728 rtx pattern = PATTERN (insn);
8729 gcc_assert (s390_execute_label (insn));
8730
8731 if (XVECLEN (pattern, 0) == 2)
8732 {
8733 pattern = copy_rtx (XVECEXP (pattern, 0, 1));
8734 }
8735 else
8736 {
8737 rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
8738 int i;
8739
8740 for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
8741 RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));
8742
8743 pattern = gen_rtx_PARALLEL (VOIDmode, vec);
8744 }
8745
8746 return pattern;
8747}
8748
8749/* Indicate that INSN cannot be duplicated. This is the case for
8750 execute insns that carry a unique label. */
8751
8752static bool
ac44248e 8753s390_cannot_copy_insn_p (rtx_insn *insn)
8754{
8755 rtx label = s390_execute_label (insn);
8756 return label && label != const0_rtx;
8757}
8758
8759/* Dump out the constants in POOL. If REMOTE_LABEL is true,
8760 do not emit the pool base label. */
b2ccb744 8761
9bb86f41 8762static void
5af2f3d3 8763s390_dump_pool (struct constant_pool *pool, bool remote_label)
8764{
8765 struct constant *c;
775c43d3 8766 rtx_insn *insn = pool->pool_insn;
8767 int i;
8768
8769 /* Switch to rodata section. */
8770 if (TARGET_CPU_ZARCH)
8771 {
8772 insn = emit_insn_after (gen_pool_section_start (), insn);
8773 INSN_ADDRESSES_NEW (insn, -1);
8774 }
8775
8776 /* Ensure minimum pool alignment. */
9e8327e3 8777 if (TARGET_CPU_ZARCH)
9bb86f41 8778 insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
b2ccb744 8779 else
9bb86f41 8780 insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
8781 INSN_ADDRESSES_NEW (insn, -1);
8782
9bb86f41 8783 /* Emit pool base label. */
8784 if (!remote_label)
8785 {
8786 insn = emit_label_after (pool->label, insn);
8787 INSN_ADDRESSES_NEW (insn, -1);
8788 }
8789
8790 /* Dump constants in descending alignment requirement order,
8791 ensuring proper alignment for every constant. */
8792 for (i = 0; i < NR_C_MODES; i++)
8793 for (c = pool->constants[i]; c; c = c->next)
8794 {
fd7643fb 8795 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
77340500 8796 rtx value = copy_rtx (c->value);
8797 if (GET_CODE (value) == CONST
8798 && GET_CODE (XEXP (value, 0)) == UNSPEC
fd7643fb 8799 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
aee4e0db 8800 && XVECLEN (XEXP (value, 0), 0) == 1)
dc66391d 8801 value = s390_pool_offset (pool, XVECEXP (XEXP (value, 0), 0, 0));
aee4e0db 8802
8803 insn = emit_label_after (c->label, insn);
8804 INSN_ADDRESSES_NEW (insn, -1);
416cf582 8805
38899e29 8806 value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
8807 gen_rtvec (1, value),
8808 UNSPECV_POOL_ENTRY);
8809 insn = emit_insn_after (value, insn);
8810 INSN_ADDRESSES_NEW (insn, -1);
8811 }
8812
8813 /* Ensure minimum alignment for instructions. */
8814 insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
8815 INSN_ADDRESSES_NEW (insn, -1);
8816
8817 /* Output in-pool execute template insns. */
8818 for (c = pool->execute; c; c = c->next)
8819 {
8820 insn = emit_label_after (c->label, insn);
8821 INSN_ADDRESSES_NEW (insn, -1);
8822
8823 insn = emit_insn_after (s390_execute_target (c->value), insn);
8824 INSN_ADDRESSES_NEW (insn, -1);
8825 }
8826
8827 /* Switch back to previous section. */
8828 if (TARGET_CPU_ZARCH)
8829 {
8830 insn = emit_insn_after (gen_pool_section_end (), insn);
8831 INSN_ADDRESSES_NEW (insn, -1);
8832 }
8833
8834 insn = emit_barrier_after (insn);
8835 INSN_ADDRESSES_NEW (insn, -1);
8836
8837 /* Remove placeholder insn. */
8838 remove_insn (pool->pool_insn);
8839}
8840
8841/* Free all memory used by POOL. */
8842
8843static void
9c808aad 8844s390_free_pool (struct constant_pool *pool)
b2ccb744 8845{
9bb86f41 8846 struct constant *c, *next;
8847 int i;
8848
8849 for (i = 0; i < NR_C_MODES; i++)
8850 for (c = pool->constants[i]; c; c = next)
8851 {
8852 next = c->next;
8853 free (c);
8854 }
8855
8856 for (c = pool->execute; c; c = next)
b2ccb744 8857 {
8858 next = c->next;
8859 free (c);
8860 }
8861
7b210806 8862 BITMAP_FREE (pool->insns);
b2ccb744 8863 free (pool);
c7453384 8864}
b2ccb744 8865
b2ccb744 8866
8867/* Collect main literal pool. Return NULL on overflow. */
8868
8869static struct constant_pool *
8870s390_mainpool_start (void)
8871{
8872 struct constant_pool *pool;
775c43d3 8873 rtx_insn *insn;
8874
8875 pool = s390_alloc_pool ();
8876
8877 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8878 {
b64925dc 8879 if (NONJUMP_INSN_P (insn)
8880 && GET_CODE (PATTERN (insn)) == SET
8881 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
8882 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
5af2f3d3 8883 {
8884 /* There might be two main_pool instructions if base_reg
8885 is call-clobbered; one for shrink-wrapped code and one
8886 for the rest. We want to keep the first. */
8887 if (pool->pool_insn)
8888 {
8889 insn = PREV_INSN (insn);
8890 delete_insn (NEXT_INSN (insn));
8891 continue;
8892 }
8893 pool->pool_insn = insn;
8894 }
8895
d24959df 8896 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
8897 {
8898 s390_add_execute (pool, insn);
8899 }
b64925dc 8900 else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
8901 {
8902 rtx pool_ref = NULL_RTX;
8903 find_constant_pool_ref (PATTERN (insn), &pool_ref);
8904 if (pool_ref)
8905 {
8906 rtx constant = get_pool_constant (pool_ref);
ef4bddc2 8907 machine_mode mode = get_pool_mode (pool_ref);
8908 s390_add_constant (pool, constant, mode);
8909 }
8910 }
8911
8912 /* If hot/cold partitioning is enabled we have to make sure that
8913 the literal pool is emitted in the same section where the
8914 initialization of the literal pool base pointer takes place.
 8915 emit_pool_after is only used in the non-overflow case on
 8916 non-Z CPUs where we can emit the literal pool at the end of the
8917 function body within the text section. */
8918 if (NOTE_P (insn)
8919 && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS
8920 && !pool->emit_pool_after)
8921 pool->emit_pool_after = PREV_INSN (insn);
8922 }
8923
8d933e31 8924 gcc_assert (pool->pool_insn || pool->size == 0);
8925
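  /* Pool entries are reached via a 12-bit unsigned displacement from
     the pool base register, so a single literal pool is limited to
     4096 bytes.  */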
8926 if (pool->size >= 4096)
8927 {
8928 /* We're going to chunkify the pool, so remove the main
8929 pool placeholder insn. */
8930 remove_insn (pool->pool_insn);
8931
8932 s390_free_pool (pool);
8933 pool = NULL;
8934 }
8935
 8936 /* If the function ends with the section where the literal pool
 8937 should be emitted, set the marker to its end. */
b49326f1 8938 if (pool && !pool->emit_pool_after)
8939 pool->emit_pool_after = get_last_insn ();
8940
8941 return pool;
8942}
8943
8944/* POOL holds the main literal pool as collected by s390_mainpool_start.
8945 Modify the current function to output the pool constants as well as
585539a1 8946 the pool register setup instruction. */
8947
8948static void
585539a1 8949s390_mainpool_finish (struct constant_pool *pool)
5af2f3d3 8950{
91086990 8951 rtx base_reg = cfun->machine->base_reg;
8952
8953 /* If the pool is empty, we're done. */
8954 if (pool->size == 0)
8955 {
8956 /* We don't actually need a base register after all. */
8957 cfun->machine->base_reg = NULL_RTX;
8958
8959 if (pool->pool_insn)
8960 remove_insn (pool->pool_insn);
8961 s390_free_pool (pool);
8962 return;
8963 }
8964
8965 /* We need correct insn addresses. */
8966 shorten_branches (get_insns ());
8967
9e8327e3 8968 /* On zSeries, we use a LARL to load the pool register. The pool is
5af2f3d3 8969 located in the .rodata section, so we emit it after the function. */
9e8327e3 8970 if (TARGET_CPU_ZARCH)
5af2f3d3 8971 {
8972 rtx set = gen_main_base_64 (base_reg, pool->label);
8973 rtx_insn *insn = emit_insn_after (set, pool->pool_insn);
8974 INSN_ADDRESSES_NEW (insn, -1);
8975 remove_insn (pool->pool_insn);
8976
8977 insn = get_last_insn ();
8978 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
8979 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
8980
8981 s390_dump_pool (pool, 0);
8982 }
8983
9e8327e3 8984 /* On S/390, if the total size of the function's code plus literal pool
8985 does not exceed 4096 bytes, we use BASR to set up a function base
8986 pointer, and emit the literal pool at the end of the function. */
03870a04 8987 else if (INSN_ADDRESSES (INSN_UID (pool->emit_pool_after))
8988 + pool->size + 8 /* alignment slop */ < 4096)
8989 {
8990 rtx set = gen_main_base_31_small (base_reg, pool->label);
8991 rtx_insn *insn = emit_insn_after (set, pool->pool_insn);
8992 INSN_ADDRESSES_NEW (insn, -1);
8993 remove_insn (pool->pool_insn);
8994
8995 insn = emit_label_after (pool->label, insn);
8996 INSN_ADDRESSES_NEW (insn, -1);
8997
8998 /* emit_pool_after will be set by s390_mainpool_start to the
8999 last insn of the section where the literal pool should be
9000 emitted. */
9001 insn = pool->emit_pool_after;
9002
9003 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
9004 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
9005
9006 s390_dump_pool (pool, 1);
9007 }
9008
9009 /* Otherwise, we emit an inline literal pool and use BASR to branch
9010 over it, setting up the pool register at the same time. */
9011 else
9012 {
17f385d8 9013 rtx_code_label *pool_end = gen_label_rtx ();
5af2f3d3 9014
9015 rtx pat = gen_main_base_31_large (base_reg, pool->label, pool_end);
9016 rtx_insn *insn = emit_jump_insn_after (pat, pool->pool_insn);
b0a1ac21 9017 JUMP_LABEL (insn) = pool_end;
9018 INSN_ADDRESSES_NEW (insn, -1);
9019 remove_insn (pool->pool_insn);
9020
9021 insn = emit_label_after (pool->label, insn);
9022 INSN_ADDRESSES_NEW (insn, -1);
9023
9024 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
9025 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
9026
9027 insn = emit_label_after (pool_end, pool->pool_insn);
9028 INSN_ADDRESSES_NEW (insn, -1);
9029
9030 s390_dump_pool (pool, 1);
9031 }
9032
9033
9034 /* Replace all literal pool references. */
9035
b32d5189 9036 for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
9037 {
9038 if (INSN_P (insn))
585539a1 9039 replace_ltrel_base (&PATTERN (insn));
5af2f3d3 9040
b64925dc 9041 if (NONJUMP_INSN_P (insn) || CALL_P (insn))
9042 {
9043 rtx addr, pool_ref = NULL_RTX;
9044 find_constant_pool_ref (PATTERN (insn), &pool_ref);
9045 if (pool_ref)
9046 {
9047 if (s390_execute_label (insn))
9048 addr = s390_find_execute (pool, insn);
9049 else
9050 addr = s390_find_constant (pool, get_pool_constant (pool_ref),
9051 get_pool_mode (pool_ref));
9052
9053 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
9054 INSN_CODE (insn) = -1;
9055 }
9056 }
9057 }
9058
9059
9060 /* Free the pool. */
9061 s390_free_pool (pool);
9062}
9063
9064/* POOL holds the main literal pool as collected by s390_mainpool_start.
9065 We have decided we cannot use this pool, so revert all changes
9066 to the current function that were done by s390_mainpool_start. */
9067static void
9068s390_mainpool_cancel (struct constant_pool *pool)
9069{
9070 /* We didn't actually change the instruction stream, so simply
9071 free the pool memory. */
9072 s390_free_pool (pool);
9073}
9074
9075
585539a1 9076/* Chunkify the literal pool. */
9db1d521 9077
9078#define S390_POOL_CHUNK_MIN 0xc00
9079#define S390_POOL_CHUNK_MAX 0xe00
9080
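/* Both limits stay well below the 4096-byte displacement range
   (0xc00 = 3072, 0xe00 = 3584 bytes), apparently to leave headroom for
   alignment padding and for the base-reload insns whose size is
   estimated pessimistically via extra_size below.  */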
c7453384 9081static struct constant_pool *
585539a1 9082s390_chunkify_start (void)
9db1d521 9083{
9084 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
9085 int extra_size = 0;
9086 bitmap far_labels;
fd7643fb 9087 rtx pending_ltrel = NULL_RTX;
775c43d3 9088 rtx_insn *insn;
9db1d521 9089
9c808aad 9090 rtx (*gen_reload_base) (rtx, rtx) =
9e8327e3 9091 TARGET_CPU_ZARCH? gen_reload_base_64 : gen_reload_base_31;
9092
9093
9094 /* We need correct insn addresses. */
9095
9096 shorten_branches (get_insns ());
9097
fd7643fb 9098 /* Scan all insns and move literals to pool chunks. */
13e58269 9099
13e58269 9100 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9db1d521 9101 {
9102 bool section_switch_p = false;
9103
9104 /* Check for pending LTREL_BASE. */
9105 if (INSN_P (insn))
9106 {
9107 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
9108 if (ltrel_base)
9109 {
9110 gcc_assert (ltrel_base == pending_ltrel);
9111 pending_ltrel = NULL_RTX;
9112 }
9113 }
9114
d24959df 9115 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
9116 {
9117 if (!curr_pool)
9118 curr_pool = s390_start_pool (&pool_list, insn);
9119
9120 s390_add_execute (curr_pool, insn);
9121 s390_add_pool_insn (curr_pool, insn);
9122 }
b64925dc 9123 else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
b2ccb744 9124 {
aee4e0db 9125 rtx pool_ref = NULL_RTX;
9126 find_constant_pool_ref (PATTERN (insn), &pool_ref);
9127 if (pool_ref)
9128 {
fd7643fb 9129 rtx constant = get_pool_constant (pool_ref);
ef4bddc2 9130 machine_mode mode = get_pool_mode (pool_ref);
fd7643fb 9131
9132 if (!curr_pool)
9133 curr_pool = s390_start_pool (&pool_list, insn);
9134
fd7643fb 9135 s390_add_constant (curr_pool, constant, mode);
aee4e0db 9136 s390_add_pool_insn (curr_pool, insn);
aee4e0db 9137
9138 /* Don't split the pool chunk between a LTREL_OFFSET load
9139 and the corresponding LTREL_BASE. */
9140 if (GET_CODE (constant) == CONST
9141 && GET_CODE (XEXP (constant, 0)) == UNSPEC
9142 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
9143 {
8d933e31 9144 gcc_assert (!pending_ltrel);
9145 pending_ltrel = pool_ref;
9146 }
9147 }
9148 }
9149
39718607 9150 if (JUMP_P (insn) || JUMP_TABLE_DATA_P (insn) || LABEL_P (insn))
9151 {
9152 if (curr_pool)
9153 s390_add_pool_insn (curr_pool, insn);
9154 /* An LTREL_BASE must follow within the same basic block. */
8d933e31 9155 gcc_assert (!pending_ltrel);
fd7643fb 9156 }
aee4e0db 9157
9158 if (NOTE_P (insn))
9159 switch (NOTE_KIND (insn))
9160 {
9161 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
9162 section_switch_p = true;
9163 break;
9164 case NOTE_INSN_VAR_LOCATION:
9165 case NOTE_INSN_CALL_ARG_LOCATION:
9166 continue;
9167 default:
9168 break;
9169 }
03870a04 9170
c7453384 9171 if (!curr_pool
9172 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
9173 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
9db1d521 9174 continue;
13e58269 9175
9e8327e3 9176 if (TARGET_CPU_ZARCH)
9db1d521 9177 {
9178 if (curr_pool->size < S390_POOL_CHUNK_MAX)
9179 continue;
13e58269 9180
775c43d3 9181 s390_end_pool (curr_pool, NULL);
9182 curr_pool = NULL;
9183 }
9184 else
9db1d521 9185 {
b2ccb744 9186 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
9c808aad 9187 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
9188 + extra_size;
9189
9190 /* We will later have to insert base register reload insns.
9191 Those will have an effect on code size, which we need to
9192 consider here. This calculation makes rather pessimistic
9193 worst-case assumptions. */
b64925dc 9194 if (LABEL_P (insn))
b2ccb744 9195 extra_size += 6;
9196
9197 if (chunk_size < S390_POOL_CHUNK_MIN
9198 && curr_pool->size < S390_POOL_CHUNK_MIN
9199 && !section_switch_p)
b2ccb744
UW
9200 continue;
9201
9202 /* Pool chunks can only be inserted after BARRIERs ... */
b64925dc 9203 if (BARRIER_P (insn))
9204 {
9205 s390_end_pool (curr_pool, insn);
9206 curr_pool = NULL;
9207 extra_size = 0;
9208 }
9209
9210 /* ... so if we don't find one in time, create one. */
9211 else if (chunk_size > S390_POOL_CHUNK_MAX
9212 || curr_pool->size > S390_POOL_CHUNK_MAX
9213 || section_switch_p)
b2ccb744 9214 {
775c43d3 9215 rtx_insn *label, *jump, *barrier, *next, *prev;
b2ccb744 9216
9217 if (!section_switch_p)
9218 {
9219 /* We can insert the barrier only after a 'real' insn. */
b64925dc 9220 if (! NONJUMP_INSN_P (insn) && ! CALL_P (insn))
9221 continue;
9222 if (get_attr_length (insn) == 0)
9223 continue;
9224 /* Don't separate LTREL_BASE from the corresponding
00fbd5c8 9225 LTREL_OFFSET load. */
9226 if (pending_ltrel)
9227 continue;
9228 next = insn;
9229 do
9230 {
9231 insn = next;
9232 next = NEXT_INSN (insn);
9233 }
9234 while (next
9235 && NOTE_P (next)
9236 && (NOTE_KIND (next) == NOTE_INSN_VAR_LOCATION
9237 || NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION));
9238 }
9239 else
9240 {
9241 gcc_assert (!pending_ltrel);
9242
9243 /* The old pool has to end before the section switch
9244 note in order to make it part of the current
9245 section. */
9246 insn = PREV_INSN (insn);
9247 }
aee4e0db 9248
9c808aad 9249 label = gen_label_rtx ();
9250 prev = insn;
9251 if (prev && NOTE_P (prev))
9252 prev = prev_nonnote_insn (prev);
9253 if (prev)
9254 jump = emit_jump_insn_after_setloc (gen_jump (label), insn,
9d12bc68 9255 INSN_LOCATION (prev));
9256 else
9257 jump = emit_jump_insn_after_noloc (gen_jump (label), insn);
9258 barrier = emit_barrier_after (jump);
9259 insn = emit_label_after (label, barrier);
9260 JUMP_LABEL (jump) = label;
9261 LABEL_NUSES (label) = 1;
9262
9263 INSN_ADDRESSES_NEW (jump, -1);
9264 INSN_ADDRESSES_NEW (barrier, -1);
9265 INSN_ADDRESSES_NEW (insn, -1);
9266
9267 s390_end_pool (curr_pool, barrier);
9268 curr_pool = NULL;
9269 extra_size = 0;
9270 }
13e58269 9271 }
9db1d521 9272 }
ce50cae8 9273
aee4e0db 9274 if (curr_pool)
775c43d3 9275 s390_end_pool (curr_pool, NULL);
8d933e31 9276 gcc_assert (!pending_ltrel);
b2ccb744 9277
c7453384 9278 /* Find all labels that are branched into
13e58269 9279 from an insn belonging to a different chunk. */
ce50cae8 9280
7b210806 9281 far_labels = BITMAP_ALLOC (NULL);
6bc627b3 9282
13e58269 9283 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9db1d521 9284 {
8942ee0f 9285 rtx_jump_table_data *table;
966f97ac 9286
9287 /* Labels marked with LABEL_PRESERVE_P can be target
9288 of non-local jumps, so we have to mark them.
9289 The same holds for named labels.
9290
9291 Don't do that, however, if it is the label before
9292 a jump table. */
9293
b64925dc 9294 if (LABEL_P (insn)
9295 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
9296 {
775c43d3 9297 rtx_insn *vec_insn = NEXT_INSN (insn);
34f0d87a 9298 if (! vec_insn || ! JUMP_TABLE_DATA_P (vec_insn))
9299 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
9300 }
9301 /* Check potential targets in a table jump (casesi_jump). */
9302 else if (tablejump_p (insn, NULL, &table))
9303 {
9304 rtx vec_pat = PATTERN (table);
9305 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
9306
9307 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
9308 {
9309 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
b2ccb744 9310
9311 if (s390_find_pool (pool_list, label)
9312 != s390_find_pool (pool_list, insn))
9313 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
9314 }
9315 }
9316 /* If we have a direct jump (conditional or unconditional),
9317 check all potential targets. */
b64925dc 9318 else if (JUMP_P (insn))
13e58269 9319 {
966f97ac 9320 rtx pat = PATTERN (insn);
5fdc1e5d 9321
966f97ac 9322 if (GET_CODE (pat) == PARALLEL)
9323 pat = XVECEXP (pat, 0, 0);
9324
9325 if (GET_CODE (pat) == SET)
9326 {
aee4e0db 9327 rtx label = JUMP_LABEL (insn);
177bc204 9328 if (label && !ANY_RETURN_P (label))
13e58269 9329 {
966f97ac 9330 if (s390_find_pool (pool_list, label)
b2ccb744
UW
9331 != s390_find_pool (pool_list, insn))
9332 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
13e58269 9333 }
b2ccb744 9334 }
966f97ac 9335 }
9db1d521 9336 }
ce50cae8 9337
9338 /* Insert base register reload insns before every pool. */
9339
9340 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
aee4e0db 9341 {
f4aa3848 9342 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
585539a1 9343 curr_pool->label);
775c43d3 9344 rtx_insn *insn = curr_pool->first_insn;
9345 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
9346 }
9347
9348 /* Insert base register reload insns at every far label. */
13e58269 9349
13e58269 9350 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
b64925dc 9351 if (LABEL_P (insn)
9352 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
9353 {
9354 struct constant_pool *pool = s390_find_pool (pool_list, insn);
9355 if (pool)
9356 {
f4aa3848 9357 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
585539a1 9358 pool->label);
aee4e0db 9359 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
9360 }
9361 }
9362
aee4e0db 9363
7b210806 9364 BITMAP_FREE (far_labels);
13e58269 9365
9366
9367 /* Recompute insn addresses. */
9368
9369 init_insn_lengths ();
9370 shorten_branches (get_insns ());
9db1d521 9371
9372 return pool_list;
9373}
9db1d521 9374
aee4e0db 9375/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
c7453384 9376 After we have decided to use this list, finish implementing
585539a1 9377 all changes to the current function as required. */
c7453384 9378
aee4e0db 9379static void
585539a1 9380s390_chunkify_finish (struct constant_pool *pool_list)
aee4e0db 9381{
aee4e0db 9382 struct constant_pool *curr_pool = NULL;
775c43d3 9383 rtx_insn *insn;
9384
9385
aee4e0db
UW
9386 /* Replace all literal pool references. */
9387
c7453384 9388 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
aee4e0db 9389 {
fd7643fb 9390 if (INSN_P (insn))
585539a1 9391 replace_ltrel_base (&PATTERN (insn));
fd7643fb 9392
9393 curr_pool = s390_find_pool (pool_list, insn);
9394 if (!curr_pool)
9395 continue;
9396
b64925dc 9397 if (NONJUMP_INSN_P (insn) || CALL_P (insn))
9398 {
9399 rtx addr, pool_ref = NULL_RTX;
9400 find_constant_pool_ref (PATTERN (insn), &pool_ref);
9401 if (pool_ref)
9402 {
9403 if (s390_execute_label (insn))
9404 addr = s390_find_execute (curr_pool, insn);
9405 else
9406 addr = s390_find_constant (curr_pool,
9407 get_pool_constant (pool_ref),
9408 get_pool_mode (pool_ref));
9409
9410 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
9411 INSN_CODE (insn) = -1;
9412 }
9413 }
9414 }
9415
9416 /* Dump out all literal pools. */
c7453384 9417
aee4e0db 9418 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5af2f3d3 9419 s390_dump_pool (curr_pool, 0);
c7453384 9420
9421 /* Free pool list. */
9422
9423 while (pool_list)
9424 {
9425 struct constant_pool *next = pool_list->next;
9426 s390_free_pool (pool_list);
9427 pool_list = next;
9428 }
9429}
9430
9431/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
9432 We have decided we cannot use this list, so revert all changes
9433 to the current function that were done by s390_chunkify_start. */
c7453384 9434
aee4e0db 9435static void
9c808aad 9436s390_chunkify_cancel (struct constant_pool *pool_list)
9437{
9438 struct constant_pool *curr_pool = NULL;
775c43d3 9439 rtx_insn *insn;
9440
9441 /* Remove all pool placeholder insns. */
9442
9443 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
9444 {
9445 /* Did we insert an extra barrier? Remove it. */
9446 rtx_insn *barrier = PREV_INSN (curr_pool->pool_insn);
9447 rtx_insn *jump = barrier? PREV_INSN (barrier) : NULL;
9448 rtx_insn *label = NEXT_INSN (curr_pool->pool_insn);
aee4e0db 9449
9450 if (jump && JUMP_P (jump)
9451 && barrier && BARRIER_P (barrier)
9452 && label && LABEL_P (label)
9453 && GET_CODE (PATTERN (jump)) == SET
9454 && SET_DEST (PATTERN (jump)) == pc_rtx
9455 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
9456 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
9457 {
9458 remove_insn (jump);
9459 remove_insn (barrier);
9460 remove_insn (label);
b2ccb744 9461 }
9db1d521 9462
9463 remove_insn (curr_pool->pool_insn);
9464 }
9465
fd7643fb 9466 /* Remove all base register reload insns. */
9467
9468 for (insn = get_insns (); insn; )
9469 {
775c43d3 9470 rtx_insn *next_insn = NEXT_INSN (insn);
aee4e0db 9471
b64925dc 9472 if (NONJUMP_INSN_P (insn)
9473 && GET_CODE (PATTERN (insn)) == SET
9474 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
fd7643fb 9475 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
aee4e0db 9476 remove_insn (insn);
9db1d521 9477
9478 insn = next_insn;
9479 }
9480
9481 /* Free pool list. */
9db1d521 9482
b2ccb744 9483 while (pool_list)
9db1d521 9484 {
9485 struct constant_pool *next = pool_list->next;
9486 s390_free_pool (pool_list);
9487 pool_list = next;
9db1d521 9488 }
9489}
9490
faeb9bb6 9491/* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
9492
9493void
ef4bddc2 9494s390_output_pool_entry (rtx exp, machine_mode mode, unsigned int align)
416cf582 9495{
9496 switch (GET_MODE_CLASS (mode))
9497 {
9498 case MODE_FLOAT:
4dc19cc0 9499 case MODE_DECIMAL_FLOAT:
8d933e31 9500 gcc_assert (GET_CODE (exp) == CONST_DOUBLE);
416cf582 9501
9502 assemble_real (*CONST_DOUBLE_REAL_VALUE (exp),
9503 as_a <scalar_float_mode> (mode), align);
9504 break;
9505
9506 case MODE_INT:
faeb9bb6 9507 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
a7fe25b8 9508 mark_symbol_refs_as_used (exp);
9509 break;
9510
9511 case MODE_VECTOR_INT:
9512 case MODE_VECTOR_FLOAT:
9513 {
9514 int i;
9515 machine_mode inner_mode;
9516 gcc_assert (GET_CODE (exp) == CONST_VECTOR);
9517
9518 inner_mode = GET_MODE_INNER (GET_MODE (exp));
9519 for (i = 0; i < XVECLEN (exp, 0); i++)
9520 s390_output_pool_entry (XVECEXP (exp, 0, i),
9521 inner_mode,
9522 i == 0
9523 ? align
9524 : GET_MODE_BITSIZE (inner_mode));
9525 }
9526 break;
9527
416cf582 9528 default:
8d933e31 9529 gcc_unreachable ();
9530 }
9531}
9532
9533
9534/* Return an RTL expression representing the value of the return address
9535 for the frame COUNT steps up from the current frame. FRAME is the
9536 frame pointer of that frame. */
b2ccb744 9537
9538rtx
9539s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
b2ccb744 9540{
9541 int offset;
9542 rtx addr;
aee4e0db 9543
ab96de7e 9544 /* Without backchain, we fail for all but the current frame. */
c3cc6b78 9545
9546 if (!TARGET_BACKCHAIN && count > 0)
9547 return NULL_RTX;
c3cc6b78 9548
9549 /* For the current frame, we need to make sure the initial
9550 value of RETURN_REGNUM is actually saved. */
c3cc6b78 9551
ab96de7e 9552 if (count == 0)
c3cc6b78 9553 {
9554 /* On non-z architectures branch splitting could overwrite r14. */
9555 if (TARGET_CPU_ZARCH)
9556 return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
9557 else
9558 {
9559 cfun_frame_layout.save_return_addr_p = true;
9560 return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
9561 }
ab96de7e 9562 }
c3cc6b78 9563
ab96de7e 9564 if (TARGET_PACKED_STACK)
9602b6a1 9565 offset = -2 * UNITS_PER_LONG;
ab96de7e 9566 else
9602b6a1 9567 offset = RETURN_REGNUM * UNITS_PER_LONG;
c3cc6b78 9568
0a81f074 9569 addr = plus_constant (Pmode, frame, offset);
9570 addr = memory_address (Pmode, addr);
9571 return gen_rtx_MEM (Pmode, addr);
9572}
c3cc6b78 9573
9574/* Return an RTL expression representing the back chain stored in
9575 the current stack frame. */
545d16ff 9576
9577rtx
9578s390_back_chain_rtx (void)
9579{
9580 rtx chain;
545d16ff 9581
ab96de7e 9582 gcc_assert (TARGET_BACKCHAIN);
545d16ff 9583
ab96de7e 9584 if (TARGET_PACKED_STACK)
0a81f074 9585 chain = plus_constant (Pmode, stack_pointer_rtx,
9602b6a1 9586 STACK_POINTER_OFFSET - UNITS_PER_LONG);
9587 else
9588 chain = stack_pointer_rtx;
545d16ff 9589
9590 chain = gen_rtx_MEM (Pmode, chain);
9591 return chain;
9592}
c3cc6b78 9593
9594/* Find first call clobbered register unused in a function.
9595 This could be used as base register in a leaf function
9596 or for holding the return address before epilogue. */
c3cc6b78 9597
9598static int
9599find_unused_clobbered_reg (void)
9600{
9601 int i;
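  /* Only r0-r5 are considered; these six GPRs are call-clobbered on
     s390, and r0 serves as the fallback when all of them are live.  */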
9602 for (i = 0; i < 6; i++)
6fb5fa3c 9603 if (!df_regs_ever_live_p (i))
9604 return i;
9605 return 0;
9606}
c3cc6b78 9607
7bcebb25 9608
f4aa3848 9609/* Helper function for s390_regs_ever_clobbered. Sets the fields in DATA for all
9610 clobbered hard regs in SETREG. */
9611
9612static void
7bc980e1 9613s390_reg_clobbered_rtx (rtx setreg, const_rtx set_insn ATTRIBUTE_UNUSED, void *data)
7bcebb25 9614{
6455a49e 9615 char *regs_ever_clobbered = (char *)data;
7bcebb25 9616 unsigned int i, regno;
ef4bddc2 9617 machine_mode mode = GET_MODE (setreg);
9618
9619 if (GET_CODE (setreg) == SUBREG)
9620 {
9621 rtx inner = SUBREG_REG (setreg);
5a3fe9b6 9622 if (!GENERAL_REG_P (inner) && !FP_REG_P (inner))
9623 return;
9624 regno = subreg_regno (setreg);
9625 }
5a3fe9b6 9626 else if (GENERAL_REG_P (setreg) || FP_REG_P (setreg))
9627 regno = REGNO (setreg);
9628 else
9629 return;
9630
9631 for (i = regno;
9632 i < regno + HARD_REGNO_NREGS (regno, mode);
9633 i++)
9634 regs_ever_clobbered[i] = 1;
9635}
9636
9637/* Walks through all basic blocks of the current function looking
9638 for clobbered hard regs using s390_reg_clobbered_rtx. The fields
 9639 of the passed char array REGS_EVER_CLOBBERED are set to one for
9640 each of those regs. */
9641
9642static void
6455a49e 9643s390_regs_ever_clobbered (char regs_ever_clobbered[])
9644{
9645 basic_block cur_bb;
775c43d3 9646 rtx_insn *cur_insn;
9647 unsigned int i;
9648
6455a49e 9649 memset (regs_ever_clobbered, 0, 32);
9650
9651 /* For non-leaf functions we have to consider all call clobbered regs to be
9652 clobbered. */
416ff32e 9653 if (!crtl->is_leaf)
7bcebb25 9654 {
5a3fe9b6 9655 for (i = 0; i < 32; i++)
9656 regs_ever_clobbered[i] = call_really_used_regs[i];
9657 }
9658
9659 /* Make the "magic" eh_return registers live if necessary. For regs_ever_live
9660 this work is done by liveness analysis (mark_regs_live_at_end).
9661 Special care is needed for functions containing landing pads. Landing pads
9662 may use the eh registers, but the code which sets these registers is not
9663 contained in that function. Hence s390_regs_ever_clobbered is not able to
9664 deal with this automatically. */
e3b5732b 9665 if (crtl->calls_eh_return || cfun->machine->has_landing_pad_p)
7bcebb25 9666 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++)
9667 if (crtl->calls_eh_return
9668 || (cfun->machine->has_landing_pad_p
6fb5fa3c 9669 && df_regs_ever_live_p (EH_RETURN_DATA_REGNO (i))))
297a777d 9670 regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1;
9671
9672 /* For nonlocal gotos all call-saved registers have to be saved.
9673 This flag is also set for the unwinding code in libgcc.
9674 See expand_builtin_unwind_init. For regs_ever_live this is done by
9675 reload. */
6455a49e 9676 if (crtl->saves_all_registers)
5a3fe9b6 9677 for (i = 0; i < 32; i++)
9678 if (!call_really_used_regs[i])
9679 regs_ever_clobbered[i] = 1;
9680
11cd3bed 9681 FOR_EACH_BB_FN (cur_bb, cfun)
9682 {
9683 FOR_BB_INSNS (cur_bb, cur_insn)
9684 {
9685 rtx pat;
9686
9687 if (!INSN_P (cur_insn))
9688 continue;
9689
9690 pat = PATTERN (cur_insn);
9691
9692 /* Ignore GPR restore insns. */
9693 if (epilogue_completed && RTX_FRAME_RELATED_P (cur_insn))
9694 {
9695 if (GET_CODE (pat) == SET
9696 && GENERAL_REG_P (SET_DEST (pat)))
9697 {
9698 /* lgdr */
9699 if (GET_MODE (SET_SRC (pat)) == DImode
9700 && FP_REG_P (SET_SRC (pat)))
9701 continue;
9702
9703 /* l / lg */
9704 if (GET_CODE (SET_SRC (pat)) == MEM)
9705 continue;
9706 }
9707
9708 /* lm / lmg */
9709 if (GET_CODE (pat) == PARALLEL
9710 && load_multiple_operation (pat, VOIDmode))
9711 continue;
9712 }
9713
9714 note_stores (pat,
9715 s390_reg_clobbered_rtx,
9716 regs_ever_clobbered);
9717 }
9718 }
9719}
9720
9721/* Determine the frame area which actually has to be accessed
9722 in the function epilogue. The values are stored at the
ab96de7e 9723 given pointers AREA_BOTTOM (address of the lowest used stack
f4aa3848 9724 address) and AREA_TOP (address of the first item which does
ab96de7e 9725 not belong to the stack frame). */
545d16ff 9726
9727static void
9728s390_frame_area (int *area_bottom, int *area_top)
9729{
9730 int b, t;
545d16ff 9731
9732 b = INT_MAX;
9733 t = INT_MIN;
9734
9735 if (cfun_frame_layout.first_restore_gpr != -1)
9736 {
9737 b = (cfun_frame_layout.gprs_offset
9602b6a1 9738 + cfun_frame_layout.first_restore_gpr * UNITS_PER_LONG);
adf39f8f 9739 t = b + (cfun_frame_layout.last_restore_gpr
9602b6a1 9740 - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_LONG;
9741 }
9742
9743 if (TARGET_64BIT && cfun_save_high_fprs_p)
9744 {
9745 b = MIN (b, cfun_frame_layout.f8_offset);
9746 t = MAX (t, (cfun_frame_layout.f8_offset
9747 + cfun_frame_layout.high_fprs * 8));
9748 }
9749
9750 if (!TARGET_64BIT)
b89b22fc 9751 {
2cf4c39e 9752 if (cfun_fpr_save_p (FPR4_REGNUM))
adf39f8f 9753 {
9754 b = MIN (b, cfun_frame_layout.f4_offset);
9755 t = MAX (t, cfun_frame_layout.f4_offset + 8);
adf39f8f 9756 }
2cf4c39e 9757 if (cfun_fpr_save_p (FPR6_REGNUM))
9758 {
9759 b = MIN (b, cfun_frame_layout.f4_offset + 8);
9760 t = MAX (t, cfun_frame_layout.f4_offset + 16);
9761 }
9762 }
9763 *area_bottom = b;
9764 *area_top = t;
9765}
9766/* Update gpr_save_slots in the frame layout trying to make use of
9767 FPRs as GPR save slots.
9768 This is a helper routine of s390_register_info. */
9769
9770static void
6455a49e 9771s390_register_info_gprtofpr ()
4023fb28 9772{
6455a49e 9773 int save_reg_slot = FPR0_REGNUM;
4023fb28 9774 int i, j;
4023fb28 9775
9776 if (!TARGET_Z10 || !TARGET_HARD_FLOAT || !crtl->is_leaf)
9777 return;
7bcebb25 9778
9779 /* builtin_eh_return needs to be able to modify the return address
9780 on the stack. It could also adjust the FPR save slot instead but
9781 is it worth the trouble?! */
9782 if (crtl->calls_eh_return)
9783 return;
9784
6455a49e 9785 for (i = 15; i >= 6; i--)
5a3fe9b6 9786 {
82379bdf 9787 if (cfun_gpr_save_slot (i) == SAVE_SLOT_NONE)
6455a49e 9788 continue;
36c0bd4f 9789
6455a49e
AK
9790 /* Advance to the next FP register which can be used as a
9791 GPR save slot. */
9792 while ((!call_really_used_regs[save_reg_slot]
9793 || df_regs_ever_live_p (save_reg_slot)
9794 || cfun_fpr_save_p (save_reg_slot))
9795 && FP_REGNO_P (save_reg_slot))
9796 save_reg_slot++;
9797 if (!FP_REGNO_P (save_reg_slot))
9798 {
9799 /* We only want to use ldgr/lgdr if we can get rid of
9800 stm/lm entirely. So undo the gpr slot allocation in
9801 case we ran out of FPR save slots. */
9802 for (j = 6; j <= 15; j++)
9803 if (FP_REGNO_P (cfun_gpr_save_slot (j)))
82379bdf 9804 cfun_gpr_save_slot (j) = SAVE_SLOT_STACK;
6455a49e 9805 break;
36c0bd4f 9806 }
6455a49e 9807 cfun_gpr_save_slot (i) = save_reg_slot++;
5a3fe9b6 9808 }
6455a49e 9809}
5a3fe9b6 9810
6455a49e
AK
9811/* Set the bits in fpr_bitmap for FPRs which need to be saved due to
9812 stdarg.
9813 This is a helper routine for s390_register_info. */
7bcebb25 9814
9815static void
9816s390_register_info_stdarg_fpr ()
9817{
9818 int i;
9819 int min_fpr;
9820 int max_fpr;
9821
9822 /* Save the FP argument regs for stdarg. f0, f2 for 31 bit and
9823 f0-f4 for 64 bit. */
9824 if (!cfun->stdarg
9825 || !TARGET_HARD_FLOAT
9826 || !cfun->va_list_fpr_size
9827 || crtl->args.info.fprs >= FP_ARG_NUM_REG)
9828 return;
9829
9830 min_fpr = crtl->args.info.fprs;
9831 max_fpr = min_fpr + cfun->va_list_fpr_size - 1;
9832 if (max_fpr >= FP_ARG_NUM_REG)
9833 max_fpr = FP_ARG_NUM_REG - 1;
6455a49e 9834
9835 /* FPR argument regs start at f0. */
9836 min_fpr += FPR0_REGNUM;
9837 max_fpr += FPR0_REGNUM;
9838
9839 for (i = min_fpr; i <= max_fpr; i++)
9840 cfun_set_fpr_save (i);
9841}
9842
9843/* Reserve the GPR save slots for GPRs which need to be saved due to
9844 stdarg.
9845 This is a helper routine for s390_register_info. */
9846
9847static void
9848s390_register_info_stdarg_gpr ()
9849{
9850 int i;
9851 int min_gpr;
9852 int max_gpr;
9853
9854 if (!cfun->stdarg
9855 || !cfun->va_list_gpr_size
9856 || crtl->args.info.gprs >= GP_ARG_NUM_REG)
9857 return;
9858
9859 min_gpr = crtl->args.info.gprs;
9860 max_gpr = min_gpr + cfun->va_list_gpr_size - 1;
9861 if (max_gpr >= GP_ARG_NUM_REG)
9862 max_gpr = GP_ARG_NUM_REG - 1;
9863
9864 /* GPR argument regs start at r2. */
9865 min_gpr += GPR2_REGNUM;
9866 max_gpr += GPR2_REGNUM;
9867
9868 /* If r6 was supposed to be saved into an FPR and now needs to go to
9869 the stack for vararg we have to adjust the restore range to make
9870 sure that the restore is done from stack as well. */
9871 if (FP_REGNO_P (cfun_gpr_save_slot (GPR6_REGNUM))
9872 && min_gpr <= GPR6_REGNUM
9873 && max_gpr >= GPR6_REGNUM)
9874 {
9875 if (cfun_frame_layout.first_restore_gpr == -1
9876 || cfun_frame_layout.first_restore_gpr > GPR6_REGNUM)
9877 cfun_frame_layout.first_restore_gpr = GPR6_REGNUM;
9878 if (cfun_frame_layout.last_restore_gpr == -1
9879 || cfun_frame_layout.last_restore_gpr < GPR6_REGNUM)
9880 cfun_frame_layout.last_restore_gpr = GPR6_REGNUM;
9881 }
9882
9883 if (cfun_frame_layout.first_save_gpr == -1
9884 || cfun_frame_layout.first_save_gpr > min_gpr)
9885 cfun_frame_layout.first_save_gpr = min_gpr;
9886
9887 if (cfun_frame_layout.last_save_gpr == -1
9888 || cfun_frame_layout.last_save_gpr < max_gpr)
9889 cfun_frame_layout.last_save_gpr = max_gpr;
9890
9891 for (i = min_gpr; i <= max_gpr; i++)
9892 cfun_gpr_save_slot (i) = SAVE_SLOT_STACK;
9893}
9894
9895/* Calculate the save and restore ranges for stm(g) and lm(g) in the
9896 prologue and epilogue. */
6455a49e 9897
9898static void
9899s390_register_info_set_ranges ()
9900{
9901 int i, j;
9902
9903 /* Find the first and the last save slot supposed to use the stack
9904 to set the restore range.
 9905 Vararg regs might be marked for saving to the stack, but only the
9906 call-saved regs really need restoring (i.e. r6). This code
9907 assumes that the vararg regs have not yet been recorded in
9908 cfun_gpr_save_slot. */
9909 for (i = 0; i < 16 && cfun_gpr_save_slot (i) != SAVE_SLOT_STACK; i++);
9910 for (j = 15; j > i && cfun_gpr_save_slot (j) != SAVE_SLOT_STACK; j--);
9911 cfun_frame_layout.first_restore_gpr = (i == 16) ? -1 : i;
9912 cfun_frame_layout.last_restore_gpr = (i == 16) ? -1 : j;
9913 cfun_frame_layout.first_save_gpr = (i == 16) ? -1 : i;
9914 cfun_frame_layout.last_save_gpr = (i == 16) ? -1 : j;
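  /* Example: if only r6 and r15 ended up with stack save slots, the
     loops above yield i == 6 and j == 15, so a single stm/lm covers
     r6..r15 even though the registers in between need no save.  */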
9915}
9916
9917/* The GPR and FPR save slots in cfun->machine->frame_layout are set
9918 for registers which need to be saved in function prologue.
9919 This function can be used until the insns emitted for save/restore
9920 of the regs are visible in the RTL stream. */
9921
9922static void
9923s390_register_info ()
9924{
82379bdf 9925 int i;
9926 char clobbered_regs[32];
9927
9928 gcc_assert (!epilogue_completed);
9929
9930 if (reload_completed)
9931 /* After reload we rely on our own routine to determine which
9932 registers need saving. */
9933 s390_regs_ever_clobbered (clobbered_regs);
9934 else
9935 /* During reload we use regs_ever_live as a base since reload
9936 does changes in there which we otherwise would not be aware
9937 of. */
9938 for (i = 0; i < 32; i++)
9939 clobbered_regs[i] = df_regs_ever_live_p (i);
9940
9941 for (i = 0; i < 32; i++)
9942 clobbered_regs[i] = clobbered_regs[i] && !global_regs[i];
9943
9944 /* Mark the call-saved FPRs which need to be saved.
9945 This needs to be done before checking the special GPRs since the
9946 stack pointer usage depends on whether high FPRs have to be saved
9947 or not. */
9948 cfun_frame_layout.fpr_bitmap = 0;
9949 cfun_frame_layout.high_fprs = 0;
9950 for (i = FPR0_REGNUM; i <= FPR15_REGNUM; i++)
9951 if (clobbered_regs[i] && !call_really_used_regs[i])
9952 {
9953 cfun_set_fpr_save (i);
9954 if (i >= FPR8_REGNUM)
9955 cfun_frame_layout.high_fprs++;
9956 }
c3cc6b78 9957
9958 /* Register 12 is used for GOT address, but also as temp in prologue
9959 for split-stack stdarg functions (unless r14 is available). */
9960 clobbered_regs[12]
9961 |= ((flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
9962 || (flag_split_stack && cfun->stdarg
9963 && (crtl->is_leaf || TARGET_TPF_PROFILING
9964 || has_hard_reg_initial_val (Pmode, RETURN_REGNUM))));
91086990 9965
f4aa3848 9966 clobbered_regs[BASE_REGNUM]
e2df5c1d 9967 |= (cfun->machine->base_reg
6455a49e 9968 && REGNO (cfun->machine->base_reg) == BASE_REGNUM);
91086990 9969
9970 clobbered_regs[HARD_FRAME_POINTER_REGNUM]
9971 |= !!frame_pointer_needed;
9972
9973 /* On pre z900 machines this might take until machine dependent
9974 reorg to decide.
9975 save_return_addr_p will only be set on non-zarch machines so
9976 there is no risk that r14 goes into an FPR instead of a stack
9977 slot. */
7bcebb25 9978 clobbered_regs[RETURN_REGNUM]
416ff32e 9979 |= (!crtl->is_leaf
dc4477f5 9980 || TARGET_TPF_PROFILING
9981 || cfun->machine->split_branches_pending_p
9982 || cfun_frame_layout.save_return_addr_p
6455a49e 9983 || crtl->calls_eh_return);
91086990 9984
7bcebb25 9985 clobbered_regs[STACK_POINTER_REGNUM]
416ff32e 9986 |= (!crtl->is_leaf
9987 || TARGET_TPF_PROFILING
9988 || cfun_save_high_fprs_p
9989 || get_frame_size () > 0
36c0bd4f 9990 || (reload_completed && cfun_frame_layout.frame_size > 0)
9991 || cfun->calls_alloca);
9992
82379bdf 9993 memset (cfun_frame_layout.gpr_save_slots, SAVE_SLOT_NONE, 16);
7bcebb25 9994
b767fc11 9995 for (i = 6; i < 16; i++)
6455a49e 9996 if (clobbered_regs[i])
82379bdf 9997 cfun_gpr_save_slot (i) = SAVE_SLOT_STACK;
c3cc6b78 9998
9999 s390_register_info_stdarg_fpr ();
10000 s390_register_info_gprtofpr ();
82379bdf 10001 s390_register_info_set_ranges ();
6455a49e 10002 /* stdarg functions might need to save GPRs 2 to 6. This might
10003 override the GPR->FPR save decision made by
10004 s390_register_info_gprtofpr for r6 since vararg regs must go to
10005 the stack. */
6455a49e 10006 s390_register_info_stdarg_gpr ();
6455a49e 10007}
c3cc6b78 10008
10009/* This function is called by s390_optimize_prologue in order to get
10010 rid of unnecessary GPR save/restore instructions. The register info
10011 for the GPRs is re-computed and the ranges are re-calculated. */
29a79fcf 10012
10013static void
10014s390_optimize_register_info ()
10015{
10016 char clobbered_regs[32];
82379bdf 10017 int i;
29a79fcf 10018
10019 gcc_assert (epilogue_completed);
10020 gcc_assert (!cfun->machine->split_branches_pending_p);
b767fc11 10021
6455a49e 10022 s390_regs_ever_clobbered (clobbered_regs);
29a79fcf 10023
10024 for (i = 0; i < 32; i++)
10025 clobbered_regs[i] = clobbered_regs[i] && !global_regs[i];
29a79fcf 10026
10027 /* There is still special treatment needed for cases invisible to
10028 s390_regs_ever_clobbered. */
10029 clobbered_regs[RETURN_REGNUM]
10030 |= (TARGET_TPF_PROFILING
10031 /* When expanding builtin_return_addr in ESA mode we do not
10032 know whether r14 will later be needed as scratch reg when
10033 doing branch splitting. So the builtin always accesses the
10034 r14 save slot and we need to stick to the save/restore
10035 decision for r14 even if it turns out that it didn't get
10036 clobbered. */
10037 || cfun_frame_layout.save_return_addr_p
10038 || crtl->calls_eh_return);
10039
82379bdf 10040 memset (cfun_frame_layout.gpr_save_slots, SAVE_SLOT_NONE, 6);
10041
10042 for (i = 6; i < 16; i++)
10043 if (!clobbered_regs[i])
82379bdf 10044 cfun_gpr_save_slot (i) = SAVE_SLOT_NONE;
6455a49e 10045
82379bdf 10046 s390_register_info_set_ranges ();
6455a49e 10047 s390_register_info_stdarg_gpr ();
10048}
10049
91086990 10050/* Fill cfun->machine with info about frame of current function. */
10051
10052static void
91086990 10053s390_frame_info (void)
adf39f8f 10054{
74129172 10055 HOST_WIDE_INT lowest_offset;
adf39f8f 10056
10057 cfun_frame_layout.first_save_gpr_slot = cfun_frame_layout.first_save_gpr;
10058 cfun_frame_layout.last_save_gpr_slot = cfun_frame_layout.last_save_gpr;
10059
10060 /* The va_arg builtin uses a constant distance of 16 *
10061 UNITS_PER_LONG (r0-r15) to reach the FPRs from the reg_save_area
10062 pointer. So even if we are going to save the stack pointer in an
10063 FPR we need the stack space in order to keep the offsets
10064 correct. */
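      /* Concretely: with UNITS_PER_LONG == 8 (64 bit) the FPR save
	 slots are expected exactly 16 * 8 == 128 bytes past the
	 reg_save_area pointer, no matter where r6-r15 were saved.  */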
10065 if (cfun->stdarg && cfun_save_arg_fprs_p)
10066 {
10067 cfun_frame_layout.last_save_gpr_slot = STACK_POINTER_REGNUM;
10068
10069 if (cfun_frame_layout.first_save_gpr_slot == -1)
10070 cfun_frame_layout.first_save_gpr_slot = STACK_POINTER_REGNUM;
10071 }
10072
adf39f8f 10073 cfun_frame_layout.frame_size = get_frame_size ();
adf39f8f 10074 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
10075 fatal_error (input_location,
10076 "total size of local variables exceeds architecture limit");
f4aa3848 10077
b3d31392 10078 if (!TARGET_PACKED_STACK)
adf39f8f 10079 {
74129172 10080 /* Fixed stack layout. */
adf39f8f 10081 cfun_frame_layout.backchain_offset = 0;
9602b6a1 10082 cfun_frame_layout.f0_offset = 16 * UNITS_PER_LONG;
10083 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
10084 cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
fb3712f6 10085 cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr_slot
9602b6a1 10086 * UNITS_PER_LONG);
adf39f8f 10087 }
74129172 10088 else if (TARGET_BACKCHAIN)
adf39f8f 10089 {
10090 /* Kernel stack layout - packed stack, backchain, no float */
10091 gcc_assert (TARGET_SOFT_FLOAT);
adf39f8f 10092 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
9602b6a1 10093 - UNITS_PER_LONG);
10094
10095 /* The distance between the backchain and the return address
10096 save slot must not change. So we always need a slot for the
10097 stack pointer which resides in between. */
10098 cfun_frame_layout.last_save_gpr_slot = STACK_POINTER_REGNUM;
10099
f4aa3848 10100 cfun_frame_layout.gprs_offset
74129172 10101 = cfun_frame_layout.backchain_offset - cfun_gprs_save_area_size;
f4aa3848 10102
10103 /* FPRs will not be saved. Nevertheless pick sane values to
10104 keep area calculations valid. */
10105 cfun_frame_layout.f0_offset =
10106 cfun_frame_layout.f4_offset =
10107 cfun_frame_layout.f8_offset = cfun_frame_layout.gprs_offset;
adf39f8f 10108 }
74129172 10109 else
adf39f8f 10110 {
10111 int num_fprs;
10112
74129172 10113 /* Packed stack layout without backchain. */
f4aa3848 10114
e179df83
AK
10115 /* With stdarg FPRs need their dedicated slots. */
10116 num_fprs = (TARGET_64BIT && cfun->stdarg ? 2
10117 : (cfun_fpr_save_p (FPR4_REGNUM) +
10118 cfun_fpr_save_p (FPR6_REGNUM)));
10119 cfun_frame_layout.f4_offset = STACK_POINTER_OFFSET - 8 * num_fprs;
10120
10121 num_fprs = (cfun->stdarg ? 2
10122 : (cfun_fpr_save_p (FPR0_REGNUM)
10123 + cfun_fpr_save_p (FPR2_REGNUM)));
10124 cfun_frame_layout.f0_offset = cfun_frame_layout.f4_offset - 8 * num_fprs;
10125
10126 cfun_frame_layout.gprs_offset
adf39f8f 10127 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
10128
10129 cfun_frame_layout.f8_offset = (cfun_frame_layout.gprs_offset
10130 - cfun_frame_layout.high_fprs * 8);
10131 }
10132
10133 if (cfun_save_high_fprs_p)
10134 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;
10135
10136 if (!crtl->is_leaf)
10137 cfun_frame_layout.frame_size += crtl->outgoing_args_size;
10138
10139 /* In the following cases we have to allocate a STACK_POINTER_OFFSET
 10140 sized area at the bottom of the stack. This is also required for
10141 leaf functions. When GCC generates a local stack reference it
10142 will always add STACK_POINTER_OFFSET to all these references. */
416ff32e 10143 if (crtl->is_leaf
10144 && !TARGET_TPF_PROFILING
10145 && cfun_frame_layout.frame_size == 0
6455a49e 10146 && !cfun->calls_alloca)
10147 return;
10148
10149 /* Calculate the number of bytes we have used in our own register
10150 save area. With the packed stack layout we can re-use the
10151 remaining bytes for normal stack elements. */
adf39f8f 10152
10153 if (TARGET_PACKED_STACK)
10154 lowest_offset = MIN (MIN (cfun_frame_layout.f0_offset,
10155 cfun_frame_layout.f4_offset),
10156 cfun_frame_layout.gprs_offset);
10157 else
10158 lowest_offset = 0;
f4aa3848 10159
10160 if (TARGET_BACKCHAIN)
10161 lowest_offset = MIN (lowest_offset, cfun_frame_layout.backchain_offset);
f4aa3848 10162
74129172 10163 cfun_frame_layout.frame_size += STACK_POINTER_OFFSET - lowest_offset;
adf39f8f 10164
 10165 /* If an odd number of GPRs has to be saved under 31 bit, we have to
 10166 adjust the frame size to sustain the 8-byte alignment of stack
 10167 frames. */
10168 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
10169 STACK_BOUNDARY / BITS_PER_UNIT - 1)
10170 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));
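  /* E.g. with STACK_BOUNDARY == 64 this rounds frame_size up to the
     next multiple of 8: 20 becomes 24, while 24 stays 24.  */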
10171}
10172
10173/* Generate frame layout. Fills in register and frame data for the current
10174 function in cfun->machine. This routine can be called multiple times;
10175 it will re-do the complete frame layout every time. */
4023fb28 10176
10177static void
10178s390_init_frame_layout (void)
9db1d521 10179{
10180 HOST_WIDE_INT frame_size;
10181 int base_used;
6455a49e 10182
9158295f
AK
10183 /* After LRA the frame layout is supposed to be read-only and should
10184 not be re-computed. */
10185 if (reload_completed)
10186 return;
b767fc11 10187
91086990
UW
10188 /* On S/390 machines, we may need to perform branch splitting, which
10189 will require both base and return address register. We have no
10190 choice but to assume we're going to need them until right at the
10191 end of the machine dependent reorg phase. */
10192 if (!TARGET_CPU_ZARCH)
10193 cfun->machine->split_branches_pending_p = true;
10194
10195 do
10196 {
10197 frame_size = cfun_frame_layout.frame_size;
10198
10199 /* Try to predict whether we'll need the base register. */
10200 base_used = cfun->machine->split_branches_pending_p
e3b5732b 10201 || crtl->uses_const_pool
20f04e65
AK
10202 || (!DISP_IN_RANGE (frame_size)
10203 && !CONST_OK_FOR_K (frame_size));
91086990
UW
10204
10205 /* Decide which register to use as literal pool base. In small
10206 leaf functions, try to use an unused call-clobbered register
10207 as base register to avoid save/restore overhead. */
10208 if (!base_used)
10209 cfun->machine->base_reg = NULL_RTX;
91086990 10210 else
10148d74
DV
10211 {
10212 int br = 0;
10213
10214 if (crtl->is_leaf)
10215 /* Prefer r5 (most likely to be free). */
10216 for (br = 5; br >= 2 && df_regs_ever_live_p (br); br--)
10217 ;
10218 cfun->machine->base_reg =
0b729f58 10219 gen_rtx_REG (Pmode, (br >= 2) ? br : BASE_REGNUM);
10148d74 10220 }
adf39f8f 10221
6455a49e 10222 s390_register_info ();
91086990
UW
10223 s390_frame_info ();
10224 }
10225 while (frame_size != cfun_frame_layout.frame_size);
9db1d521
HP
10226}
10227
5a3fe9b6
AK
10228/* Remove the FPR clobbers from a tbegin insn if it can be proven that
10229 the TX is nonescaping. A transaction is considered escaping if
10230 there is at least one path from tbegin returning CC0 to the
 10231 function exit block without a tend.
10232
10233 The check so far has some limitations:
10234 - only single tbegin/tend BBs are supported
10235 - the first cond jump after tbegin must separate the CC0 path from ~CC0
 10236 - the case where CC is copied to a GPR and the CC0 check is done
 10237 on the GPR is not supported
10238*/
10239
10240static void
10241s390_optimize_nonescaping_tx (void)
10242{
10243 const unsigned int CC0 = 1 << 3;
10244 basic_block tbegin_bb = NULL;
10245 basic_block tend_bb = NULL;
10246 basic_block bb;
775c43d3 10247 rtx_insn *insn;
5a3fe9b6
AK
10248 bool result = true;
10249 int bb_index;
775c43d3 10250 rtx_insn *tbegin_insn = NULL;
5a3fe9b6
AK
10251
10252 if (!cfun->machine->tbegin_p)
10253 return;
10254
0cae8d31 10255 for (bb_index = 0; bb_index < n_basic_blocks_for_fn (cfun); bb_index++)
5a3fe9b6 10256 {
06e28de2 10257 bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
5a3fe9b6 10258
2561451d
AK
10259 if (!bb)
10260 continue;
10261
5a3fe9b6
AK
10262 FOR_BB_INSNS (bb, insn)
10263 {
10264 rtx ite, cc, pat, target;
10265 unsigned HOST_WIDE_INT mask;
10266
10267 if (!INSN_P (insn) || INSN_CODE (insn) <= 0)
10268 continue;
10269
10270 pat = PATTERN (insn);
10271
10272 if (GET_CODE (pat) == PARALLEL)
10273 pat = XVECEXP (pat, 0, 0);
10274
10275 if (GET_CODE (pat) != SET
10276 || GET_CODE (SET_SRC (pat)) != UNSPEC_VOLATILE)
10277 continue;
10278
10279 if (XINT (SET_SRC (pat), 1) == UNSPECV_TBEGIN)
10280 {
b32d5189 10281 rtx_insn *tmp;
5a3fe9b6
AK
10282
10283 tbegin_insn = insn;
10284
10285 /* Just return if the tbegin doesn't have clobbers. */
10286 if (GET_CODE (PATTERN (insn)) != PARALLEL)
10287 return;
10288
10289 if (tbegin_bb != NULL)
10290 return;
10291
10292 /* Find the next conditional jump. */
10293 for (tmp = NEXT_INSN (insn);
10294 tmp != NULL_RTX;
10295 tmp = NEXT_INSN (tmp))
10296 {
10297 if (reg_set_p (gen_rtx_REG (CCmode, CC_REGNUM), tmp))
10298 return;
10299 if (!JUMP_P (tmp))
10300 continue;
10301
10302 ite = SET_SRC (PATTERN (tmp));
10303 if (GET_CODE (ite) != IF_THEN_ELSE)
10304 continue;
10305
10306 cc = XEXP (XEXP (ite, 0), 0);
10307 if (!REG_P (cc) || !CC_REGNO_P (REGNO (cc))
10308 || GET_MODE (cc) != CCRAWmode
10309 || GET_CODE (XEXP (XEXP (ite, 0), 1)) != CONST_INT)
10310 return;
10311
10312 if (bb->succs->length () != 2)
10313 return;
10314
10315 mask = INTVAL (XEXP (XEXP (ite, 0), 1));
10316 if (GET_CODE (XEXP (ite, 0)) == NE)
10317 mask ^= 0xf;
10318
10319 if (mask == CC0)
10320 target = XEXP (ite, 1);
10321 else if (mask == (CC0 ^ 0xf))
10322 target = XEXP (ite, 2);
10323 else
10324 return;
10325
10326 {
10327 edge_iterator ei;
10328 edge e1, e2;
10329
10330 ei = ei_start (bb->succs);
10331 e1 = ei_safe_edge (ei);
10332 ei_next (&ei);
10333 e2 = ei_safe_edge (ei);
10334
10335 if (e2->flags & EDGE_FALLTHRU)
10336 {
10337 e2 = e1;
10338 e1 = ei_safe_edge (ei);
10339 }
10340
10341 if (!(e1->flags & EDGE_FALLTHRU))
10342 return;
10343
10344 tbegin_bb = (target == pc_rtx) ? e1->dest : e2->dest;
10345 }
10346 if (tmp == BB_END (bb))
10347 break;
10348 }
10349 }
10350
10351 if (XINT (SET_SRC (pat), 1) == UNSPECV_TEND)
10352 {
10353 if (tend_bb != NULL)
10354 return;
10355 tend_bb = bb;
10356 }
10357 }
10358 }
10359
10360 /* Either we successfully remove the FPR clobbers here or we are not
 10361 able to do anything for this TX. Neither case qualifies for
 10362 another look. */
10363 cfun->machine->tbegin_p = false;
10364
10365 if (tbegin_bb == NULL || tend_bb == NULL)
10366 return;
10367
10368 calculate_dominance_info (CDI_POST_DOMINATORS);
10369 result = dominated_by_p (CDI_POST_DOMINATORS, tbegin_bb, tend_bb);
10370 free_dominance_info (CDI_POST_DOMINATORS);
10371
10372 if (!result)
10373 return;
10374
2561451d
AK
10375 PATTERN (tbegin_insn) = gen_rtx_PARALLEL (VOIDmode,
10376 gen_rtvec (2,
10377 XVECEXP (PATTERN (tbegin_insn), 0, 0),
10378 XVECEXP (PATTERN (tbegin_insn), 0, 1)));
5a3fe9b6
AK
10379 INSN_CODE (tbegin_insn) = -1;
10380 df_insn_rescan (tbegin_insn);
10381
10382 return;
10383}
10384
74aa8b4b
AK
10385/* Return true if it is legal to put a value with MODE into REGNO. */
10386
10387bool
ef4bddc2 10388s390_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
74aa8b4b 10389{
085261c8
AK
10390 if (!TARGET_VX && VECTOR_NOFP_REGNO_P (regno))
10391 return false;
10392
74aa8b4b
AK
10393 switch (REGNO_REG_CLASS (regno))
10394 {
085261c8
AK
10395 case VEC_REGS:
10396 return ((GET_MODE_CLASS (mode) == MODE_INT
10397 && s390_class_max_nregs (VEC_REGS, mode) == 1)
10398 || mode == DFmode
2de2b3f9 10399 || (TARGET_VXE && mode == SFmode)
085261c8
AK
10400 || s390_vector_mode_supported_p (mode));
10401 break;
74aa8b4b 10402 case FP_REGS:
085261c8
AK
10403 if (TARGET_VX
10404 && ((GET_MODE_CLASS (mode) == MODE_INT
10405 && s390_class_max_nregs (FP_REGS, mode) == 1)
10406 || mode == DFmode
10407 || s390_vector_mode_supported_p (mode)))
10408 return true;
10409
74aa8b4b
AK
10410 if (REGNO_PAIR_OK (regno, mode))
10411 {
10412 if (mode == SImode || mode == DImode)
10413 return true;
10414
10415 if (FLOAT_MODE_P (mode) && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
10416 return true;
10417 }
10418 break;
10419 case ADDR_REGS:
10420 if (FRAME_REGNO_P (regno) && mode == Pmode)
10421 return true;
10422
10423 /* fallthrough */
10424 case GENERAL_REGS:
10425 if (REGNO_PAIR_OK (regno, mode))
10426 {
9602b6a1 10427 if (TARGET_ZARCH
4dc19cc0 10428 || (mode != TFmode && mode != TCmode && mode != TDmode))
74aa8b4b 10429 return true;
f4aa3848 10430 }
74aa8b4b
AK
10431 break;
10432 case CC_REGS:
10433 if (GET_MODE_CLASS (mode) == MODE_CC)
10434 return true;
10435 break;
10436 case ACCESS_REGS:
10437 if (REGNO_PAIR_OK (regno, mode))
10438 {
10439 if (mode == SImode || mode == Pmode)
10440 return true;
10441 }
10442 break;
10443 default:
10444 return false;
10445 }
f4aa3848 10446
74aa8b4b
AK
10447 return false;
10448}
10449
7633f08e
UW
10450/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
10451
10452bool
10453s390_hard_regno_rename_ok (unsigned int old_reg, unsigned int new_reg)
10454{
10455 /* Once we've decided upon a register to use as base register, it must
10456 no longer be used for any other purpose. */
10457 if (cfun->machine->base_reg)
10458 if (REGNO (cfun->machine->base_reg) == old_reg
10459 || REGNO (cfun->machine->base_reg) == new_reg)
10460 return false;
10461
6455a49e
AK
10462 /* Prevent regrename from using call-saved regs which haven't
10463 actually been saved. This is necessary since regrename assumes
10464 the backend save/restore decisions are based on
10465 df_regs_ever_live. Since we have our own routine we have to tell
10466 regrename manually about it. */
10467 if (GENERAL_REGNO_P (new_reg)
10468 && !call_really_used_regs[new_reg]
82379bdf 10469 && cfun_gpr_save_slot (new_reg) == SAVE_SLOT_NONE)
6455a49e
AK
10470 return false;
10471
10472 return true;
10473}
10474
10475/* Return nonzero if register REGNO can be used as a scratch register
10476 in peephole2. */
10477
10478static bool
10479s390_hard_regno_scratch_ok (unsigned int regno)
10480{
10481 /* See s390_hard_regno_rename_ok. */
10482 if (GENERAL_REGNO_P (regno)
10483 && !call_really_used_regs[regno]
82379bdf 10484 && cfun_gpr_save_slot (regno) == SAVE_SLOT_NONE)
6455a49e
AK
10485 return false;
10486
7633f08e
UW
10487 return true;
10488}
10489
74aa8b4b 10490/* Maximum number of registers to represent a value of mode MODE
0a2aaacc 10491 in a register of class RCLASS. */
74aa8b4b 10492
c4100a35 10493int
ef4bddc2 10494s390_class_max_nregs (enum reg_class rclass, machine_mode mode)
74aa8b4b 10495{
085261c8
AK
10496 int reg_size;
10497 bool reg_pair_required_p = false;
10498
0a2aaacc 10499 switch (rclass)
74aa8b4b
AK
10500 {
10501 case FP_REGS:
085261c8
AK
10502 case VEC_REGS:
10503 reg_size = TARGET_VX ? 16 : 8;
10504
10505 /* TF and TD modes would fit into a VR but we put them into a
10506 register pair since we do not have 128bit FP instructions on
10507 full VRs. */
10508 if (TARGET_VX
10509 && SCALAR_FLOAT_MODE_P (mode)
10510 && GET_MODE_SIZE (mode) >= 16)
10511 reg_pair_required_p = true;
10512
10513 /* Even if complex types would fit into a single FPR/VR we force
10514 them into a register pair to deal with the parts more easily.
10515 (FIXME: What about complex ints?) */
74aa8b4b 10516 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
085261c8
AK
10517 reg_pair_required_p = true;
10518 break;
74aa8b4b 10519 case ACCESS_REGS:
085261c8
AK
10520 reg_size = 4;
10521 break;
74aa8b4b 10522 default:
085261c8 10523 reg_size = UNITS_PER_WORD;
74aa8b4b
AK
10524 break;
10525 }
085261c8
AK
10526
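  /* Illustrative cases: with TARGET_VX a TFmode value (16 bytes) in
     VEC_REGS needs a register pair, i.e. 2 registers; a DImode value
     (8 bytes) in GENERAL_REGS on 31 bit also needs 2 registers since
     reg_size is UNITS_PER_WORD == 4 there. */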
10527 if (reg_pair_required_p)
10528 return 2 * ((GET_MODE_SIZE (mode) / 2 + reg_size - 1) / reg_size);
10529
10530 return (GET_MODE_SIZE (mode) + reg_size - 1) / reg_size;
10531}
10532
10533/* Return TRUE if changing mode from FROM to TO should not be allowed
10534 for register class CLASS. */
10535
10536int
10537s390_cannot_change_mode_class (machine_mode from_mode,
10538 machine_mode to_mode,
10539 enum reg_class rclass)
10540{
10541 machine_mode small_mode;
10542 machine_mode big_mode;
10543
2de2b3f9
AK
10544 /* V1TF and TF have different representations in vector
10545 registers. */
10546 if (reg_classes_intersect_p (VEC_REGS, rclass)
10547 && ((from_mode == V1TFmode && to_mode == TFmode)
10548 || (from_mode == TFmode && to_mode == V1TFmode)))
10549 return 1;
10550
085261c8
AK
10551 if (GET_MODE_SIZE (from_mode) == GET_MODE_SIZE (to_mode))
10552 return 0;
10553
10554 if (GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
10555 {
10556 small_mode = from_mode;
10557 big_mode = to_mode;
10558 }
10559 else
10560 {
10561 small_mode = to_mode;
10562 big_mode = from_mode;
10563 }
10564
 10565 /* Values residing in VRs are stored little-endian style. All modes
 10566 are placed left-aligned in a VR. This means that we cannot allow
10567 switching between modes with differing sizes. Also if the vector
10568 facility is available we still place TFmode values in VR register
10569 pairs, since the only instructions we have operating on TFmodes
10570 only deal with register pairs. Therefore we have to allow DFmode
10571 subregs of TFmodes to enable the TFmode splitters. */
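  /* E.g. an SImode subreg of a V4SImode value is rejected here since
     the smaller mode is under 8 bytes, while DFmode subregs of TFmode
     values stay allowed: DFmode is exactly 8 bytes wide and TFmode
     occupies a register pair. */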
10572 if (reg_classes_intersect_p (VEC_REGS, rclass)
10573 && (GET_MODE_SIZE (small_mode) < 8
10574 || s390_class_max_nregs (VEC_REGS, big_mode) == 1))
10575 return 1;
10576
10577 /* Likewise for access registers, since they have only half the
10578 word size on 64-bit. */
10579 if (reg_classes_intersect_p (ACCESS_REGS, rclass))
10580 return 1;
10581
10582 return 0;
74aa8b4b
AK
10583}
10584
3597e113
VM
10585/* Return true if we use LRA instead of reload pass. */
10586static bool
10587s390_lra_p (void)
10588{
10589 return s390_lra_flag;
10590}
10591
91086990
UW
10592/* Return true if register FROM can be eliminated via register TO. */
10593
7b5cbb57
AS
10594static bool
10595s390_can_eliminate (const int from, const int to)
91086990 10596{
7633f08e
UW
10597 /* On zSeries machines, we have not marked the base register as fixed.
10598 Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM.
10599 If a function requires the base register, we say here that this
10600 elimination cannot be performed. This will cause reload to free
10601 up the base register (as if it were fixed). On the other hand,
10602 if the current function does *not* require the base register, we
10603 say here the elimination succeeds, which in turn allows reload
10604 to allocate the base register for any other purpose. */
10605 if (from == BASE_REGNUM && to == BASE_REGNUM)
10606 {
10607 if (TARGET_CPU_ZARCH)
10608 {
10609 s390_init_frame_layout ();
10610 return cfun->machine->base_reg == NULL_RTX;
10611 }
10612
10613 return false;
10614 }
10615
10616 /* Everything else must point into the stack frame. */
91086990
UW
10617 gcc_assert (to == STACK_POINTER_REGNUM
10618 || to == HARD_FRAME_POINTER_REGNUM);
10619
10620 gcc_assert (from == FRAME_POINTER_REGNUM
10621 || from == ARG_POINTER_REGNUM
10622 || from == RETURN_ADDRESS_POINTER_REGNUM);
10623
10624 /* Make sure we actually saved the return address. */
10625 if (from == RETURN_ADDRESS_POINTER_REGNUM)
e3b5732b
JH
10626 if (!crtl->calls_eh_return
10627 && !cfun->stdarg
91086990
UW
10628 && !cfun_frame_layout.save_return_addr_p)
10629 return false;
10630
10631 return true;
10632}
10633
10634/* Return the offset between registers FROM and TO initially after the prologue. */
a38e09bc
AK
10635
10636HOST_WIDE_INT
91086990 10637s390_initial_elimination_offset (int from, int to)
a38e09bc 10638{
91086990 10639 HOST_WIDE_INT offset;
a38e09bc 10640
91086990
UW
10641 /* ??? Why are we called for non-eliminable pairs? */
10642 if (!s390_can_eliminate (from, to))
10643 return 0;
10644
10645 switch (from)
10646 {
10647 case FRAME_POINTER_REGNUM:
f4aa3848 10648 offset = (get_frame_size()
63296cb1 10649 + STACK_POINTER_OFFSET
38173d38 10650 + crtl->outgoing_args_size);
91086990 10651 break;
adf39f8f 10652
91086990
UW
10653 case ARG_POINTER_REGNUM:
10654 s390_init_frame_layout ();
10655 offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
10656 break;
10657
10658 case RETURN_ADDRESS_POINTER_REGNUM:
10659 s390_init_frame_layout ();
6455a49e
AK
10660
10661 if (cfun_frame_layout.first_save_gpr_slot == -1)
10662 {
10663 /* If it turns out that for stdarg nothing went into the reg
10664 save area we also do not need the return address
10665 pointer. */
10666 if (cfun->stdarg && !cfun_save_arg_fprs_p)
10667 return 0;
10668
10669 gcc_unreachable ();
10670 }
10671
10672 /* In order to make the following work it is not necessary for
10673 r14 to have a save slot. It is sufficient if one other GPR
10674 got one. Since the GPRs are always stored without gaps we
10675 are able to calculate where the r14 save slot would
10676 reside. */
10677 offset = (cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset +
10678 (RETURN_REGNUM - cfun_frame_layout.first_save_gpr_slot) *
10679 UNITS_PER_LONG);
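      /* E.g. (hypothetical layout): with frame_size == 160,
         gprs_offset == 48 and first_save_gpr_slot == 6, the r14 slot
         on 64 bit is found at 160 + 48 + (14 - 6) * 8 == 272. */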
91086990
UW
10680 break;
10681
7633f08e
UW
10682 case BASE_REGNUM:
10683 offset = 0;
10684 break;
10685
91086990
UW
10686 default:
10687 gcc_unreachable ();
10688 }
10689
10690 return offset;
a38e09bc
AK
10691}
10692
4023fb28 10693/* Emit insn to save fpr REGNUM at offset OFFSET relative
c7453384 10694 to register BASE. Return generated insn. */
994fe660 10695
9db1d521 10696static rtx
9c808aad 10697save_fpr (rtx base, int offset, int regnum)
9db1d521 10698{
4023fb28 10699 rtx addr;
0a81f074 10700 addr = gen_rtx_MEM (DFmode, plus_constant (Pmode, base, offset));
dcc9eb26
AK
10701
10702 if (regnum >= 16 && regnum <= (16 + FP_ARG_NUM_REG))
10703 set_mem_alias_set (addr, get_varargs_alias_set ());
10704 else
10705 set_mem_alias_set (addr, get_frame_alias_set ());
9db1d521 10706
4023fb28
UW
10707 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
10708}
9db1d521 10709
4023fb28 10710/* Emit insn to restore fpr REGNUM from offset OFFSET relative
c7453384 10711 to register BASE. Return generated insn. */
9db1d521 10712
4023fb28 10713static rtx
9c808aad 10714restore_fpr (rtx base, int offset, int regnum)
4023fb28
UW
10715{
10716 rtx addr;
0a81f074 10717 addr = gen_rtx_MEM (DFmode, plus_constant (Pmode, base, offset));
dcc9eb26 10718 set_mem_alias_set (addr, get_frame_alias_set ());
9db1d521 10719
4023fb28 10720 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
9db1d521
HP
10721}
10722
75707b28
JJ
10723/* Return true if REGNO is a global register, but not one
 10724 of the special ones that need to be saved/restored anyway. */
10725
10726static inline bool
10727global_not_special_regno_p (int regno)
10728{
10729 return (global_regs[regno]
10730 /* These registers are special and need to be
10731 restored in any case. */
10732 && !(regno == STACK_POINTER_REGNUM
10733 || regno == RETURN_REGNUM
10734 || regno == BASE_REGNUM
10735 || (flag_pic && regno == (int)PIC_OFFSET_TABLE_REGNUM)));
10736}
10737
c3cc6b78 10738/* Generate insn to save registers FIRST to LAST into
c7453384 10739 the register save area located at offset OFFSET
c3cc6b78 10740 relative to register BASE. */
9db1d521 10741
c3cc6b78 10742static rtx
9c808aad 10743save_gprs (rtx base, int offset, int first, int last)
9db1d521 10744{
c3cc6b78
UW
10745 rtx addr, insn, note;
10746 int i;
10747
0a81f074 10748 addr = plus_constant (Pmode, base, offset);
c3cc6b78 10749 addr = gen_rtx_MEM (Pmode, addr);
dcc9eb26
AK
10750
10751 set_mem_alias_set (addr, get_frame_alias_set ());
c3cc6b78
UW
10752
10753 /* Special-case single register. */
10754 if (first == last)
10755 {
10756 if (TARGET_64BIT)
10757 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
10758 else
10759 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
10760
75707b28
JJ
10761 if (!global_not_special_regno_p (first))
10762 RTX_FRAME_RELATED_P (insn) = 1;
c3cc6b78
UW
10763 return insn;
10764 }
10765
10766
10767 insn = gen_store_multiple (addr,
10768 gen_rtx_REG (Pmode, first),
10769 GEN_INT (last - first + 1));
10770
e3b5732b 10771 if (first <= 6 && cfun->stdarg)
dcc9eb26
AK
10772 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
10773 {
10774 rtx mem = XEXP (XVECEXP (PATTERN (insn), 0, i), 0);
f4aa3848 10775
dcc9eb26
AK
10776 if (first + i <= 6)
10777 set_mem_alias_set (mem, get_varargs_alias_set ());
10778 }
c3cc6b78
UW
10779
10780 /* We need to set the FRAME_RELATED flag on all SETs
10781 inside the store-multiple pattern.
10782
10783 However, we must not emit DWARF records for registers 2..5
c7453384 10784 if they are stored for use by variable arguments ...
c3cc6b78 10785
a4d05547 10786 ??? Unfortunately, it is not enough to simply not the
c3cc6b78
UW
10787 FRAME_RELATED flags for those SETs, because the first SET
10788 of the PARALLEL is always treated as if it had the flag
10789 set, even if it does not. Therefore we emit a new pattern
10790 without those registers as REG_FRAME_RELATED_EXPR note. */
10791
75707b28 10792 if (first >= 6 && !global_not_special_regno_p (first))
c3cc6b78
UW
10793 {
10794 rtx pat = PATTERN (insn);
10795
10796 for (i = 0; i < XVECLEN (pat, 0); i++)
75707b28
JJ
10797 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
10798 && !global_not_special_regno_p (REGNO (SET_SRC (XVECEXP (pat,
10799 0, i)))))
c3cc6b78
UW
10800 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
10801
10802 RTX_FRAME_RELATED_P (insn) = 1;
10803 }
10804 else if (last >= 6)
10805 {
75707b28
JJ
10806 int start;
10807
10808 for (start = first >= 6 ? first : 6; start <= last; start++)
10809 if (!global_not_special_regno_p (start))
10810 break;
10811
10812 if (start > last)
10813 return insn;
10814
0a81f074
RS
10815 addr = plus_constant (Pmode, base,
10816 offset + (start - first) * UNITS_PER_LONG);
6455a49e
AK
10817
10818 if (start == last)
10819 {
10820 if (TARGET_64BIT)
10821 note = gen_movdi (gen_rtx_MEM (Pmode, addr),
10822 gen_rtx_REG (Pmode, start));
10823 else
10824 note = gen_movsi (gen_rtx_MEM (Pmode, addr),
10825 gen_rtx_REG (Pmode, start));
10826 note = PATTERN (note);
10827
10828 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note);
10829 RTX_FRAME_RELATED_P (insn) = 1;
10830
10831 return insn;
10832 }
10833
c7453384 10834 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
75707b28
JJ
10835 gen_rtx_REG (Pmode, start),
10836 GEN_INT (last - start + 1));
c3cc6b78
UW
10837 note = PATTERN (note);
10838
bbbbb16a 10839 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note);
c3cc6b78
UW
10840
10841 for (i = 0; i < XVECLEN (note, 0); i++)
75707b28
JJ
10842 if (GET_CODE (XVECEXP (note, 0, i)) == SET
10843 && !global_not_special_regno_p (REGNO (SET_SRC (XVECEXP (note,
10844 0, i)))))
c3cc6b78
UW
10845 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
10846
10847 RTX_FRAME_RELATED_P (insn) = 1;
10848 }
10849
10850 return insn;
4023fb28 10851}
9db1d521 10852
c3cc6b78 10853/* Generate insn to restore registers FIRST to LAST from
c7453384 10854 the register save area located at offset OFFSET
c3cc6b78 10855 relative to register BASE. */
9db1d521 10856
c3cc6b78 10857static rtx
9c808aad 10858restore_gprs (rtx base, int offset, int first, int last)
4023fb28 10859{
c3cc6b78
UW
10860 rtx addr, insn;
10861
0a81f074 10862 addr = plus_constant (Pmode, base, offset);
c3cc6b78 10863 addr = gen_rtx_MEM (Pmode, addr);
dcc9eb26 10864 set_mem_alias_set (addr, get_frame_alias_set ());
c3cc6b78
UW
10865
10866 /* Special-case single register. */
10867 if (first == last)
10868 {
10869 if (TARGET_64BIT)
10870 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
10871 else
10872 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
10873
6455a49e 10874 RTX_FRAME_RELATED_P (insn) = 1;
c3cc6b78
UW
10875 return insn;
10876 }
10877
10878 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
10879 addr,
10880 GEN_INT (last - first + 1));
6455a49e 10881 RTX_FRAME_RELATED_P (insn) = 1;
c3cc6b78 10882 return insn;
4023fb28 10883}
9db1d521 10884
585539a1 10885/* Return insn sequence to load the GOT register. */
fd7643fb 10886
775c43d3 10887rtx_insn *
585539a1 10888s390_load_got (void)
fd7643fb 10889{
775c43d3 10890 rtx_insn *insns;
585539a1 10891
68c0ef75
AK
10892 /* We cannot use pic_offset_table_rtx here since we use this
10893 function also for non-pic if __tls_get_offset is called and in
10894 that case PIC_OFFSET_TABLE_REGNUM as well as pic_offset_table_rtx
10895 aren't usable. */
10896 rtx got_rtx = gen_rtx_REG (Pmode, 12);
10897
585539a1
UW
10898 start_sequence ();
10899
9e8327e3 10900 if (TARGET_CPU_ZARCH)
fd7643fb 10901 {
935b5226 10902 emit_move_insn (got_rtx, s390_got_symbol ());
fd7643fb
UW
10903 }
10904 else
10905 {
585539a1 10906 rtx offset;
fd7643fb 10907
935b5226 10908 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, s390_got_symbol ()),
fd7643fb
UW
10909 UNSPEC_LTREL_OFFSET);
10910 offset = gen_rtx_CONST (Pmode, offset);
10911 offset = force_const_mem (Pmode, offset);
10912
68c0ef75 10913 emit_move_insn (got_rtx, offset);
fd7643fb 10914
c7453384 10915 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
fd7643fb 10916 UNSPEC_LTREL_BASE);
68c0ef75 10917 offset = gen_rtx_PLUS (Pmode, got_rtx, offset);
fd7643fb 10918
68c0ef75 10919 emit_move_insn (got_rtx, offset);
fd7643fb 10920 }
585539a1
UW
10921
10922 insns = get_insns ();
10923 end_sequence ();
10924 return insns;
fd7643fb
UW
10925}
10926
12959abe
AK
10927/* This ties together stack memory (MEM with an alias set of frame_alias_set)
10928 and the change to the stack pointer. */
10929
10930static void
10931s390_emit_stack_tie (void)
10932{
10933 rtx mem = gen_frame_mem (BLKmode,
10934 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10935
10936 emit_insn (gen_stack_tie (mem));
10937}
10938
6455a49e
AK
10939/* Copy GPRS into FPR save slots. */
10940
10941static void
10942s390_save_gprs_to_fprs (void)
10943{
10944 int i;
10945
10946 if (!TARGET_Z10 || !TARGET_HARD_FLOAT || !crtl->is_leaf)
10947 return;
10948
10949 for (i = 6; i < 16; i++)
10950 {
10951 if (FP_REGNO_P (cfun_gpr_save_slot (i)))
10952 {
775c43d3 10953 rtx_insn *insn =
6455a49e
AK
10954 emit_move_insn (gen_rtx_REG (DImode, cfun_gpr_save_slot (i)),
10955 gen_rtx_REG (DImode, i));
10956 RTX_FRAME_RELATED_P (insn) = 1;
b8fedf99
AK
 10957 /* This prevents dwarf2cfi from interpreting the set. Otherwise
 10958 it might emit def_cfa_register notes setting an FPR as the
 10959 new CFA. */
efa68ffc 10960 add_reg_note (insn, REG_CFA_REGISTER, copy_rtx (PATTERN (insn)));
6455a49e
AK
10961 }
10962 }
10963}
10964
10965/* Restore GPRs from FPR save slots. */
10966
10967static void
10968s390_restore_gprs_from_fprs (void)
10969{
10970 int i;
10971
10972 if (!TARGET_Z10 || !TARGET_HARD_FLOAT || !crtl->is_leaf)
10973 return;
10974
10975 for (i = 6; i < 16; i++)
10976 {
82c6f58a
AK
10977 rtx_insn *insn;
10978
10979 if (!FP_REGNO_P (cfun_gpr_save_slot (i)))
10980 continue;
10981
10982 rtx fpr = gen_rtx_REG (DImode, cfun_gpr_save_slot (i));
10983
10984 if (i == STACK_POINTER_REGNUM)
10985 insn = emit_insn (gen_stack_restore_from_fpr (fpr));
10986 else
10987 insn = emit_move_insn (gen_rtx_REG (DImode, i), fpr);
10988
10989 df_set_regs_ever_live (i, true);
10990 add_reg_note (insn, REG_CFA_RESTORE, gen_rtx_REG (DImode, i));
10991 if (i == STACK_POINTER_REGNUM)
10992 add_reg_note (insn, REG_CFA_DEF_CFA,
10993 plus_constant (Pmode, stack_pointer_rtx,
10994 STACK_POINTER_OFFSET));
10995 RTX_FRAME_RELATED_P (insn) = 1;
6455a49e
AK
10996 }
10997}
10998
9db1d521 10999
4099494d
RS
11000/* A pass run immediately before shrink-wrapping and prologue and epilogue
11001 generation. */
11002
4099494d
RS
11003namespace {
11004
11005const pass_data pass_data_s390_early_mach =
11006{
11007 RTL_PASS, /* type */
11008 "early_mach", /* name */
11009 OPTGROUP_NONE, /* optinfo_flags */
4099494d
RS
11010 TV_MACH_DEP, /* tv_id */
11011 0, /* properties_required */
11012 0, /* properties_provided */
11013 0, /* properties_destroyed */
11014 0, /* todo_flags_start */
3bea341f 11015 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
4099494d 11016};
585539a1 11017
4099494d
RS
11018class pass_s390_early_mach : public rtl_opt_pass
11019{
11020public:
11021 pass_s390_early_mach (gcc::context *ctxt)
11022 : rtl_opt_pass (pass_data_s390_early_mach, ctxt)
11023 {}
11024
11025 /* opt_pass methods: */
be55bfe6 11026 virtual unsigned int execute (function *);
4099494d
RS
11027
11028}; // class pass_s390_early_mach
11029
be55bfe6
TS
11030unsigned int
11031pass_s390_early_mach::execute (function *fun)
11032{
775c43d3 11033 rtx_insn *insn;
be55bfe6
TS
11034
11035 /* Try to get rid of the FPR clobbers. */
11036 s390_optimize_nonescaping_tx ();
11037
11038 /* Re-compute register info. */
11039 s390_register_info ();
11040
11041 /* If we're using a base register, ensure that it is always valid for
11042 the first non-prologue instruction. */
11043 if (fun->machine->base_reg)
11044 emit_insn_at_entry (gen_main_pool (fun->machine->base_reg));
11045
11046 /* Annotate all constant pool references to let the scheduler know
11047 they implicitly use the base register. */
11048 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
11049 if (INSN_P (insn))
11050 {
11051 annotate_constant_pool_refs (&PATTERN (insn));
11052 df_insn_rescan (insn);
11053 }
11054 return 0;
11055}
11056
4099494d
RS
11057} // anon namespace
11058
11059/* Expand the prologue into a bunch of separate insns. */
11060
11061void
11062s390_emit_prologue (void)
11063{
11064 rtx insn, addr;
11065 rtx temp_reg;
11066 int i;
11067 int offset;
11068 int next_fpr = 0;
585539a1 11069
c7453384 11070 /* Choose best register to use for temp use within prologue.
4cb4721f
MK
 11071 TPF with profiling must avoid register 14 - the tracing function
11072 needs the original contents of r14 to be preserved. */
c7453384 11073
f4aa3848 11074 if (!has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
416ff32e 11075 && !crtl->is_leaf
7bcebb25 11076 && !TARGET_TPF_PROFILING)
4023fb28 11077 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4cb4721f
MK
11078 else if (flag_split_stack && cfun->stdarg)
11079 temp_reg = gen_rtx_REG (Pmode, 12);
9db1d521 11080 else
4023fb28 11081 temp_reg = gen_rtx_REG (Pmode, 1);
9db1d521 11082
6455a49e
AK
11083 s390_save_gprs_to_fprs ();
11084
4023fb28 11085 /* Save call saved gprs. */
adf39f8f 11086 if (cfun_frame_layout.first_save_gpr != -1)
2790879f 11087 {
f4aa3848
AK
11088 insn = save_gprs (stack_pointer_rtx,
11089 cfun_frame_layout.gprs_offset +
9602b6a1 11090 UNITS_PER_LONG * (cfun_frame_layout.first_save_gpr
fb3712f6 11091 - cfun_frame_layout.first_save_gpr_slot),
f4aa3848 11092 cfun_frame_layout.first_save_gpr,
2790879f
AK
11093 cfun_frame_layout.last_save_gpr);
11094 emit_insn (insn);
11095 }
4023fb28 11096
5af2f3d3 11097 /* Dummy insn to mark literal pool slot. */
c7453384 11098
91086990
UW
11099 if (cfun->machine->base_reg)
11100 emit_insn (gen_main_pool (cfun->machine->base_reg));
c7453384 11101
adf39f8f 11102 offset = cfun_frame_layout.f0_offset;
4023fb28 11103
adf39f8f 11104 /* Save f0 and f2. */
2cf4c39e 11105 for (i = FPR0_REGNUM; i <= FPR0_REGNUM + 1; i++)
adf39f8f 11106 {
b89b22fc 11107 if (cfun_fpr_save_p (i))
adf39f8f 11108 {
b89b22fc 11109 save_fpr (stack_pointer_rtx, offset, i);
adf39f8f
AK
11110 offset += 8;
11111 }
e179df83
AK
11112 else if (!TARGET_PACKED_STACK || cfun->stdarg)
11113 offset += 8;
adf39f8f 11114 }
9db1d521 11115
adf39f8f
AK
11116 /* Save f4 and f6. */
11117 offset = cfun_frame_layout.f4_offset;
2cf4c39e 11118 for (i = FPR4_REGNUM; i <= FPR4_REGNUM + 1; i++)
adf39f8f 11119 {
b89b22fc 11120 if (cfun_fpr_save_p (i))
4023fb28 11121 {
b89b22fc 11122 insn = save_fpr (stack_pointer_rtx, offset, i);
adf39f8f
AK
11123 offset += 8;
11124
e179df83
AK
11125 /* If f4 and f6 are call clobbered they are saved due to
11126 stdargs and therefore are not frame related. */
b89b22fc 11127 if (!call_really_used_regs[i])
adf39f8f 11128 RTX_FRAME_RELATED_P (insn) = 1;
4023fb28 11129 }
e179df83 11130 else if (!TARGET_PACKED_STACK || call_really_used_regs[i])
adf39f8f
AK
11131 offset += 8;
11132 }
11133
b3d31392 11134 if (TARGET_PACKED_STACK
adf39f8f
AK
11135 && cfun_save_high_fprs_p
11136 && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
11137 {
11138 offset = (cfun_frame_layout.f8_offset
11139 + (cfun_frame_layout.high_fprs - 1) * 8);
11140
2cf4c39e 11141 for (i = FPR15_REGNUM; i >= FPR8_REGNUM && offset >= 0; i--)
b89b22fc 11142 if (cfun_fpr_save_p (i))
adf39f8f 11143 {
b89b22fc 11144 insn = save_fpr (stack_pointer_rtx, offset, i);
f4aa3848 11145
adf39f8f
AK
11146 RTX_FRAME_RELATED_P (insn) = 1;
11147 offset -= 8;
11148 }
11149 if (offset >= cfun_frame_layout.f8_offset)
b89b22fc 11150 next_fpr = i;
adf39f8f 11151 }
f4aa3848 11152
b3d31392 11153 if (!TARGET_PACKED_STACK)
2cf4c39e 11154 next_fpr = cfun_save_high_fprs_p ? FPR15_REGNUM : 0;
9db1d521 11155
a11e0df4 11156 if (flag_stack_usage_info)
5c779305
AK
11157 current_function_static_stack_size = cfun_frame_layout.frame_size;
11158
4023fb28 11159 /* Decrement stack pointer. */
9db1d521 11160
adf39f8f 11161 if (cfun_frame_layout.frame_size > 0)
4023fb28 11162 {
adf39f8f 11163 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
bbbbb16a 11164 rtx real_frame_off;
9db1d521 11165
d75f90f1
AK
11166 if (s390_stack_size)
11167 {
690e7b63 11168 HOST_WIDE_INT stack_guard;
d75f90f1 11169
690e7b63
AK
11170 if (s390_stack_guard)
11171 stack_guard = s390_stack_guard;
d75f90f1 11172 else
690e7b63
AK
11173 {
11174 /* If no value for stack guard is provided the smallest power of 2
11175 larger than the current frame size is chosen. */
11176 stack_guard = 1;
11177 while (stack_guard < cfun_frame_layout.frame_size)
11178 stack_guard <<= 1;
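	      /* E.g. a frame size of 1500 bytes yields
		 stack_guard == 2048 here. */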
11179 }
d75f90f1 11180
690e7b63
AK
11181 if (cfun_frame_layout.frame_size >= s390_stack_size)
11182 {
cabbe12a 11183 warning (0, "frame size of function %qs is %wd"
690e7b63 11184 " bytes exceeding user provided stack limit of "
cabbe12a 11185 "%d bytes. "
690e7b63
AK
11186 "An unconditional trap is added.",
11187 current_function_name(), cfun_frame_layout.frame_size,
11188 s390_stack_size);
11189 emit_insn (gen_trap ());
d26d508a 11190 emit_barrier ();
690e7b63
AK
11191 }
11192 else
11193 {
a3e7e012
AK
11194 /* stack_guard has to be smaller than s390_stack_size.
11195 Otherwise we would emit an AND with zero which would
11196 not match the test under mask pattern. */
11197 if (stack_guard >= s390_stack_size)
11198 {
f137aa63 11199 warning (0, "frame size of function %qs is %wd"
a3e7e012
AK
11200 " bytes which is more than half the stack size. "
11201 "The dynamic check would not be reliable. "
11202 "No check emitted for this function.",
11203 current_function_name(),
11204 cfun_frame_layout.frame_size);
11205 }
690e7b63 11206 else
a3e7e012
AK
11207 {
11208 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
11209 & ~(stack_guard - 1));
11210
11211 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
11212 GEN_INT (stack_check_mask));
11213 if (TARGET_64BIT)
11214 emit_insn (gen_ctrapdi4 (gen_rtx_EQ (VOIDmode,
11215 t, const0_rtx),
11216 t, const0_rtx, const0_rtx));
11217 else
11218 emit_insn (gen_ctrapsi4 (gen_rtx_EQ (VOIDmode,
11219 t, const0_rtx),
11220 t, const0_rtx, const0_rtx));
11221 }
690e7b63 11222 }
d75f90f1
AK
11223 }
11224
f4aa3848 11225 if (s390_warn_framesize > 0
d75f90f1 11226 && cfun_frame_layout.frame_size >= s390_warn_framesize)
f137aa63 11227 warning (0, "frame size of %qs is %wd bytes",
d75f90f1
AK
11228 current_function_name (), cfun_frame_layout.frame_size);
11229
11230 if (s390_warn_dynamicstack_p && cfun->calls_alloca)
d4ee4d25 11231 warning (0, "%qs uses dynamic stack allocation", current_function_name ());
d75f90f1 11232
4023fb28 11233 /* Save incoming stack pointer into temp reg. */
66480e91 11234 if (TARGET_BACKCHAIN || next_fpr)
adf39f8f 11235 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
c7453384 11236
fae778eb 11237 /* Subtract frame size from stack pointer. */
4023fb28 11238
d3632d41
UW
11239 if (DISP_IN_RANGE (INTVAL (frame_off)))
11240 {
f7df4a84 11241 insn = gen_rtx_SET (stack_pointer_rtx,
f4aa3848 11242 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
9c808aad 11243 frame_off));
d3632d41
UW
11244 insn = emit_insn (insn);
11245 }
11246 else
11247 {
b5c67a49 11248 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
d3632d41
UW
11249 frame_off = force_const_mem (Pmode, frame_off);
11250
11251 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
585539a1 11252 annotate_constant_pool_refs (&PATTERN (insn));
d3632d41 11253 }
4023fb28 11254
4023fb28 11255 RTX_FRAME_RELATED_P (insn) = 1;
bbbbb16a
ILT
11256 real_frame_off = GEN_INT (-cfun_frame_layout.frame_size);
11257 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
f7df4a84 11258 gen_rtx_SET (stack_pointer_rtx,
bbbbb16a
ILT
11259 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
11260 real_frame_off)));
4023fb28
UW
11261
11262 /* Set backchain. */
c7453384 11263
66480e91 11264 if (TARGET_BACKCHAIN)
9db1d521 11265 {
adf39f8f 11266 if (cfun_frame_layout.backchain_offset)
f4aa3848 11267 addr = gen_rtx_MEM (Pmode,
0a81f074 11268 plus_constant (Pmode, stack_pointer_rtx,
adf39f8f
AK
11269 cfun_frame_layout.backchain_offset));
11270 else
f4aa3848 11271 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
dcc9eb26 11272 set_mem_alias_set (addr, get_frame_alias_set ());
4023fb28 11273 insn = emit_insn (gen_move_insn (addr, temp_reg));
9db1d521 11274 }
7d798969 11275
8f4f502f 11276 /* If we support non-call exceptions (e.g. for Java),
7d798969
UW
11277 we need to make sure the backchain pointer is set up
11278 before any possibly trapping memory access. */
8f4f502f 11279 if (TARGET_BACKCHAIN && cfun->can_throw_non_call_exceptions)
7d798969
UW
11280 {
11281 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
c41c1387 11282 emit_clobber (addr);
7d798969 11283 }
4023fb28 11284 }
9db1d521 11285
4023fb28 11286 /* Save fprs 8 - 15 (64 bit ABI). */
c7453384 11287
adf39f8f 11288 if (cfun_save_high_fprs_p && next_fpr)
4023fb28 11289 {
12959abe
AK
11290 /* If the stack might be accessed through a different register
11291 we have to make sure that the stack pointer decrement is not
11292 moved below the use of the stack slots. */
11293 s390_emit_stack_tie ();
11294
f4aa3848 11295 insn = emit_insn (gen_add2_insn (temp_reg,
adf39f8f
AK
11296 GEN_INT (cfun_frame_layout.f8_offset)));
11297
11298 offset = 0;
9db1d521 11299
2cf4c39e 11300 for (i = FPR8_REGNUM; i <= next_fpr; i++)
b89b22fc 11301 if (cfun_fpr_save_p (i))
4023fb28 11302 {
0a81f074 11303 rtx addr = plus_constant (Pmode, stack_pointer_rtx,
adf39f8f
AK
11304 cfun_frame_layout.frame_size
11305 + cfun_frame_layout.f8_offset
11306 + offset);
f4aa3848 11307
adf39f8f
AK
11308 insn = save_fpr (temp_reg, offset, i);
11309 offset += 8;
4023fb28 11310 RTX_FRAME_RELATED_P (insn) = 1;
bbbbb16a 11311 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
f7df4a84 11312 gen_rtx_SET (gen_rtx_MEM (DFmode, addr),
bbbbb16a 11313 gen_rtx_REG (DFmode, i)));
4023fb28
UW
11314 }
11315 }
c7453384 11316
4023fb28 11317 /* Set frame pointer, if needed. */
c7453384 11318
29742ba4 11319 if (frame_pointer_needed)
4023fb28
UW
11320 {
11321 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
11322 RTX_FRAME_RELATED_P (insn) = 1;
11323 }
9db1d521 11324
4023fb28 11325 /* Set up got pointer, if needed. */
c7453384 11326
6fb5fa3c 11327 if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
585539a1 11328 {
775c43d3 11329 rtx_insn *insns = s390_load_got ();
585539a1 11330
b32d5189 11331 for (rtx_insn *insn = insns; insn; insn = NEXT_INSN (insn))
6fb5fa3c 11332 annotate_constant_pool_refs (&PATTERN (insn));
585539a1
UW
11333
11334 emit_insn (insns);
11335 }
c7453384 11336
3839e36a 11337 if (TARGET_TPF_PROFILING)
c7453384
EC
11338 {
11339 /* Generate a BAS instruction to serve as a function
11340 entry intercept to facilitate the use of tracing
2f7e5a0d
EC
11341 algorithms located at the branch target. */
11342 emit_insn (gen_prologue_tpf ());
c7453384
EC
11343
11344 /* Emit a blockage here so that all code
11345 lies between the profiling mechanisms. */
11346 emit_insn (gen_blockage ());
11347 }
4023fb28 11348}
9db1d521 11349
b1c9bc51 11350/* Expand the epilogue into a bunch of separate insns. */
9db1d521 11351
4023fb28 11352void
ed9676cf 11353s390_emit_epilogue (bool sibcall)
4023fb28 11354{
75707b28 11355 rtx frame_pointer, return_reg, cfa_restores = NULL_RTX;
5d4d885c 11356 int area_bottom, area_top, offset = 0;
adf39f8f 11357 int next_offset;
4023fb28 11358 rtvec p;
7333171f 11359 int i;
9db1d521 11360
3839e36a 11361 if (TARGET_TPF_PROFILING)
c7453384
EC
11362 {
11363
11364 /* Generate a BAS instruction to serve as a function
11365 entry intercept to facilitate the use of tracing
2f7e5a0d 11366 algorithms located at the branch target. */
c7453384 11367
c7453384
EC
11368 /* Emit a blockage here so that all code
11369 lies between the profiling mechanisms. */
11370 emit_insn (gen_blockage ());
11371
2f7e5a0d 11372 emit_insn (gen_epilogue_tpf ());
c7453384
EC
11373 }
11374
4023fb28 11375 /* Check whether to use frame or stack pointer for restore. */
9db1d521 11376
f4aa3848 11377 frame_pointer = (frame_pointer_needed
adf39f8f 11378 ? hard_frame_pointer_rtx : stack_pointer_rtx);
9db1d521 11379
adf39f8f 11380 s390_frame_area (&area_bottom, &area_top);
9db1d521 11381
c7453384 11382 /* Check whether we can access the register save area.
4023fb28 11383 If not, increment the frame pointer as required. */
9db1d521 11384
4023fb28
UW
11385 if (area_top <= area_bottom)
11386 {
11387 /* Nothing to restore. */
11388 }
adf39f8f
AK
11389 else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
11390 && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
4023fb28
UW
11391 {
11392 /* Area is in range. */
adf39f8f 11393 offset = cfun_frame_layout.frame_size;
4023fb28
UW
11394 }
11395 else
11396 {
75707b28 11397 rtx insn, frame_off, cfa;
9db1d521 11398
c7453384 11399 offset = area_bottom < 0 ? -area_bottom : 0;
adf39f8f 11400 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);
9db1d521 11401
f7df4a84 11402 cfa = gen_rtx_SET (frame_pointer,
75707b28 11403 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
d3632d41
UW
11404 if (DISP_IN_RANGE (INTVAL (frame_off)))
11405 {
f7df4a84 11406 insn = gen_rtx_SET (frame_pointer,
d3632d41
UW
11407 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
11408 insn = emit_insn (insn);
11409 }
11410 else
11411 {
b5c67a49 11412 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
d3632d41 11413 frame_off = force_const_mem (Pmode, frame_off);
9db1d521 11414
d3632d41 11415 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
585539a1 11416 annotate_constant_pool_refs (&PATTERN (insn));
d3632d41 11417 }
75707b28
JJ
11418 add_reg_note (insn, REG_CFA_ADJUST_CFA, cfa);
11419 RTX_FRAME_RELATED_P (insn) = 1;
4023fb28 11420 }
9db1d521 11421
4023fb28
UW
11422 /* Restore call saved fprs. */
11423
11424 if (TARGET_64BIT)
9db1d521 11425 {
adf39f8f
AK
11426 if (cfun_save_high_fprs_p)
11427 {
11428 next_offset = cfun_frame_layout.f8_offset;
2cf4c39e 11429 for (i = FPR8_REGNUM; i <= FPR15_REGNUM; i++)
adf39f8f 11430 {
b89b22fc 11431 if (cfun_fpr_save_p (i))
adf39f8f
AK
11432 {
11433 restore_fpr (frame_pointer,
11434 offset + next_offset, i);
75707b28
JJ
11435 cfa_restores
11436 = alloc_reg_note (REG_CFA_RESTORE,
11437 gen_rtx_REG (DFmode, i), cfa_restores);
adf39f8f
AK
11438 next_offset += 8;
11439 }
11440 }
11441 }
f4aa3848 11442
9db1d521
HP
11443 }
11444 else
11445 {
adf39f8f 11446 next_offset = cfun_frame_layout.f4_offset;
b89b22fc 11447 /* f4, f6 */
2cf4c39e 11448 for (i = FPR4_REGNUM; i <= FPR4_REGNUM + 1; i++)
adf39f8f 11449 {
b89b22fc 11450 if (cfun_fpr_save_p (i))
adf39f8f
AK
11451 {
11452 restore_fpr (frame_pointer,
11453 offset + next_offset, i);
75707b28
JJ
11454 cfa_restores
11455 = alloc_reg_note (REG_CFA_RESTORE,
11456 gen_rtx_REG (DFmode, i), cfa_restores);
adf39f8f
AK
11457 next_offset += 8;
11458 }
b3d31392 11459 else if (!TARGET_PACKED_STACK)
adf39f8f
AK
11460 next_offset += 8;
11461 }
f4aa3848 11462
4023fb28 11463 }
9db1d521 11464
4023fb28
UW
11465 /* Return register. */
11466
c7453384 11467 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4023fb28
UW
11468
11469 /* Restore call saved gprs. */
11470
adf39f8f 11471 if (cfun_frame_layout.first_restore_gpr != -1)
4023fb28 11472 {
c3cc6b78 11473 rtx insn, addr;
1447dc69
HP
11474 int i;
11475
c7453384 11476 /* Check for global registers and save them
1447dc69
HP
 11477 to the stack location from which they get restored. */
11478
adf39f8f
AK
11479 for (i = cfun_frame_layout.first_restore_gpr;
11480 i <= cfun_frame_layout.last_restore_gpr;
1447dc69
HP
11481 i++)
11482 {
75707b28 11483 if (global_not_special_regno_p (i))
1447dc69 11484 {
0a81f074 11485 addr = plus_constant (Pmode, frame_pointer,
f4aa3848 11486 offset + cfun_frame_layout.gprs_offset
fb3712f6 11487 + (i - cfun_frame_layout.first_save_gpr_slot)
9602b6a1 11488 * UNITS_PER_LONG);
1447dc69 11489 addr = gen_rtx_MEM (Pmode, addr);
dcc9eb26 11490 set_mem_alias_set (addr, get_frame_alias_set ());
1447dc69 11491 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
c7453384 11492 }
75707b28
JJ
11493 else
11494 cfa_restores
11495 = alloc_reg_note (REG_CFA_RESTORE,
11496 gen_rtx_REG (Pmode, i), cfa_restores);
1447dc69 11497 }
4023fb28 11498
4ba66aee
AK
 11499 /* Fetch the return address from the stack before the load multiple;
 11500 this helps scheduling.
11501
11502 Only do this if we already decided that r14 needs to be
11503 saved to a stack slot. (And not just because r14 happens to
11504 be in between two GPRs which need saving.) Otherwise it
11505 would be difficult to take that decision back in
11506 s390_optimize_prologue.
11507
11508 This optimization is only helpful on in-order machines. */
11509 if (! sibcall
11510 && cfun_gpr_save_slot (RETURN_REGNUM) == SAVE_SLOT_STACK
11511 && s390_tune <= PROCESSOR_2097_Z10)
11512 {
11513 int return_regnum = find_unused_clobbered_reg();
11514 if (!return_regnum)
11515 return_regnum = 4;
11516 return_reg = gen_rtx_REG (Pmode, return_regnum);
11517
11518 addr = plus_constant (Pmode, frame_pointer,
11519 offset + cfun_frame_layout.gprs_offset
11520 + (RETURN_REGNUM
11521 - cfun_frame_layout.first_save_gpr_slot)
11522 * UNITS_PER_LONG);
11523 addr = gen_rtx_MEM (Pmode, addr);
11524 set_mem_alias_set (addr, get_frame_alias_set ());
11525 emit_move_insn (return_reg, addr);
db716bde 11526
4ba66aee
AK
11527 /* Once we did that optimization we have to make sure
11528 s390_optimize_prologue does not try to remove the store
11529 of r14 since we will not be able to find the load issued
11530 here. */
11531 cfun_frame_layout.save_return_addr_p = true;
9db1d521 11532 }
4023fb28 11533
adf39f8f
AK
11534 insn = restore_gprs (frame_pointer,
11535 offset + cfun_frame_layout.gprs_offset
f4aa3848 11536 + (cfun_frame_layout.first_restore_gpr
fb3712f6 11537 - cfun_frame_layout.first_save_gpr_slot)
9602b6a1 11538 * UNITS_PER_LONG,
adf39f8f
AK
11539 cfun_frame_layout.first_restore_gpr,
11540 cfun_frame_layout.last_restore_gpr);
75707b28
JJ
11541 insn = emit_insn (insn);
11542 REG_NOTES (insn) = cfa_restores;
11543 add_reg_note (insn, REG_CFA_DEF_CFA,
0a81f074
RS
11544 plus_constant (Pmode, stack_pointer_rtx,
11545 STACK_POINTER_OFFSET));
75707b28 11546 RTX_FRAME_RELATED_P (insn) = 1;
4023fb28 11547 }
9db1d521 11548
6455a49e
AK
11549 s390_restore_gprs_from_fprs ();
11550
ed9676cf
AK
11551 if (! sibcall)
11552 {
c7453384 11553
ed9676cf 11554 /* Return to caller. */
38899e29 11555
ed9676cf 11556 p = rtvec_alloc (2);
38899e29 11557
3810076b 11558 RTVEC_ELT (p, 0) = ret_rtx;
ed9676cf
AK
11559 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
11560 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
11561 }
9db1d521
HP
11562}
11563
177bc204
RS
11564/* Implement TARGET_SET_UP_BY_PROLOGUE. */
11565
11566static void
11567s300_set_up_by_prologue (hard_reg_set_container *regs)
11568{
11569 if (cfun->machine->base_reg
11570 && !call_really_used_regs[REGNO (cfun->machine->base_reg)])
11571 SET_HARD_REG_BIT (regs->set, REGNO (cfun->machine->base_reg));
11572}
11573
4cb4721f
MK
11574/* -fsplit-stack support. */
11575
11576/* A SYMBOL_REF for __morestack. */
11577static GTY(()) rtx morestack_ref;
11578
11579/* When using -fsplit-stack, the allocation routines set a field in
11580 the TCB to the bottom of the stack plus this much space, measured
11581 in bytes. */
11582
11583#define SPLIT_STACK_AVAILABLE 1024
11584
11585/* Emit -fsplit-stack prologue, which goes before the regular function
11586 prologue. */
11587
11588void
11589s390_expand_split_stack_prologue (void)
11590{
11591 rtx r1, guard, cc = NULL;
11592 rtx_insn *insn;
11593 /* Offset from thread pointer to __private_ss. */
11594 int psso = TARGET_64BIT ? 0x38 : 0x20;
11595 /* Pointer size in bytes. */
11596 /* Frame size and argument size - the two parameters to __morestack. */
11597 HOST_WIDE_INT frame_size = cfun_frame_layout.frame_size;
11598 /* Align argument size to 8 bytes - simplifies __morestack code. */
11599 HOST_WIDE_INT args_size = crtl->args.size >= 0
11600 ? ((crtl->args.size + 7) & ~7)
11601 : 0;
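  /* E.g. an incoming argument area of 13 bytes yields args_size == 16. */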
11602 /* Label to be called by __morestack. */
11603 rtx_code_label *call_done = NULL;
11604 rtx_code_label *parm_base = NULL;
11605 rtx tmp;
11606
11607 gcc_assert (flag_split_stack && reload_completed);
11608 if (!TARGET_CPU_ZARCH)
11609 {
11610 sorry ("CPUs older than z900 are not supported for -fsplit-stack");
11611 return;
11612 }
11613
11614 r1 = gen_rtx_REG (Pmode, 1);
11615
11616 /* If no stack frame will be allocated, don't do anything. */
11617 if (!frame_size)
11618 {
11619 if (cfun->machine->split_stack_varargs_pointer != NULL_RTX)
11620 {
11621 /* If va_start is used, just use r15. */
11622 emit_move_insn (r1,
11623 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
11624 GEN_INT (STACK_POINTER_OFFSET)));
11625
11626 }
11627 return;
11628 }
11629
11630 if (morestack_ref == NULL_RTX)
11631 {
11632 morestack_ref = gen_rtx_SYMBOL_REF (Pmode, "__morestack");
11633 SYMBOL_REF_FLAGS (morestack_ref) |= (SYMBOL_FLAG_LOCAL
11634 | SYMBOL_FLAG_FUNCTION);
11635 }
11636
11637 if (CONST_OK_FOR_K (frame_size) || CONST_OK_FOR_Op (frame_size))
11638 {
11639 /* If frame_size will fit in an add instruction, do a stack space
11640 check, and only call __morestack if there's not enough space. */
11641
11642 /* Get thread pointer. r1 is the only register we can always destroy - r0
11643 could contain a static chain (and cannot be used to address memory
11644 anyway), r2-r6 can contain parameters, and r6-r15 are callee-saved. */
11645 emit_move_insn (r1, gen_rtx_REG (Pmode, TP_REGNUM));
11646 /* Aim at __private_ss. */
11647 guard = gen_rtx_MEM (Pmode, plus_constant (Pmode, r1, psso));
11648
 11649 /* If less than 1kiB is used, skip the addition and compare directly with
11650 __private_ss. */
11651 if (frame_size > SPLIT_STACK_AVAILABLE)
11652 {
11653 emit_move_insn (r1, guard);
11654 if (TARGET_64BIT)
11655 emit_insn (gen_adddi3 (r1, r1, GEN_INT (frame_size)));
11656 else
11657 emit_insn (gen_addsi3 (r1, r1, GEN_INT (frame_size)));
11658 guard = r1;
11659 }
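      /* E.g. for a 4096 byte frame this leaves r1 pointing at
	 __private_ss + 4096; the comparison below then lets us call
	 __morestack only if the stack pointer lies beneath it. */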
11660
11661 /* Compare the (maybe adjusted) guard with the stack pointer. */
11662 cc = s390_emit_compare (LT, stack_pointer_rtx, guard);
11663 }
11664
11665 call_done = gen_label_rtx ();
11666 parm_base = gen_label_rtx ();
11667
11668 /* Emit the parameter block. */
11669 tmp = gen_split_stack_data (parm_base, call_done,
11670 GEN_INT (frame_size),
11671 GEN_INT (args_size));
11672 insn = emit_insn (tmp);
11673 add_reg_note (insn, REG_LABEL_OPERAND, call_done);
11674 LABEL_NUSES (call_done)++;
11675 add_reg_note (insn, REG_LABEL_OPERAND, parm_base);
11676 LABEL_NUSES (parm_base)++;
11677
11678 /* %r1 = litbase. */
11679 insn = emit_move_insn (r1, gen_rtx_LABEL_REF (VOIDmode, parm_base));
11680 add_reg_note (insn, REG_LABEL_OPERAND, parm_base);
11681 LABEL_NUSES (parm_base)++;
11682
11683 /* Now, we need to call __morestack. It has very special calling
11684 conventions: it preserves param/return/static chain registers for
11685 calling main function body, and looks for its own parameters at %r1. */
11686
11687 if (cc != NULL)
11688 {
11689 tmp = gen_split_stack_cond_call (morestack_ref, cc, call_done);
11690
11691 insn = emit_jump_insn (tmp);
11692 JUMP_LABEL (insn) = call_done;
11693 LABEL_NUSES (call_done)++;
11694
11695 /* Mark the jump as very unlikely to be taken. */
5fa396ad
JH
11696 add_reg_br_prob_note (insn,
11697 profile_probability::very_unlikely ());
4cb4721f
MK
11698
11699 if (cfun->machine->split_stack_varargs_pointer != NULL_RTX)
11700 {
11701 /* If va_start is used, and __morestack was not called, just use
11702 r15. */
11703 emit_move_insn (r1,
11704 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
11705 GEN_INT (STACK_POINTER_OFFSET)));
11706 }
11707 }
11708 else
11709 {
11710 tmp = gen_split_stack_call (morestack_ref, call_done);
11711 insn = emit_jump_insn (tmp);
11712 JUMP_LABEL (insn) = call_done;
11713 LABEL_NUSES (call_done)++;
11714 emit_barrier ();
11715 }
11716
11717 /* __morestack will call us here. */
11718
11719 emit_label (call_done);
11720}
11721
11722/* We may have to tell the dataflow pass that the split stack prologue
11723 is initializing a register. */
11724
11725static void
11726s390_live_on_entry (bitmap regs)
11727{
11728 if (cfun->machine->split_stack_varargs_pointer != NULL_RTX)
11729 {
11730 gcc_assert (flag_split_stack);
11731 bitmap_set_bit (regs, 1);
11732 }
11733}
11734
177bc204
RS
11735/* Return true if the function can use simple_return to return outside
11736 of a shrink-wrapped region. At present shrink-wrapping is supported
11737 in all cases. */
11738
11739bool
11740s390_can_use_simple_return_insn (void)
11741{
11742 return true;
11743}
11744
11745/* Return true if the epilogue is guaranteed to contain only a return
11746 instruction and if a direct return can therefore be used instead.
11747 One of the main advantages of using direct return instructions
11748 is that we can then use conditional returns. */
11749
11750bool
11751s390_can_use_return_insn (void)
11752{
11753 int i;
11754
11755 if (!reload_completed)
11756 return false;
11757
11758 if (crtl->profile)
11759 return false;
11760
11761 if (TARGET_TPF_PROFILING)
11762 return false;
11763
11764 for (i = 0; i < 16; i++)
82379bdf 11765 if (cfun_gpr_save_slot (i) != SAVE_SLOT_NONE)
177bc204
RS
11766 return false;
11767
6e77facf
AK
11768 /* For 31 bit this is not covered by the frame_size check below
11769 since f4, f6 are saved in the register save area without needing
11770 additional stack space. */
11771 if (!TARGET_64BIT
11772 && (cfun_fpr_save_p (FPR4_REGNUM) || cfun_fpr_save_p (FPR6_REGNUM)))
11773 return false;
11774
177bc204
RS
11775 if (cfun->machine->base_reg
11776 && !call_really_used_regs[REGNO (cfun->machine->base_reg)])
11777 return false;
11778
11779 return cfun_frame_layout.frame_size == 0;
11780}
9db1d521 11781
085261c8
AK
11782/* The VX ABI differs for vararg functions. Therefore we need the
11783 prototype of the callee to be available when passing vector type
11784 values. */
11785static const char *
11786s390_invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
11787{
11788 return ((TARGET_VX_ABI
11789 && typelist == 0
11790 && VECTOR_TYPE_P (TREE_TYPE (val))
11791 && (funcdecl == NULL_TREE
11792 || (TREE_CODE (funcdecl) == FUNCTION_DECL
11793 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
f3981e7e 11794 ? N_("vector argument passed to unprototyped function")
085261c8
AK
11795 : NULL);
11796}
11797
11798
c7453384 11799/* Return the size in bytes of a function argument of
994fe660
UW
11800 type TYPE and/or mode MODE. At least one of TYPE or
11801 MODE must be specified. */
9db1d521
HP
11802
11803static int
ef4bddc2 11804s390_function_arg_size (machine_mode mode, const_tree type)
9db1d521
HP
11805{
11806 if (type)
11807 return int_size_in_bytes (type);
11808
d65f7478 11809 /* No type info available for some library calls ... */
9db1d521
HP
11810 if (mode != BLKmode)
11811 return GET_MODE_SIZE (mode);
11812
11813 /* If we have neither type nor mode, abort */
8d933e31 11814 gcc_unreachable ();
9db1d521
HP
11815}
11816
085261c8
AK
11817/* Return true if a function argument of type TYPE and mode MODE
11818 is to be passed in a vector register, if available. */
11819
11820bool
11821s390_function_arg_vector (machine_mode mode, const_tree type)
11822{
11823 if (!TARGET_VX_ABI)
11824 return false;
11825
11826 if (s390_function_arg_size (mode, type) > 16)
11827 return false;
11828
11829 /* No type info available for some library calls ... */
11830 if (!type)
11831 return VECTOR_MODE_P (mode);
11832
11833 /* The ABI says that record types with a single member are treated
11834 just like that member would be. */
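  /* E.g. (hypothetical type): struct wrap { __vector signed int v; }
     is passed exactly like __vector signed int itself. */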
11835 while (TREE_CODE (type) == RECORD_TYPE)
11836 {
11837 tree field, single = NULL_TREE;
11838
11839 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
11840 {
11841 if (TREE_CODE (field) != FIELD_DECL)
11842 continue;
11843
11844 if (single == NULL_TREE)
11845 single = TREE_TYPE (field);
11846 else
11847 return false;
11848 }
11849
11850 if (single == NULL_TREE)
11851 return false;
11852 else
11853 {
 11854 /* If the field declaration adds extra bytes due to
 11855 e.g. padding, this is not accepted as a vector type. */
11856 if (int_size_in_bytes (single) <= 0
11857 || int_size_in_bytes (single) != int_size_in_bytes (type))
11858 return false;
11859 type = single;
11860 }
11861 }
11862
11863 return VECTOR_TYPE_P (type);
11864}
11865
82b1c974
UW
11866/* Return true if a function argument of type TYPE and mode MODE
11867 is to be passed in a floating-point register, if available. */
11868
11869static bool
ef4bddc2 11870s390_function_arg_float (machine_mode mode, const_tree type)
82b1c974 11871{
085261c8 11872 if (s390_function_arg_size (mode, type) > 8)
8c17530e
UW
11873 return false;
11874
82b1c974
UW
11875 /* Soft-float changes the ABI: no floating-point registers are used. */
11876 if (TARGET_SOFT_FLOAT)
11877 return false;
11878
11879 /* No type info available for some library calls ... */
11880 if (!type)
4dc19cc0 11881 return mode == SFmode || mode == DFmode || mode == SDmode || mode == DDmode;
82b1c974
UW
11882
11883 /* The ABI says that record types with a single member are treated
11884 just like that member would be. */
11885 while (TREE_CODE (type) == RECORD_TYPE)
11886 {
11887 tree field, single = NULL_TREE;
11888
910ad8de 11889 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
82b1c974
UW
11890 {
11891 if (TREE_CODE (field) != FIELD_DECL)
11892 continue;
11893
11894 if (single == NULL_TREE)
11895 single = TREE_TYPE (field);
11896 else
11897 return false;
11898 }
11899
11900 if (single == NULL_TREE)
11901 return false;
11902 else
11903 type = single;
11904 }
11905
11906 return TREE_CODE (type) == REAL_TYPE;
11907}
11908
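/* Illustrative sketch (hypothetical type): what the checks above
   accept for floating-point registers under the hard-float ABI. */

struct dwrap { double d; };   /* single-member record: treated as double */
/* 'long double' is 16 bytes on S/390 and fails the size check above;
   with TARGET_SOFT_FLOAT nothing is passed in FPRs at all. */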
8c17530e
UW
11909/* Return true if a function argument of type TYPE and mode MODE
11910 is to be passed in an integer register, or a pair of integer
11911 registers, if available. */
11912
11913static bool
ef4bddc2 11914s390_function_arg_integer (machine_mode mode, const_tree type)
8c17530e
UW
11915{
11916 int size = s390_function_arg_size (mode, type);
11917 if (size > 8)
11918 return false;
11919
11920 /* No type info available for some library calls ... */
11921 if (!type)
11922 return GET_MODE_CLASS (mode) == MODE_INT
4dc19cc0 11923 || (TARGET_SOFT_FLOAT && SCALAR_FLOAT_MODE_P (mode));
8c17530e
UW
11924
11925 /* We accept small integral (and similar) types. */
11926 if (INTEGRAL_TYPE_P (type)
38899e29 11927 || POINTER_TYPE_P (type)
0d697034 11928 || TREE_CODE (type) == NULLPTR_TYPE
8c17530e
UW
11929 || TREE_CODE (type) == OFFSET_TYPE
11930 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
11931 return true;
11932
11933 /* We also accept structs of size 1, 2, 4, 8 that are not
38899e29 11934 passed in floating-point registers. */
8c17530e
UW
11935 if (AGGREGATE_TYPE_P (type)
11936 && exact_log2 (size) >= 0
11937 && !s390_function_arg_float (mode, type))
11938 return true;
11939
11940 return false;
11941}
11942
994fe660
UW
11943/* Return 1 if a function argument of type TYPE and mode MODE
11944 is to be passed by reference. The ABI specifies that only
11945 structures of size 1, 2, 4, or 8 bytes are passed by value,
11946 all other structures (and complex numbers) are passed by
11947 reference. */
11948
8cd5a4e0 11949static bool
d5cc9181 11950s390_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
ef4bddc2 11951 machine_mode mode, const_tree type,
8cd5a4e0 11952 bool named ATTRIBUTE_UNUSED)
9db1d521
HP
11953{
11954 int size = s390_function_arg_size (mode, type);
085261c8
AK
11955
11956 if (s390_function_arg_vector (mode, type))
11957 return false;
11958
8c17530e
UW
11959 if (size > 8)
11960 return true;
9db1d521
HP
11961
11962 if (type)
11963 {
8c17530e 11964 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
085261c8 11965 return true;
9db1d521 11966
8c17530e
UW
11967 if (TREE_CODE (type) == COMPLEX_TYPE
11968 || TREE_CODE (type) == VECTOR_TYPE)
085261c8 11969 return true;
9db1d521 11970 }
c7453384 11971
085261c8 11972 return false;
9db1d521
HP
11973}
11974
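/* Illustrative sketch (hypothetical types): which arguments the
   predicate above sends through memory. */

struct by_val { int a, b; };      /* size 8, a power of two: by value */
struct by_ref { char a, b, c; };  /* size 3: passed by reference      */
/* _Complex double and vector types that do not qualify for a VR are
   likewise passed by reference. */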
11975/* Update the data in CUM to advance over an argument of mode MODE and
11976 data type TYPE. (TYPE is null for libcalls where that information
994fe660
UW
11977 may not be available.) The boolean NAMED specifies whether the
11978 argument is a named argument (as opposed to an unnamed argument
11979 matching an ellipsis). */
9db1d521 11980
3cb1da52 11981static void
ef4bddc2 11982s390_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
085261c8 11983 const_tree type, bool named)
9db1d521 11984{
d5cc9181
JR
11985 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
11986
085261c8
AK
11987 if (s390_function_arg_vector (mode, type))
11988 {
11989 /* We are called for unnamed vector stdarg arguments which are
11990 passed on the stack. In this case this hook does not have to
11991 do anything since stack arguments are tracked by common
11992 code. */
11993 if (!named)
11994 return;
11995 cum->vrs += 1;
11996 }
11997 else if (s390_function_arg_float (mode, type))
9db1d521 11998 {
82b1c974 11999 cum->fprs += 1;
9db1d521 12000 }
8c17530e 12001 else if (s390_function_arg_integer (mode, type))
9db1d521
HP
12002 {
12003 int size = s390_function_arg_size (mode, type);
9602b6a1 12004 cum->gprs += ((size + UNITS_PER_LONG - 1) / UNITS_PER_LONG);
9db1d521 12005 }
8c17530e 12006 else
8d933e31 12007 gcc_unreachable ();
9db1d521
HP
12008}
12009
994fe660
UW
12010/* Define where to put the arguments to a function.
12011 Value is zero to push the argument on the stack,
12012 or a hard register in which to store the argument.
12013
12014 MODE is the argument's machine mode.
12015 TYPE is the data type of the argument (as a tree).
12016 This is null for libcalls where that information may
12017 not be available.
12018 CUM is a variable of type CUMULATIVE_ARGS which gives info about
12019 the preceding args and about the function being called.
12020 NAMED is nonzero if this argument is a named parameter
c7453384 12021 (otherwise it is an extra parameter matching an ellipsis).
994fe660
UW
12022
12023 On S/390, we use general purpose registers 2 through 6 to
12024 pass integer, pointer, and certain structure arguments, and
12025 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
12026 to pass floating point arguments. All remaining arguments
12027 are pushed to the stack. */
9db1d521 12028
3cb1da52 12029static rtx
ef4bddc2 12030s390_function_arg (cumulative_args_t cum_v, machine_mode mode,
085261c8 12031 const_tree type, bool named)
9db1d521 12032{
d5cc9181
JR
12033 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
12034
45901378
AK
12035 if (!named)
12036 s390_check_type_for_vector_abi (type, true, false);
085261c8
AK
12037
12038 if (s390_function_arg_vector (mode, type))
12039 {
12040 /* Vector arguments being part of the ellipsis are passed on the
12041 stack. */
12042 if (!named || (cum->vrs + 1 > VEC_ARG_NUM_REG))
12043 return NULL_RTX;
12044
12045 return gen_rtx_REG (mode, cum->vrs + FIRST_VEC_ARG_REGNO);
12046 }
12047 else if (s390_function_arg_float (mode, type))
9db1d521 12048 {
29a79fcf 12049 if (cum->fprs + 1 > FP_ARG_NUM_REG)
085261c8 12050 return NULL_RTX;
9db1d521 12051 else
f1c25d3b 12052 return gen_rtx_REG (mode, cum->fprs + 16);
9db1d521 12053 }
8c17530e 12054 else if (s390_function_arg_integer (mode, type))
9db1d521
HP
12055 {
12056 int size = s390_function_arg_size (mode, type);
9602b6a1 12057 int n_gprs = (size + UNITS_PER_LONG - 1) / UNITS_PER_LONG;
9db1d521 12058
29a79fcf 12059 if (cum->gprs + n_gprs > GP_ARG_NUM_REG)
085261c8 12060 return NULL_RTX;
9602b6a1 12061 else if (n_gprs == 1 || UNITS_PER_WORD == UNITS_PER_LONG)
f1c25d3b 12062 return gen_rtx_REG (mode, cum->gprs + 2);
9602b6a1
AK
12063 else if (n_gprs == 2)
12064 {
12065 rtvec p = rtvec_alloc (2);
12066
12067 RTVEC_ELT (p, 0)
12068 = gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, cum->gprs + 2),
12069 const0_rtx);
12070 RTVEC_ELT (p, 1)
12071 = gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, cum->gprs + 3),
12072 GEN_INT (4));
12073
12074 return gen_rtx_PARALLEL (mode, p);
12075 }
9db1d521 12076 }
8c17530e
UW
12077
12078 /* After the real arguments, expand_call calls us once again
12079 with a void_type_node type. Whatever we return here is
12080 passed as operand 2 to the call expanders.
12081
12082 We don't need this feature ... */
12083 else if (type == void_type_node)
12084 return const0_rtx;
12085
8d933e31 12086 gcc_unreachable ();
8c17530e
UW
12087}
12088
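/* Illustrative sketch (hypothetical prototype): how the rules above
   assign argument registers on a 64-bit target. */

extern void f (int a,        /* gpr 2 */
               double b,     /* fpr 0 */
               void *c,      /* gpr 3 */
               double d,     /* fpr 2 */
               long e);      /* gpr 4 */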
12089/* Return true if return values of type TYPE should be returned
12090 in a memory buffer whose address is passed by the caller as
12091 hidden first argument. */
12092
12093static bool
586de218 12094s390_return_in_memory (const_tree type, const_tree fundecl ATTRIBUTE_UNUSED)
8c17530e
UW
12095{
12096 /* We accept small integral (and similar) types. */
12097 if (INTEGRAL_TYPE_P (type)
38899e29 12098 || POINTER_TYPE_P (type)
8c17530e
UW
12099 || TREE_CODE (type) == OFFSET_TYPE
12100 || TREE_CODE (type) == REAL_TYPE)
12101 return int_size_in_bytes (type) > 8;
12102
085261c8
AK
12103 /* Vector types which fit into a VR are not returned in memory. */
12104 if (TARGET_VX_ABI
12105 && VECTOR_TYPE_P (type)
12106 && int_size_in_bytes (type) <= 16)
12107 return false;
12108
8c17530e
UW
12109 /* Aggregates and similar constructs are always returned
12110 in memory. */
12111 if (AGGREGATE_TYPE_P (type)
12112 || TREE_CODE (type) == COMPLEX_TYPE
085261c8 12113 || VECTOR_TYPE_P (type))
8c17530e
UW
12114 return true;
12115
12116 /* ??? We get called on all sorts of random stuff from
12117 aggregate_value_p. We can't abort, but it's not clear
12118 what's safe to return. Pretend it's a struct I guess. */
12119 return true;
12120}
12121
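/* Illustrative sketch (hypothetical declarations): small scalars come
   back in registers, aggregates via the hidden buffer, and small
   vectors in a VR when the VX ABI is active. */

struct pair { int a, b; };                 /* aggregate: memory, always */
extern long double ldbl_id (long double);  /* 16 bytes > 8: memory      */
typedef int v4si __attribute__ ((vector_size (16)));
extern v4si vec_id (v4si);                 /* VX ABI: returned in a VR  */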
cde0f3fd
PB
12122/* Function arguments and return values are promoted to word size. */
12123
ef4bddc2
RS
12124static machine_mode
12125s390_promote_function_mode (const_tree type, machine_mode mode,
cde0f3fd
PB
12126 int *punsignedp,
12127 const_tree fntype ATTRIBUTE_UNUSED,
12128 int for_return ATTRIBUTE_UNUSED)
12129{
12130 if (INTEGRAL_MODE_P (mode)
9602b6a1 12131 && GET_MODE_SIZE (mode) < UNITS_PER_LONG)
cde0f3fd 12132 {
5e617be8 12133 if (type != NULL_TREE && POINTER_TYPE_P (type))
cde0f3fd
PB
12134 *punsignedp = POINTERS_EXTEND_UNSIGNED;
12135 return Pmode;
12136 }
12137
12138 return mode;
12139}
12140
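/* Example of the effect (assuming -m64, i.e. UNITS_PER_LONG == 8):
   a narrow integer argument or return value travels as a full word. */

extern short half (short);   /* both value and result are widened to
                                Pmode (DImode) before using a GPR;
                                pointers extend unsigned. */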
b46616fd
AK
12141/* Define where to return a (scalar) value of type RET_TYPE.
12142 If RET_TYPE is null, define where to return a (scalar)
8c17530e
UW
12143 value of mode MODE from a libcall. */
12144
b46616fd 12145static rtx
ef4bddc2 12146s390_function_and_libcall_value (machine_mode mode,
b46616fd
AK
12147 const_tree ret_type,
12148 const_tree fntype_or_decl,
12149 bool outgoing ATTRIBUTE_UNUSED)
8c17530e 12150{
085261c8
AK
12151 /* For vector return types it is important to use the RET_TYPE
12152 argument whenever available since the middle-end might have
12153 changed the mode to a scalar mode. */
12154 bool vector_ret_type_p = ((ret_type && VECTOR_TYPE_P (ret_type))
12155 || (!ret_type && VECTOR_MODE_P (mode)));
12156
b46616fd
AK
12157 /* For normal functions perform the promotion as
12158 promote_function_mode would do. */
12159 if (ret_type)
8c17530e 12160 {
b46616fd
AK
12161 int unsignedp = TYPE_UNSIGNED (ret_type);
12162 mode = promote_function_mode (ret_type, mode, &unsignedp,
12163 fntype_or_decl, 1);
8c17530e
UW
12164 }
12165
085261c8
AK
12166 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
12167 || SCALAR_FLOAT_MODE_P (mode)
12168 || (TARGET_VX_ABI && vector_ret_type_p));
12169 gcc_assert (GET_MODE_SIZE (mode) <= (TARGET_VX_ABI ? 16 : 8));
8c17530e 12170
085261c8
AK
12171 if (TARGET_VX_ABI && vector_ret_type_p)
12172 return gen_rtx_REG (mode, FIRST_VEC_ARG_REGNO);
12173 else if (TARGET_HARD_FLOAT && SCALAR_FLOAT_MODE_P (mode))
8c17530e 12174 return gen_rtx_REG (mode, 16);
9602b6a1
AK
12175 else if (GET_MODE_SIZE (mode) <= UNITS_PER_LONG
12176 || UNITS_PER_LONG == UNITS_PER_WORD)
8c17530e 12177 return gen_rtx_REG (mode, 2);
9602b6a1
AK
12178 else if (GET_MODE_SIZE (mode) == 2 * UNITS_PER_LONG)
12179 {
b46616fd
AK
12180 /* This case is triggered when returning a 64 bit value with
12181 -m31 -mzarch. Although the value would fit into a single
12182 register, it has to be forced into a 32 bit register pair in
12183 order to match the ABI. */
9602b6a1
AK
12184 rtvec p = rtvec_alloc (2);
12185
12186 RTVEC_ELT (p, 0)
12187 = gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, 2), const0_rtx);
12188 RTVEC_ELT (p, 1)
12189 = gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, 3), GEN_INT (4));
12190
12191 return gen_rtx_PARALLEL (mode, p);
12192 }
12193
12194 gcc_unreachable ();
9db1d521
HP
12195}
12196
b46616fd
AK
12197/* Define where to return a scalar return value of type RET_TYPE. */
12198
12199static rtx
12200s390_function_value (const_tree ret_type, const_tree fn_decl_or_type,
12201 bool outgoing)
12202{
12203 return s390_function_and_libcall_value (TYPE_MODE (ret_type), ret_type,
12204 fn_decl_or_type, outgoing);
12205}
12206
12207/* Define where to return a scalar libcall return value of mode
12208 MODE. */
12209
12210static rtx
ef4bddc2 12211s390_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
b46616fd
AK
12212{
12213 return s390_function_and_libcall_value (mode, NULL_TREE,
12214 NULL_TREE, true);
12215}
12216
9db1d521 12217
994fe660
UW
12218/* Create and return the va_list datatype.
12219
12220 On S/390, va_list is an array type equivalent to
12221
12222 typedef struct __va_list_tag
12223 {
12224 long __gpr;
12225 long __fpr;
12226 void *__overflow_arg_area;
12227 void *__reg_save_area;
994fe660
UW
12228 } va_list[1];
12229
12230 where __gpr and __fpr hold the number of general purpose
12231 or floating point arguments used up to now, respectively,
c7453384 12232 __overflow_arg_area points to the stack location of the
994fe660
UW
12233 next argument passed on the stack, and __reg_save_area
12234 always points to the start of the register area in the
12235 call frame of the current function. The function prologue
12236 saves all registers used for argument passing into this
12237 area if the function uses variable arguments. */
9db1d521 12238
c35d187f
RH
12239static tree
12240s390_build_builtin_va_list (void)
9db1d521
HP
12241{
12242 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
12243
47798692 12244 record = lang_hooks.types.make_type (RECORD_TYPE);
9db1d521
HP
12245
12246 type_decl =
4c4bde29
AH
12247 build_decl (BUILTINS_LOCATION,
12248 TYPE_DECL, get_identifier ("__va_list_tag"), record);
9db1d521 12249
4c4bde29
AH
12250 f_gpr = build_decl (BUILTINS_LOCATION,
12251 FIELD_DECL, get_identifier ("__gpr"),
9db1d521 12252 long_integer_type_node);
4c4bde29
AH
12253 f_fpr = build_decl (BUILTINS_LOCATION,
12254 FIELD_DECL, get_identifier ("__fpr"),
9db1d521 12255 long_integer_type_node);
4c4bde29
AH
12256 f_ovf = build_decl (BUILTINS_LOCATION,
12257 FIELD_DECL, get_identifier ("__overflow_arg_area"),
9db1d521 12258 ptr_type_node);
4c4bde29
AH
12259 f_sav = build_decl (BUILTINS_LOCATION,
12260 FIELD_DECL, get_identifier ("__reg_save_area"),
9db1d521
HP
12261 ptr_type_node);
12262
29a79fcf
UW
12263 va_list_gpr_counter_field = f_gpr;
12264 va_list_fpr_counter_field = f_fpr;
12265
9db1d521
HP
12266 DECL_FIELD_CONTEXT (f_gpr) = record;
12267 DECL_FIELD_CONTEXT (f_fpr) = record;
12268 DECL_FIELD_CONTEXT (f_ovf) = record;
12269 DECL_FIELD_CONTEXT (f_sav) = record;
12270
0fd2eac2 12271 TYPE_STUB_DECL (record) = type_decl;
9db1d521
HP
12272 TYPE_NAME (record) = type_decl;
12273 TYPE_FIELDS (record) = f_gpr;
910ad8de
NF
12274 DECL_CHAIN (f_gpr) = f_fpr;
12275 DECL_CHAIN (f_fpr) = f_ovf;
12276 DECL_CHAIN (f_ovf) = f_sav;
9db1d521
HP
12277
12278 layout_type (record);
12279
12280 /* The correct type is an array type of one element. */
12281 return build_array_type (record, build_index_type (size_zero_node));
12282}
12283
994fe660 12284/* Implement va_start by filling the va_list structure VALIST.
6c535c69
ZW
12285 STDARG_P is always true, and ignored.
12286 NEXTARG points to the first anonymous stack argument.
994fe660 12287
f710504c 12288 The following global variables are used to initialize
994fe660
UW
12289 the va_list structure:
12290
38173d38 12291 crtl->args.info:
994fe660 12292 holds number of gprs and fprs used for named arguments.
38173d38 12293 crtl->args.arg_offset_rtx:
994fe660
UW
12294 holds the offset of the first anonymous stack argument
12295 (relative to the virtual arg pointer). */
9db1d521 12296
d7bd8aeb 12297static void
9c808aad 12298s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
9db1d521
HP
12299{
12300 HOST_WIDE_INT n_gpr, n_fpr;
12301 int off;
12302 tree f_gpr, f_fpr, f_ovf, f_sav;
12303 tree gpr, fpr, ovf, sav, t;
12304
12305 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
910ad8de
NF
12306 f_fpr = DECL_CHAIN (f_gpr);
12307 f_ovf = DECL_CHAIN (f_fpr);
12308 f_sav = DECL_CHAIN (f_ovf);
9db1d521 12309
86710a8b 12310 valist = build_simple_mem_ref (valist);
47a25a46
RG
12311 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
12312 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
12313 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
12314 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
9db1d521
HP
12315
12316 /* Count number of gp and fp argument registers used. */
12317
38173d38
JH
12318 n_gpr = crtl->args.info.gprs;
12319 n_fpr = crtl->args.info.fprs;
9db1d521 12320
29a79fcf
UW
12321 if (cfun->va_list_gpr_size)
12322 {
726a989a
RB
12323 t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
12324 build_int_cst (NULL_TREE, n_gpr));
29a79fcf
UW
12325 TREE_SIDE_EFFECTS (t) = 1;
12326 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
12327 }
9db1d521 12328
29a79fcf
UW
12329 if (cfun->va_list_fpr_size)
12330 {
726a989a 12331 t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
47a25a46 12332 build_int_cst (NULL_TREE, n_fpr));
29a79fcf
UW
12333 TREE_SIDE_EFFECTS (t) = 1;
12334 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
12335 }
9db1d521 12336
4cb4721f
MK
12337 if (flag_split_stack
12338 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
12339 == NULL)
12340 && cfun->machine->split_stack_varargs_pointer == NULL_RTX)
12341 {
12342 rtx reg;
12343 rtx_insn *seq;
12344
12345 reg = gen_reg_rtx (Pmode);
12346 cfun->machine->split_stack_varargs_pointer = reg;
12347
12348 start_sequence ();
12349 emit_move_insn (reg, gen_rtx_REG (Pmode, 1));
12350 seq = get_insns ();
12351 end_sequence ();
12352
12353 push_topmost_sequence ();
12354 emit_insn_after (seq, entry_of_function ());
12355 pop_topmost_sequence ();
12356 }
12357
085261c8
AK
12358 /* Find the overflow area.
12359 FIXME: This currently is too pessimistic when the vector ABI is
12360 enabled. In that case we *always* set up the overflow area
12361 pointer. */
29a79fcf 12362 if (n_gpr + cfun->va_list_gpr_size > GP_ARG_NUM_REG
085261c8
AK
12363 || n_fpr + cfun->va_list_fpr_size > FP_ARG_NUM_REG
12364 || TARGET_VX_ABI)
29a79fcf 12365 {
4cb4721f
MK
12366 if (cfun->machine->split_stack_varargs_pointer == NULL_RTX)
12367 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
12368 else
12369 t = make_tree (TREE_TYPE (ovf), cfun->machine->split_stack_varargs_pointer);
9db1d521 12370
38173d38 12371 off = INTVAL (crtl->args.arg_offset_rtx);
29a79fcf
UW
12372 off = off < 0 ? 0 : off;
12373 if (TARGET_DEBUG_ARG)
12374 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
12375 (int)n_gpr, (int)n_fpr, off);
9db1d521 12376
5d49b6a7 12377 t = fold_build_pointer_plus_hwi (t, off);
9db1d521 12378
726a989a 12379 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
29a79fcf
UW
12380 TREE_SIDE_EFFECTS (t) = 1;
12381 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
12382 }
9db1d521
HP
12383
12384 /* Find the register save area. */
29a79fcf
UW
12385 if ((cfun->va_list_gpr_size && n_gpr < GP_ARG_NUM_REG)
12386 || (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG))
12387 {
12388 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
5d49b6a7 12389 t = fold_build_pointer_plus_hwi (t, -RETURN_REGNUM * UNITS_PER_LONG);
f4aa3848 12390
726a989a 12391 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
29a79fcf
UW
12392 TREE_SIDE_EFFECTS (t) = 1;
12393 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
12394 }
9db1d521
HP
12395}
12396
c7453384 12397/* Implement va_arg by updating the va_list structure
994fe660 12398 VALIST as required to retrieve an argument of type
c7453384
EC
12399 TYPE, and returning that argument.
12400
994fe660 12401 Generates code equivalent to:
c7453384 12402
9db1d521
HP
12403 if (integral value) {
12404 if (size <= 4 && args.gpr < 5 ||
c7453384 12405 size > 4 && args.gpr < 4 )
9db1d521
HP
12406 ret = args.reg_save_area[args.gpr+8]
12407 else
12408 ret = *args.overflow_arg_area++;
085261c8
AK
12409 } else if (vector value) {
12410 ret = *args.overflow_arg_area;
12411 args.overflow_arg_area += size / 8;
9db1d521
HP
12412 } else if (float value) {
12413 if (args.fpr < 2)
12414 ret = args.reg_save_area[args.fpr+64]
12415 else
12416 ret = *args.overflow_arg_area++;
12417 } else if (aggregate value) {
12418 if (args.gpr < 5)
12419 ret = *args.reg_save_area[args.gpr]
12420 else
12421 ret = **args.overflow_arg_area++;
12422 } */
12423
ab96de7e 12424static tree
f4aa3848 12425s390_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
726a989a 12426 gimple_seq *post_p ATTRIBUTE_UNUSED)
9db1d521
HP
12427{
12428 tree f_gpr, f_fpr, f_ovf, f_sav;
12429 tree gpr, fpr, ovf, sav, reg, t, u;
12430 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
e6b07173 12431 tree lab_false, lab_over = NULL_TREE;
085261c8
AK
12432 tree addr = create_tmp_var (ptr_type_node, "addr");
12433 bool left_align_p; /* How a value < UNITS_PER_LONG is aligned within
12434 a stack slot. */
9db1d521
HP
12435
12436 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
910ad8de
NF
12437 f_fpr = DECL_CHAIN (f_gpr);
12438 f_ovf = DECL_CHAIN (f_fpr);
12439 f_sav = DECL_CHAIN (f_ovf);
9db1d521 12440
47a25a46
RG
12441 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
12442 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
47a25a46 12443 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
9db1d521 12444
726a989a
RB
12445 /* The tree for args* cannot be shared between gpr/fpr and ovf since
12446 both appear on a lhs. */
12447 valist = unshare_expr (valist);
12448 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
12449
9db1d521
HP
12450 size = int_size_in_bytes (type);
12451
45901378
AK
12452 s390_check_type_for_vector_abi (type, true, false);
12453
8cd5a4e0 12454 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
9db1d521
HP
12455 {
12456 if (TARGET_DEBUG_ARG)
12457 {
12458 fprintf (stderr, "va_arg: aggregate type");
12459 debug_tree (type);
12460 }
12461
12462 /* Aggregates are passed by reference. */
12463 indirect_p = 1;
12464 reg = gpr;
12465 n_reg = 1;
ea506297 12466
b3d31392 12467 /* Kernel stack layout on 31 bit: It is assumed here that no padding
ea506297
AK
12468 will be added by s390_frame_info because for va_args an even
12469 number of GPRs always has to be saved (r15-r2 = 14 regs). */
9602b6a1
AK
12470 sav_ofs = 2 * UNITS_PER_LONG;
12471 sav_scale = UNITS_PER_LONG;
12472 size = UNITS_PER_LONG;
29a79fcf 12473 max_reg = GP_ARG_NUM_REG - n_reg;
085261c8
AK
12474 left_align_p = false;
12475 }
12476 else if (s390_function_arg_vector (TYPE_MODE (type), type))
12477 {
12478 if (TARGET_DEBUG_ARG)
12479 {
12480 fprintf (stderr, "va_arg: vector type");
12481 debug_tree (type);
12482 }
12483
12484 indirect_p = 0;
12485 reg = NULL_TREE;
12486 n_reg = 0;
12487 sav_ofs = 0;
12488 sav_scale = 8;
12489 max_reg = 0;
12490 left_align_p = true;
9db1d521 12491 }
82b1c974 12492 else if (s390_function_arg_float (TYPE_MODE (type), type))
9db1d521
HP
12493 {
12494 if (TARGET_DEBUG_ARG)
12495 {
12496 fprintf (stderr, "va_arg: float type");
12497 debug_tree (type);
12498 }
12499
12500 /* FP args go in FP registers, if present. */
12501 indirect_p = 0;
12502 reg = fpr;
12503 n_reg = 1;
9602b6a1 12504 sav_ofs = 16 * UNITS_PER_LONG;
9db1d521 12505 sav_scale = 8;
29a79fcf 12506 max_reg = FP_ARG_NUM_REG - n_reg;
085261c8 12507 left_align_p = false;
9db1d521
HP
12508 }
12509 else
12510 {
12511 if (TARGET_DEBUG_ARG)
12512 {
12513 fprintf (stderr, "va_arg: other type");
12514 debug_tree (type);
12515 }
12516
12517 /* Otherwise into GP registers. */
12518 indirect_p = 0;
12519 reg = gpr;
9602b6a1 12520 n_reg = (size + UNITS_PER_LONG - 1) / UNITS_PER_LONG;
ea506297 12521
b3d31392
AK
12522 /* Kernel stack layout on 31 bit: It is assumed here that no padding
12523 will be added by s390_frame_info because for va_args an even
12524 number of GPRs always has to be saved (r15-r2 = 14 regs). */
9602b6a1 12525 sav_ofs = 2 * UNITS_PER_LONG;
c7453384 12526
9602b6a1
AK
12527 if (size < UNITS_PER_LONG)
12528 sav_ofs += UNITS_PER_LONG - size;
9db1d521 12529
9602b6a1 12530 sav_scale = UNITS_PER_LONG;
29a79fcf 12531 max_reg = GP_ARG_NUM_REG - n_reg;
085261c8 12532 left_align_p = false;
9db1d521
HP
12533 }
12534
12535 /* Pull the value out of the saved registers ... */
12536
085261c8
AK
12537 if (reg != NULL_TREE)
12538 {
12539 /*
12540 if (reg > ((typeof (reg))max_reg))
12541 goto lab_false;
9db1d521 12542
085261c8 12543 addr = sav + sav_ofs + reg * sav_scale;
9db1d521 12544
085261c8 12545 goto lab_over;
9db1d521 12546
085261c8
AK
12547 lab_false:
12548 */
12549
12550 lab_false = create_artificial_label (UNKNOWN_LOCATION);
12551 lab_over = create_artificial_label (UNKNOWN_LOCATION);
12552
12553 t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
12554 t = build2 (GT_EXPR, boolean_type_node, reg, t);
12555 u = build1 (GOTO_EXPR, void_type_node, lab_false);
12556 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
12557 gimplify_and_add (t, pre_p);
9db1d521 12558
085261c8
AK
12559 t = fold_build_pointer_plus_hwi (sav, sav_ofs);
12560 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
12561 fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
12562 t = fold_build_pointer_plus (t, u);
9db1d521 12563
085261c8 12564 gimplify_assign (addr, t, pre_p);
9db1d521 12565
085261c8
AK
12566 gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
12567
12568 gimple_seq_add_stmt (pre_p, gimple_build_label (lab_false));
12569 }
9db1d521
HP
12570
12571 /* ... Otherwise out of the overflow area. */
12572
ab96de7e 12573 t = ovf;
085261c8 12574 if (size < UNITS_PER_LONG && !left_align_p)
5d49b6a7 12575 t = fold_build_pointer_plus_hwi (t, UNITS_PER_LONG - size);
ab96de7e
AS
12576
12577 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
12578
726a989a 12579 gimplify_assign (addr, t, pre_p);
ab96de7e 12580
085261c8
AK
12581 if (size < UNITS_PER_LONG && left_align_p)
12582 t = fold_build_pointer_plus_hwi (t, UNITS_PER_LONG);
12583 else
12584 t = fold_build_pointer_plus_hwi (t, size);
12585
726a989a 12586 gimplify_assign (ovf, t, pre_p);
ab96de7e 12587
085261c8
AK
12588 if (reg != NULL_TREE)
12589 gimple_seq_add_stmt (pre_p, gimple_build_label (lab_over));
ab96de7e
AS
12590
12591
12592 /* Increment register save count. */
12593
085261c8
AK
12594 if (n_reg > 0)
12595 {
12596 u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
12597 fold_convert (TREE_TYPE (reg), size_int (n_reg)));
12598 gimplify_and_add (u, pre_p);
12599 }
ab96de7e
AS
12600
12601 if (indirect_p)
12602 {
5b21f0f3
RG
12603 t = build_pointer_type_for_mode (build_pointer_type (type),
12604 ptr_mode, true);
ab96de7e
AS
12605 addr = fold_convert (t, addr);
12606 addr = build_va_arg_indirect_ref (addr);
12607 }
12608 else
12609 {
5b21f0f3 12610 t = build_pointer_type_for_mode (type, ptr_mode, true);
ab96de7e
AS
12611 addr = fold_convert (t, addr);
12612 }
12613
12614 return build_va_arg_indirect_ref (addr);
12615}
12616
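/* User-level sketch of what the gimplified sequence implements
   (hypothetical function, ordinary ISO C): */

#include <stdarg.h>

int
sum_ints (int n, ...)
{
  va_list ap;
  int i, s = 0;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    s += va_arg (ap, int);   /* expands to the reg/overflow logic above */
  va_end (ap);
  return s;
}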
5a3fe9b6
AK
12617/* Emit rtl for the tbegin or tbegin_retry (RETRY != NULL_RTX)
12618 expanders.
12619 DEST - Register location where CC will be stored.
12620 TDB - Pointer to a 256 byte area where to store the transaction
12621 diagnostic block. NULL if TDB is not needed.
12622 RETRY - Retry count value. If non-NULL a retry loop for CC2
12623 is emitted.
12624 CLOBBER_FPRS_P - If true, clobbers for all FPRs are emitted as part
12625 of the tbegin instruction pattern. */
12626
12627void
12628s390_expand_tbegin (rtx dest, rtx tdb, rtx retry, bool clobber_fprs_p)
12629{
2561451d 12630 rtx retry_plus_two = gen_reg_rtx (SImode);
5a3fe9b6 12631 rtx retry_reg = gen_reg_rtx (SImode);
19f8b229 12632 rtx_code_label *retry_label = NULL;
5a3fe9b6
AK
12633
12634 if (retry != NULL_RTX)
12635 {
12636 emit_move_insn (retry_reg, retry);
2561451d
AK
12637 emit_insn (gen_addsi3 (retry_plus_two, retry_reg, const2_rtx));
12638 emit_insn (gen_addsi3 (retry_reg, retry_reg, const1_rtx));
5a3fe9b6
AK
12639 retry_label = gen_label_rtx ();
12640 emit_label (retry_label);
12641 }
12642
12643 if (clobber_fprs_p)
c914ac45
AK
12644 {
12645 if (TARGET_VX)
12646 emit_insn (gen_tbegin_1_z13 (gen_rtx_CONST_INT (VOIDmode, TBEGIN_MASK),
12647 tdb));
12648 else
12649 emit_insn (gen_tbegin_1 (gen_rtx_CONST_INT (VOIDmode, TBEGIN_MASK),
12650 tdb));
12651 }
5a3fe9b6 12652 else
2561451d
AK
12653 emit_insn (gen_tbegin_nofloat_1 (gen_rtx_CONST_INT (VOIDmode, TBEGIN_MASK),
12654 tdb));
5a3fe9b6 12655
2561451d
AK
12656 emit_move_insn (dest, gen_rtx_UNSPEC (SImode,
12657 gen_rtvec (1, gen_rtx_REG (CCRAWmode,
12658 CC_REGNUM)),
12659 UNSPEC_CC_TO_INT));
5a3fe9b6
AK
12660 if (retry != NULL_RTX)
12661 {
86464cbd
AK
12662 const int CC0 = 1 << 3;
12663 const int CC1 = 1 << 2;
12664 const int CC3 = 1 << 0;
12665 rtx jump;
5a3fe9b6 12666 rtx count = gen_reg_rtx (SImode);
775c43d3 12667 rtx_code_label *leave_label = gen_label_rtx ();
86464cbd
AK
12668
12669 /* Exit for success and permanent failures. */
5a3fe9b6
AK
12670 jump = s390_emit_jump (leave_label,
12671 gen_rtx_EQ (VOIDmode,
12672 gen_rtx_REG (CCRAWmode, CC_REGNUM),
86464cbd
AK
12673 gen_rtx_CONST_INT (VOIDmode, CC0 | CC1 | CC3)));
12674 LABEL_NUSES (leave_label) = 1;
5a3fe9b6
AK
12675
12676 /* CC2 - transient failure. Perform retry with ppa. */
2561451d 12677 emit_move_insn (count, retry_plus_two);
5a3fe9b6
AK
12678 emit_insn (gen_subsi3 (count, count, retry_reg));
12679 emit_insn (gen_tx_assist (count));
12680 jump = emit_jump_insn (gen_doloop_si64 (retry_label,
12681 retry_reg,
12682 retry_reg));
12683 JUMP_LABEL (jump) = retry_label;
12684 LABEL_NUSES (retry_label) = 1;
86464cbd 12685 emit_label (leave_label);
5a3fe9b6 12686 }
5a3fe9b6
AK
12687}
12688
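/* Hedged user-level sketch (assumes -mhtm; __builtin_tbegin and
   __builtin_tend are the documented s390 HTM builtins wired to these
   expanders, with a return value of 0 meaning the transaction
   started): */

static int
bump_transactionally (int *counter)
{
  if (__builtin_tbegin ((void *) 0) == 0)   /* CC 0: transaction begun */
    {
      ++*counter;                           /* transactional update */
      __builtin_tend ();
      return 1;
    }
  return 0;                                 /* aborted or never started */
}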
5a3fe9b6 12689
9b80b7bc
AK
12690/* Return the decl for the target specific builtin with the function
12691 code FCODE. */
12692
12693static tree
12694s390_builtin_decl (unsigned fcode, bool initialized_p ATTRIBUTE_UNUSED)
12695{
12696 if (fcode >= S390_BUILTIN_MAX)
12697 return error_mark_node;
12698
12699 return s390_builtin_decls[fcode];
12700}
12701
d56a43a0
AK
12702/* We call mcount before the function prologue. So a profiled leaf
12703 function should stay a leaf function. */
12704
12705static bool
12706s390_keep_leaf_when_profiled ()
12707{
12708 return true;
12709}
5a3fe9b6 12710
ab96de7e
AS
12711/* Output assembly code for the trampoline template to
12712 stdio stream FILE.
12713
12714 On S/390, we use gpr 1 internally in the trampoline code;
12715 gpr 0 is used to hold the static chain. */
12716
b81ecf6f
RH
12717static void
12718s390_asm_trampoline_template (FILE *file)
ab96de7e
AS
12719{
12720 rtx op[2];
12721 op[0] = gen_rtx_REG (Pmode, 0);
12722 op[1] = gen_rtx_REG (Pmode, 1);
12723
12724 if (TARGET_64BIT)
12725 {
cab78b15
AK
12726 output_asm_insn ("basr\t%1,0", op); /* 2 byte */
12727 output_asm_insn ("lmg\t%0,%1,14(%1)", op); /* 6 byte */
12728 output_asm_insn ("br\t%1", op); /* 2 byte */
ab96de7e
AS
12729 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
12730 }
12731 else
12732 {
cab78b15
AK
12733 output_asm_insn ("basr\t%1,0", op); /* 2 byte */
12734 output_asm_insn ("lm\t%0,%1,6(%1)", op); /* 4 byte */
12735 output_asm_insn ("br\t%1", op); /* 2 byte */
ab96de7e
AS
12736 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
12737 }
12738}
12739
12740/* Emit RTL insns to initialize the variable parts of a trampoline.
12741 FNADDR is an RTX for the address of the function's pure code.
12742 CXT is an RTX for the static chain value for the function. */
12743
b81ecf6f
RH
12744static void
12745s390_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
ab96de7e 12746{
b81ecf6f
RH
12747 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
12748 rtx mem;
3a892e44 12749
b81ecf6f 12750 emit_block_move (m_tramp, assemble_trampoline_template (),
cab78b15 12751 GEN_INT (2 * UNITS_PER_LONG), BLOCK_OP_NORMAL);
b81ecf6f 12752
cab78b15 12753 mem = adjust_address (m_tramp, Pmode, 2 * UNITS_PER_LONG);
b81ecf6f 12754 emit_move_insn (mem, cxt);
cab78b15 12755 mem = adjust_address (m_tramp, Pmode, 3 * UNITS_PER_LONG);
b81ecf6f 12756 emit_move_insn (mem, fnaddr);
ab96de7e
AS
12757}
12758
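/* Sketch of the resulting 64-bit trampoline layout (offsets in bytes,
   derived from the template and the two stores above; with -m64,
   2 * UNITS_PER_LONG == 16):

      0: basr %r1,0            ; %r1 = address of next instruction
      2: lmg  %r0,%r1,14(%r1)  ; %r0 = [16] static chain, %r1 = [24] target
      8: br   %r1              ; branch to target
     10: (pad to 16)
     16: <static chain value>  ; patched in by s390_trampoline_init
     24: <function address>    ; patched in by s390_trampoline_init  */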
ab96de7e
AS
12759/* Output assembler code to FILE to increment profiler label # LABELNO
12760 for profiling a function entry. */
12761
12762void
12763s390_function_profiler (FILE *file, int labelno)
12764{
12765 rtx op[7];
12766
12767 char label[128];
12768 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
12769
12770 fprintf (file, "# function profiler \n");
12771
12772 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
12773 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
0a81f074 12774 op[1] = gen_rtx_MEM (Pmode, plus_constant (Pmode, op[1], UNITS_PER_LONG));
ab96de7e
AS
12775
12776 op[2] = gen_rtx_REG (Pmode, 1);
12777 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
12778 SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
12779
12780 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
12781 if (flag_pic)
12782 {
12783 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
12784 op[4] = gen_rtx_CONST (Pmode, op[4]);
12785 }
12786
12787 if (TARGET_64BIT)
12788 {
12789 output_asm_insn ("stg\t%0,%1", op);
12790 output_asm_insn ("larl\t%2,%3", op);
12791 output_asm_insn ("brasl\t%0,%4", op);
12792 output_asm_insn ("lg\t%0,%1", op);
12793 }
7e2507a5
MK
12794 else if (TARGET_CPU_ZARCH)
12795 {
12796 output_asm_insn ("st\t%0,%1", op);
12797 output_asm_insn ("larl\t%2,%3", op);
12798 output_asm_insn ("brasl\t%0,%4", op);
12799 output_asm_insn ("l\t%0,%1", op);
12800 }
ab96de7e
AS
12801 else if (!flag_pic)
12802 {
12803 op[6] = gen_label_rtx ();
12804
12805 output_asm_insn ("st\t%0,%1", op);
12806 output_asm_insn ("bras\t%2,%l6", op);
12807 output_asm_insn (".long\t%4", op);
12808 output_asm_insn (".long\t%3", op);
12809 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
12810 output_asm_insn ("l\t%0,0(%2)", op);
12811 output_asm_insn ("l\t%2,4(%2)", op);
12812 output_asm_insn ("basr\t%0,%0", op);
12813 output_asm_insn ("l\t%0,%1", op);
12814 }
12815 else
12816 {
12817 op[5] = gen_label_rtx ();
12818 op[6] = gen_label_rtx ();
12819
12820 output_asm_insn ("st\t%0,%1", op);
12821 output_asm_insn ("bras\t%2,%l6", op);
12822 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
12823 output_asm_insn (".long\t%4-%l5", op);
12824 output_asm_insn (".long\t%3-%l5", op);
12825 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
12826 output_asm_insn ("lr\t%0,%2", op);
12827 output_asm_insn ("a\t%0,0(%2)", op);
12828 output_asm_insn ("a\t%2,4(%2)", op);
12829 output_asm_insn ("basr\t%0,%0", op);
12830 output_asm_insn ("l\t%0,%1", op);
12831 }
12832}
12833
12834/* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
12835 into its SYMBOL_REF_FLAGS. */
12836
12837static void
12838s390_encode_section_info (tree decl, rtx rtl, int first)
12839{
12840 default_encode_section_info (decl, rtl, first);
12841
963fc8d0
AK
12842 if (TREE_CODE (decl) == VAR_DECL)
12843 {
e63d44c2
RD
12844 /* Store the alignment to be able to check if we can use
12845 a larl/load-relative instruction. We only handle the cases
54158a1a 12846 that can go wrong (i.e. no FUNC_DECLs). */
1397e163 12847 if (DECL_ALIGN (decl) == 0 || DECL_ALIGN (decl) % 16)
e63d44c2 12848 SYMBOL_FLAG_SET_NOTALIGN2 (XEXP (rtl, 0));
54158a1a
AK
12849 else if (DECL_ALIGN (decl) % 32)
12850 SYMBOL_FLAG_SET_NOTALIGN4 (XEXP (rtl, 0));
12851 else if (DECL_ALIGN (decl) % 64)
12852 SYMBOL_FLAG_SET_NOTALIGN8 (XEXP (rtl, 0));
963fc8d0
AK
12853 }
12854
12855 /* Literal pool references don't have a decl so they are handled
12856 differently here. We rely on the information in the MEM_ALIGN
e63d44c2 12857 entry to decide upon the alignment. */
963fc8d0
AK
12858 if (MEM_P (rtl)
12859 && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
54158a1a 12860 && TREE_CONSTANT_POOL_ADDRESS_P (XEXP (rtl, 0)))
e63d44c2 12861 {
1397e163 12862 if (MEM_ALIGN (rtl) == 0 || MEM_ALIGN (rtl) % 16)
e63d44c2 12863 SYMBOL_FLAG_SET_NOTALIGN2 (XEXP (rtl, 0));
54158a1a
AK
12864 else if (MEM_ALIGN (rtl) % 32)
12865 SYMBOL_FLAG_SET_NOTALIGN4 (XEXP (rtl, 0));
12866 else if (MEM_ALIGN (rtl) % 64)
12867 SYMBOL_FLAG_SET_NOTALIGN8 (XEXP (rtl, 0));
e63d44c2 12868 }
ab96de7e
AS
12869}
12870
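/* Sketch (hypothetical variables): what the NOTALIGN flags recorded
   above mean for the larl/load-relative family. */

char  tag;   /* 1-byte alignment: NOTALIGN2, larl cannot address it */
short s16;   /* 2-byte alignment: larl is fine, NOTALIGN4 is set    */
int   i32;   /* 4-byte alignment: 4-byte load-relative is fine,
                NOTALIGN8 is set                                    */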
12871/* Output thunk to FILE that implements a C++ virtual function call (with
12872 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
12873 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
12874 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
12875 relative to the resulting this pointer. */
12876
12877static void
12878s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
12879 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
12880 tree function)
12881{
12882 rtx op[10];
12883 int nonlocal = 0;
12884
81ef7e24
JJ
12885 /* Make sure unwind info is emitted for the thunk if needed. */
12886 final_start_function (emit_barrier (), file, 1);
12887
ab96de7e
AS
12888 /* Operand 0 is the target function. */
12889 op[0] = XEXP (DECL_RTL (function), 0);
12890 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
12891 {
12892 nonlocal = 1;
12893 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
12894 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
12895 op[0] = gen_rtx_CONST (Pmode, op[0]);
12896 }
12897
12898 /* Operand 1 is the 'this' pointer. */
12899 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
12900 op[1] = gen_rtx_REG (Pmode, 3);
12901 else
12902 op[1] = gen_rtx_REG (Pmode, 2);
12903
12904 /* Operand 2 is the delta. */
12905 op[2] = GEN_INT (delta);
12906
12907 /* Operand 3 is the vcall_offset. */
12908 op[3] = GEN_INT (vcall_offset);
12909
12910 /* Operand 4 is the temporary register. */
12911 op[4] = gen_rtx_REG (Pmode, 1);
12912
12913 /* Operands 5 to 8 can be used as labels. */
12914 op[5] = NULL_RTX;
12915 op[6] = NULL_RTX;
12916 op[7] = NULL_RTX;
12917 op[8] = NULL_RTX;
12918
12919 /* Operand 9 can be used for temporary register. */
12920 op[9] = NULL_RTX;
12921
12922 /* Generate code. */
12923 if (TARGET_64BIT)
12924 {
12925 /* Setup literal pool pointer if required. */
12926 if ((!DISP_IN_RANGE (delta)
ec24698e
UW
12927 && !CONST_OK_FOR_K (delta)
12928 && !CONST_OK_FOR_Os (delta))
ab96de7e 12929 || (!DISP_IN_RANGE (vcall_offset)
ec24698e
UW
12930 && !CONST_OK_FOR_K (vcall_offset)
12931 && !CONST_OK_FOR_Os (vcall_offset)))
ab96de7e
AS
12932 {
12933 op[5] = gen_label_rtx ();
12934 output_asm_insn ("larl\t%4,%5", op);
12935 }
12936
12937 /* Add DELTA to this pointer. */
12938 if (delta)
12939 {
b5c67a49 12940 if (CONST_OK_FOR_J (delta))
ab96de7e
AS
12941 output_asm_insn ("la\t%1,%2(%1)", op);
12942 else if (DISP_IN_RANGE (delta))
12943 output_asm_insn ("lay\t%1,%2(%1)", op);
b5c67a49 12944 else if (CONST_OK_FOR_K (delta))
ab96de7e 12945 output_asm_insn ("aghi\t%1,%2", op);
ec24698e
UW
12946 else if (CONST_OK_FOR_Os (delta))
12947 output_asm_insn ("agfi\t%1,%2", op);
ab96de7e
AS
12948 else
12949 {
12950 op[6] = gen_label_rtx ();
12951 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
12952 }
12953 }
12954
12955 /* Perform vcall adjustment. */
12956 if (vcall_offset)
12957 {
12958 if (DISP_IN_RANGE (vcall_offset))
12959 {
12960 output_asm_insn ("lg\t%4,0(%1)", op);
12961 output_asm_insn ("ag\t%1,%3(%4)", op);
12962 }
b5c67a49 12963 else if (CONST_OK_FOR_K (vcall_offset))
ab96de7e
AS
12964 {
12965 output_asm_insn ("lghi\t%4,%3", op);
12966 output_asm_insn ("ag\t%4,0(%1)", op);
12967 output_asm_insn ("ag\t%1,0(%4)", op);
12968 }
ec24698e
UW
12969 else if (CONST_OK_FOR_Os (vcall_offset))
12970 {
12971 output_asm_insn ("lgfi\t%4,%3", op);
12972 output_asm_insn ("ag\t%4,0(%1)", op);
12973 output_asm_insn ("ag\t%1,0(%4)", op);
12974 }
ab96de7e
AS
12975 else
12976 {
12977 op[7] = gen_label_rtx ();
12978 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
12979 output_asm_insn ("ag\t%4,0(%1)", op);
12980 output_asm_insn ("ag\t%1,0(%4)", op);
12981 }
12982 }
12983
12984 /* Jump to target. */
12985 output_asm_insn ("jg\t%0", op);
12986
12987 /* Output literal pool if required. */
12988 if (op[5])
12989 {
12990 output_asm_insn (".align\t4", op);
12991 targetm.asm_out.internal_label (file, "L",
12992 CODE_LABEL_NUMBER (op[5]));
12993 }
12994 if (op[6])
12995 {
12996 targetm.asm_out.internal_label (file, "L",
12997 CODE_LABEL_NUMBER (op[6]));
12998 output_asm_insn (".long\t%2", op);
12999 }
13000 if (op[7])
13001 {
13002 targetm.asm_out.internal_label (file, "L",
13003 CODE_LABEL_NUMBER (op[7]));
13004 output_asm_insn (".long\t%3", op);
13005 }
13006 }
13007 else
13008 {
13009 /* Setup base pointer if required. */
13010 if (!vcall_offset
13011 || (!DISP_IN_RANGE (delta)
ec24698e
UW
13012 && !CONST_OK_FOR_K (delta)
13013 && !CONST_OK_FOR_Os (delta))
ab96de7e 13014 || (!DISP_IN_RANGE (delta)
ec24698e
UW
13015 && !CONST_OK_FOR_K (vcall_offset)
13016 && !CONST_OK_FOR_Os (vcall_offset)))
ab96de7e
AS
13017 {
13018 op[5] = gen_label_rtx ();
13019 output_asm_insn ("basr\t%4,0", op);
13020 targetm.asm_out.internal_label (file, "L",
13021 CODE_LABEL_NUMBER (op[5]));
13022 }
13023
13024 /* Add DELTA to this pointer. */
13025 if (delta)
13026 {
b5c67a49 13027 if (CONST_OK_FOR_J (delta))
ab96de7e
AS
13028 output_asm_insn ("la\t%1,%2(%1)", op);
13029 else if (DISP_IN_RANGE (delta))
13030 output_asm_insn ("lay\t%1,%2(%1)", op);
b5c67a49 13031 else if (CONST_OK_FOR_K (delta))
ab96de7e 13032 output_asm_insn ("ahi\t%1,%2", op);
ec24698e
UW
13033 else if (CONST_OK_FOR_Os (delta))
13034 output_asm_insn ("afi\t%1,%2", op);
ab96de7e
AS
13035 else
13036 {
13037 op[6] = gen_label_rtx ();
13038 output_asm_insn ("a\t%1,%6-%5(%4)", op);
13039 }
13040 }
13041
13042 /* Perform vcall adjustment. */
13043 if (vcall_offset)
13044 {
b5c67a49 13045 if (CONST_OK_FOR_J (vcall_offset))
ab96de7e 13046 {
c4d50129 13047 output_asm_insn ("l\t%4,0(%1)", op);
ab96de7e
AS
13048 output_asm_insn ("a\t%1,%3(%4)", op);
13049 }
13050 else if (DISP_IN_RANGE (vcall_offset))
13051 {
c4d50129 13052 output_asm_insn ("l\t%4,0(%1)", op);
ab96de7e
AS
13053 output_asm_insn ("ay\t%1,%3(%4)", op);
13054 }
b5c67a49 13055 else if (CONST_OK_FOR_K (vcall_offset))
ab96de7e
AS
13056 {
13057 output_asm_insn ("lhi\t%4,%3", op);
13058 output_asm_insn ("a\t%4,0(%1)", op);
13059 output_asm_insn ("a\t%1,0(%4)", op);
13060 }
ec24698e
UW
13061 else if (CONST_OK_FOR_Os (vcall_offset))
13062 {
13063 output_asm_insn ("iilf\t%4,%3", op);
13064 output_asm_insn ("a\t%4,0(%1)", op);
13065 output_asm_insn ("a\t%1,0(%4)", op);
13066 }
ab96de7e
AS
13067 else
13068 {
13069 op[7] = gen_label_rtx ();
13070 output_asm_insn ("l\t%4,%7-%5(%4)", op);
13071 output_asm_insn ("a\t%4,0(%1)", op);
13072 output_asm_insn ("a\t%1,0(%4)", op);
13073 }
9db1d521 13074
ab96de7e
AS
13075 /* We had to clobber the base pointer register.
13076 Re-setup the base pointer (with a different base). */
13077 op[5] = gen_label_rtx ();
13078 output_asm_insn ("basr\t%4,0", op);
13079 targetm.asm_out.internal_label (file, "L",
13080 CODE_LABEL_NUMBER (op[5]));
13081 }
9db1d521 13082
ab96de7e
AS
13083 /* Jump to target. */
13084 op[8] = gen_label_rtx ();
9db1d521 13085
ab96de7e
AS
13086 if (!flag_pic)
13087 output_asm_insn ("l\t%4,%8-%5(%4)", op);
13088 else if (!nonlocal)
13089 output_asm_insn ("a\t%4,%8-%5(%4)", op);
13090 /* We cannot call through .plt, since .plt requires %r12 loaded. */
13091 else if (flag_pic == 1)
13092 {
13093 output_asm_insn ("a\t%4,%8-%5(%4)", op);
13094 output_asm_insn ("l\t%4,%0(%4)", op);
13095 }
13096 else if (flag_pic == 2)
13097 {
13098 op[9] = gen_rtx_REG (Pmode, 0);
13099 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
13100 output_asm_insn ("a\t%4,%8-%5(%4)", op);
13101 output_asm_insn ("ar\t%4,%9", op);
13102 output_asm_insn ("l\t%4,0(%4)", op);
13103 }
9db1d521 13104
ab96de7e 13105 output_asm_insn ("br\t%4", op);
9db1d521 13106
ab96de7e
AS
13107 /* Output literal pool. */
13108 output_asm_insn (".align\t4", op);
9db1d521 13109
ab96de7e
AS
13110 if (nonlocal && flag_pic == 2)
13111 output_asm_insn (".long\t%0", op);
13112 if (nonlocal)
13113 {
13114 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
13115 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
13116 }
63694b5e 13117
ab96de7e
AS
13118 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
13119 if (!flag_pic)
13120 output_asm_insn (".long\t%0", op);
13121 else
13122 output_asm_insn (".long\t%0-%5", op);
9db1d521 13123
ab96de7e
AS
13124 if (op[6])
13125 {
13126 targetm.asm_out.internal_label (file, "L",
13127 CODE_LABEL_NUMBER (op[6]));
13128 output_asm_insn (".long\t%2", op);
13129 }
13130 if (op[7])
13131 {
13132 targetm.asm_out.internal_label (file, "L",
13133 CODE_LABEL_NUMBER (op[7]));
13134 output_asm_insn (".long\t%3", op);
13135 }
9db1d521 13136 }
81ef7e24 13137 final_end_function ();
9db1d521
HP
13138}
13139
ab96de7e 13140static bool
095a2d76 13141s390_valid_pointer_mode (scalar_int_mode mode)
ab96de7e
AS
13142{
13143 return (mode == SImode || (TARGET_64BIT && mode == DImode));
13144}
994fe660 13145
9a6f71b4 13146/* Checks whether the given CALL_EXPR would use a call-
ab96de7e
AS
13147 saved register. This is used to decide whether sibling call
13148 optimization could be performed on the respective function
13149 call. */
fd3cd001 13150
ab96de7e 13151static bool
9a6f71b4 13152s390_call_saved_register_used (tree call_expr)
fd3cd001 13153{
d5cc9181
JR
13154 CUMULATIVE_ARGS cum_v;
13155 cumulative_args_t cum;
ab96de7e 13156 tree parameter;
ef4bddc2 13157 machine_mode mode;
ab96de7e
AS
13158 tree type;
13159 rtx parm_rtx;
9a6f71b4 13160 int reg, i;
fd3cd001 13161
d5cc9181
JR
13162 INIT_CUMULATIVE_ARGS (cum_v, NULL, NULL, 0, 0);
13163 cum = pack_cumulative_args (&cum_v);
fd3cd001 13164
9a6f71b4 13165 for (i = 0; i < call_expr_nargs (call_expr); i++)
ab96de7e 13166 {
9a6f71b4 13167 parameter = CALL_EXPR_ARG (call_expr, i);
8d933e31 13168 gcc_assert (parameter);
fd3cd001 13169
ab96de7e
AS
13170 /* For an undeclared variable passed as parameter we will get
13171 an ERROR_MARK node here. */
13172 if (TREE_CODE (parameter) == ERROR_MARK)
13173 return true;
fd3cd001 13174
8d933e31
AS
13175 type = TREE_TYPE (parameter);
13176 gcc_assert (type);
fd3cd001 13177
8d933e31
AS
13178 mode = TYPE_MODE (type);
13179 gcc_assert (mode);
fd3cd001 13180
085261c8
AK
13181 /* We assume that in the target function all parameters are
13182 named. This only has an impact on vector argument register
13183 usage, none of which is call-saved. */
d5cc9181 13184 if (pass_by_reference (&cum_v, mode, type, true))
ab96de7e
AS
13185 {
13186 mode = Pmode;
13187 type = build_pointer_type (type);
13188 }
fd3cd001 13189
085261c8 13190 parm_rtx = s390_function_arg (cum, mode, type, true);
fd3cd001 13191
085261c8 13192 s390_function_arg_advance (cum, mode, type, true);
fd3cd001 13193
9602b6a1
AK
13194 if (!parm_rtx)
13195 continue;
13196
13197 if (REG_P (parm_rtx))
13198 {
e8de8fea
AK
13199 for (reg = 0;
13200 reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
13201 reg++)
9602b6a1
AK
13202 if (!call_used_regs[reg + REGNO (parm_rtx)])
13203 return true;
13204 }
13205
13206 if (GET_CODE (parm_rtx) == PARALLEL)
ab96de7e 13207 {
9602b6a1 13208 int i;
e8de8fea 13209
9602b6a1
AK
13210 for (i = 0; i < XVECLEN (parm_rtx, 0); i++)
13211 {
13212 rtx r = XEXP (XVECEXP (parm_rtx, 0, i), 0);
9602b6a1
AK
13213
13214 gcc_assert (REG_P (r));
13215
e8de8fea
AK
13216 for (reg = 0;
13217 reg < HARD_REGNO_NREGS (REGNO (r), GET_MODE (r));
13218 reg++)
9602b6a1
AK
13219 if (!call_used_regs[reg + REGNO (r)])
13220 return true;
13221 }
ab96de7e 13222 }
9602b6a1 13223
ab96de7e
AS
13224 }
13225 return false;
13226}
fd3cd001 13227
ab96de7e
AS
13228/* Return true if the given call expression can be
13229 turned into a sibling call.
13230 DECL holds the declaration of the function to be called whereas
13231 EXP is the call expression itself. */
fd3cd001 13232
ab96de7e
AS
13233static bool
13234s390_function_ok_for_sibcall (tree decl, tree exp)
13235{
13236 /* The TPF epilogue uses register 1. */
13237 if (TARGET_TPF_PROFILING)
13238 return false;
fd3cd001 13239
ab96de7e
AS
13240 /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
13241 which would have to be restored before the sibcall. */
7691ec4e 13242 if (!TARGET_64BIT && flag_pic && decl && !targetm.binds_local_p (decl))
ab96de7e 13243 return false;
fd3cd001 13244
ab96de7e
AS
13245 /* Register 6 on s390 is available as an argument register but unfortunately
13246 "caller saved". This makes functions needing this register for arguments
13247 not suitable for sibcalls. */
9a6f71b4 13248 return !s390_call_saved_register_used (exp);
ab96de7e 13249}
fd3cd001 13250
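/* Illustrative sketch (hypothetical functions): the fifth GPR argument
   slot is r6, which is call-saved on S/390, so the predicate above
   rejects such calls for sibcall optimization. */

extern int callee5 (int, int, int, int, int);

int
no_sibcall (int a, int b, int c, int d, int e)
{
  return callee5 (a, b, c, d, e);   /* 'e' lands in r6: no sibcall */
}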
ab96de7e 13251/* Return the fixed registers used for condition codes. */
fd3cd001 13252
ab96de7e
AS
13253static bool
13254s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
13255{
13256 *p1 = CC_REGNUM;
13257 *p2 = INVALID_REGNUM;
f4aa3848 13258
ab96de7e
AS
13259 return true;
13260}
fd3cd001 13261
ab96de7e
AS
13262/* This function is used by the call expanders of the machine description.
13263 It emits the call insn itself together with the necessary operations
13264 to adjust the target address and returns the emitted insn.
13265 ADDR_LOCATION is the target address rtx
13266 TLS_CALL the location of the thread-local symbol
13267 RESULT_REG the register where the result of the call should be stored
13268 RETADDR_REG the register where the return address should be stored
13269 If this parameter is NULL_RTX the call is considered
13270 to be a sibling call. */
fd3cd001 13271
775c43d3 13272rtx_insn *
ab96de7e
AS
13273s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
13274 rtx retaddr_reg)
9db1d521 13275{
ab96de7e 13276 bool plt_call = false;
775c43d3 13277 rtx_insn *insn;
ab96de7e
AS
13278 rtx call;
13279 rtx clobber;
13280 rtvec vec;
cadc42db 13281
ab96de7e
AS
13282 /* Direct function calls need special treatment. */
13283 if (GET_CODE (addr_location) == SYMBOL_REF)
9db1d521 13284 {
ab96de7e
AS
13285 /* When calling a global routine in PIC mode, we must
13286 replace the symbol itself with the PLT stub. */
13287 if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
13288 {
15288e9a 13289 if (TARGET_64BIT || retaddr_reg != NULL_RTX)
72e2cf16
AK
13290 {
13291 addr_location = gen_rtx_UNSPEC (Pmode,
13292 gen_rtvec (1, addr_location),
13293 UNSPEC_PLT);
13294 addr_location = gen_rtx_CONST (Pmode, addr_location);
13295 plt_call = true;
13296 }
13297 else
13298 /* For -fpic code the PLT entries might use r12 which is
13299 call-saved. Therefore we cannot do a sibcall when
13300 calling directly using a symbol ref. When reaching
13301 this point we decided (in s390_function_ok_for_sibcall)
13302 to do a sibcall for a function pointer but one of the
13303 optimizers was able to get rid of the function pointer
13304 by propagating the symbol ref into the call. This
13305 optimization is illegal for S/390, so we turn the direct
13306 call into an indirect call again. */
13307 addr_location = force_reg (Pmode, addr_location);
ab96de7e
AS
13308 }
13309
13310 /* Unless we can use the bras(l) insn, force the
13311 routine address into a register. */
13312 if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
13313 {
13314 if (flag_pic)
13315 addr_location = legitimize_pic_address (addr_location, 0);
13316 else
13317 addr_location = force_reg (Pmode, addr_location);
13318 }
9db1d521 13319 }
ab96de7e
AS
13320
13321 /* If it is already an indirect call or the code above moved the
13322 SYMBOL_REF to somewhere else make sure the address can be found in
13323 register 1. */
13324 if (retaddr_reg == NULL_RTX
13325 && GET_CODE (addr_location) != SYMBOL_REF
13326 && !plt_call)
9db1d521 13327 {
ab96de7e
AS
13328 emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
13329 addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
9db1d521 13330 }
9db1d521 13331
ab96de7e
AS
13332 addr_location = gen_rtx_MEM (QImode, addr_location);
13333 call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);
4023fb28 13334
ab96de7e 13335 if (result_reg != NULL_RTX)
f7df4a84 13336 call = gen_rtx_SET (result_reg, call);
4023fb28 13337
ab96de7e
AS
13338 if (retaddr_reg != NULL_RTX)
13339 {
13340 clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);
c7453384 13341
ab96de7e
AS
13342 if (tls_call != NULL_RTX)
13343 vec = gen_rtvec (3, call, clobber,
13344 gen_rtx_USE (VOIDmode, tls_call));
13345 else
13346 vec = gen_rtvec (2, call, clobber);
4023fb28 13347
ab96de7e
AS
13348 call = gen_rtx_PARALLEL (VOIDmode, vec);
13349 }
4023fb28 13350
ab96de7e 13351 insn = emit_call_insn (call);
4023fb28 13352
ab96de7e
AS
13353 /* 31-bit PLT stubs and tls calls use the GOT register implicitly. */
13354 if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
13355 {
13356 /* s390_function_ok_for_sibcall should
13357 have denied sibcalls in this case. */
8d933e31 13358 gcc_assert (retaddr_reg != NULL_RTX);
68c0ef75 13359 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), gen_rtx_REG (Pmode, 12));
ab96de7e
AS
13360 }
13361 return insn;
13362}
4023fb28 13363
5efd84c5 13364/* Implement TARGET_CONDITIONAL_REGISTER_USAGE. */
4023fb28 13365
5efd84c5 13366static void
ab96de7e
AS
13367s390_conditional_register_usage (void)
13368{
13369 int i;
4023fb28 13370
4023fb28
UW
13371 if (flag_pic)
13372 {
ab96de7e
AS
13373 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
13374 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
4023fb28 13375 }
ab96de7e 13376 if (TARGET_CPU_ZARCH)
4023fb28 13377 {
7633f08e
UW
13378 fixed_regs[BASE_REGNUM] = 0;
13379 call_used_regs[BASE_REGNUM] = 0;
ab96de7e
AS
13380 fixed_regs[RETURN_REGNUM] = 0;
13381 call_used_regs[RETURN_REGNUM] = 0;
4023fb28 13382 }
ab96de7e 13383 if (TARGET_64BIT)
4023fb28 13384 {
2cf4c39e 13385 for (i = FPR8_REGNUM; i <= FPR15_REGNUM; i++)
ab96de7e 13386 call_used_regs[i] = call_really_used_regs[i] = 0;
4023fb28
UW
13387 }
13388 else
13389 {
2cf4c39e
AK
13390 call_used_regs[FPR4_REGNUM] = call_really_used_regs[FPR4_REGNUM] = 0;
13391 call_used_regs[FPR6_REGNUM] = call_really_used_regs[FPR6_REGNUM] = 0;
ab96de7e 13392 }
4023fb28 13393
ab96de7e
AS
13394 if (TARGET_SOFT_FLOAT)
13395 {
2cf4c39e 13396 for (i = FPR0_REGNUM; i <= FPR15_REGNUM; i++)
ab96de7e 13397 call_used_regs[i] = fixed_regs[i] = 1;
4023fb28 13398 }
085261c8
AK
13399
13400 /* Disable v16 - v31 for non-vector target. */
13401 if (!TARGET_VX)
13402 {
13403 for (i = VR16_REGNUM; i <= VR31_REGNUM; i++)
13404 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
13405 }
4023fb28
UW
13406}
13407
ab96de7e 13408/* Corresponding function to eh_return expander. */
fb49053f 13409
ab96de7e
AS
13410static GTY(()) rtx s390_tpf_eh_return_symbol;
13411void
13412s390_emit_tpf_eh_return (rtx target)
fb49053f 13413{
775c43d3
DM
13414 rtx_insn *insn;
13415 rtx reg, orig_ra;
e23795ea 13416
ab96de7e
AS
13417 if (!s390_tpf_eh_return_symbol)
13418 s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");
13419
13420 reg = gen_rtx_REG (Pmode, 2);
87cb0c0c 13421 orig_ra = gen_rtx_REG (Pmode, 3);
ab96de7e
AS
13422
13423 emit_move_insn (reg, target);
87cb0c0c 13424 emit_move_insn (orig_ra, get_hard_reg_initial_val (Pmode, RETURN_REGNUM));
ab96de7e
AS
13425 insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
13426 gen_rtx_REG (Pmode, RETURN_REGNUM));
13427 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);
87cb0c0c 13428 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), orig_ra);
ab96de7e
AS
13429
13430 emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
fd3cd001
UW
13431}
13432
ab96de7e
AS
13433/* Rework the prologue/epilogue to avoid saving/restoring
13434 registers unnecessarily. */
3062825f 13435
c590b625 13436static void
ab96de7e 13437s390_optimize_prologue (void)
483ab821 13438{
775c43d3 13439 rtx_insn *insn, *new_insn, *next_insn;
3062825f 13440
ab96de7e 13441 /* Do a final recompute of the frame-related data. */
6455a49e 13442 s390_optimize_register_info ();
3062825f 13443
ab96de7e
AS
13444 /* If all special registers are in fact used, there's nothing we
13445 can do, so no point in walking the insn list. */
3062825f 13446
f4aa3848 13447 if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
ab96de7e 13448 && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
f4aa3848
AK
13449 && (TARGET_CPU_ZARCH
13450 || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
ab96de7e
AS
13451 && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
13452 return;
3062825f 13453
ab96de7e 13454 /* Search for prologue/epilogue insns and replace them. */
3062825f 13455
ab96de7e
AS
13456 for (insn = get_insns (); insn; insn = next_insn)
13457 {
13458 int first, last, off;
13459 rtx set, base, offset;
6455a49e 13460 rtx pat;
3062825f 13461
ab96de7e 13462 next_insn = NEXT_INSN (insn);
89ce1c8f 13463
6455a49e 13464 if (! NONJUMP_INSN_P (insn) || ! RTX_FRAME_RELATED_P (insn))
ab96de7e 13465 continue;
3062825f 13466
6455a49e
AK
13467 pat = PATTERN (insn);
13468
13469 /* Remove ldgr/lgdr instructions used for saving and restore
13470 GPRs if possible. */
82c6f58a
AK
13471 if (TARGET_Z10)
13472 {
13473 rtx tmp_pat = pat;
6455a49e 13474
82c6f58a
AK
13475 if (INSN_CODE (insn) == CODE_FOR_stack_restore_from_fpr)
13476 tmp_pat = XVECEXP (pat, 0, 0);
6455a49e 13477
82c6f58a
AK
13478 if (GET_CODE (tmp_pat) == SET
13479 && GET_MODE (SET_SRC (tmp_pat)) == DImode
13480 && REG_P (SET_SRC (tmp_pat))
13481 && REG_P (SET_DEST (tmp_pat)))
13482 {
13483 int src_regno = REGNO (SET_SRC (tmp_pat));
13484 int dest_regno = REGNO (SET_DEST (tmp_pat));
13485 int gpr_regno;
13486 int fpr_regno;
13487
13488 if (!((GENERAL_REGNO_P (src_regno)
13489 && FP_REGNO_P (dest_regno))
13490 || (FP_REGNO_P (src_regno)
13491 && GENERAL_REGNO_P (dest_regno))))
13492 continue;
6455a49e 13493
82c6f58a
AK
13494 gpr_regno = GENERAL_REGNO_P (src_regno) ? src_regno : dest_regno;
13495 fpr_regno = FP_REGNO_P (src_regno) ? src_regno : dest_regno;
6455a49e 13496
82c6f58a
AK
13497 /* GPR must be call-saved, FPR must be call-clobbered. */
13498 if (!call_really_used_regs[fpr_regno]
13499 || call_really_used_regs[gpr_regno])
13500 continue;
13501
13502 /* It must not happen that what we once saved in an FPR now
13503 needs a stack slot. */
13504 gcc_assert (cfun_gpr_save_slot (gpr_regno) != SAVE_SLOT_STACK);
13505
13506 if (cfun_gpr_save_slot (gpr_regno) == SAVE_SLOT_NONE)
13507 {
13508 remove_insn (insn);
13509 continue;
13510 }
6455a49e
AK
13511 }
13512 }
13513
13514 if (GET_CODE (pat) == PARALLEL
13515 && store_multiple_operation (pat, VOIDmode))
3062825f 13516 {
6455a49e 13517 set = XVECEXP (pat, 0, 0);
ab96de7e 13518 first = REGNO (SET_SRC (set));
6455a49e 13519 last = first + XVECLEN (pat, 0) - 1;
ab96de7e
AS
13520 offset = const0_rtx;
13521 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
13522 off = INTVAL (offset);
3062825f 13523
ab96de7e
AS
13524 if (GET_CODE (base) != REG || off < 0)
13525 continue;
22a707a4
AK
13526 if (cfun_frame_layout.first_save_gpr != -1
13527 && (cfun_frame_layout.first_save_gpr < first
13528 || cfun_frame_layout.last_save_gpr > last))
13529 continue;
ab96de7e
AS
13530 if (REGNO (base) != STACK_POINTER_REGNUM
13531 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
13532 continue;
13533 if (first > BASE_REGNUM || last < BASE_REGNUM)
13534 continue;
13535
13536 if (cfun_frame_layout.first_save_gpr != -1)
3062825f 13537 {
775c43d3 13538 rtx s_pat = save_gprs (base,
ab96de7e 13539 off + (cfun_frame_layout.first_save_gpr
9602b6a1 13540 - first) * UNITS_PER_LONG,
ab96de7e
AS
13541 cfun_frame_layout.first_save_gpr,
13542 cfun_frame_layout.last_save_gpr);
775c43d3 13543 new_insn = emit_insn_before (s_pat, insn);
ab96de7e 13544 INSN_ADDRESSES_NEW (new_insn, -1);
3062825f 13545 }
3062825f 13546
ab96de7e
AS
13547 remove_insn (insn);
13548 continue;
3062825f
UW
13549 }
13550
22a707a4 13551 if (cfun_frame_layout.first_save_gpr == -1
6455a49e
AK
13552 && GET_CODE (pat) == SET
13553 && GENERAL_REG_P (SET_SRC (pat))
13554 && GET_CODE (SET_DEST (pat)) == MEM)
3062825f 13555 {
6455a49e 13556 set = pat;
ab96de7e
AS
13557 first = REGNO (SET_SRC (set));
13558 offset = const0_rtx;
13559 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
13560 off = INTVAL (offset);
3062825f 13561
ab96de7e
AS
13562 if (GET_CODE (base) != REG || off < 0)
13563 continue;
13564 if (REGNO (base) != STACK_POINTER_REGNUM
13565 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
13566 continue;
3062825f 13567
ab96de7e
AS
13568 remove_insn (insn);
13569 continue;
3062825f
UW
13570 }
13571
6455a49e
AK
13572 if (GET_CODE (pat) == PARALLEL
13573 && load_multiple_operation (pat, VOIDmode))
89ce1c8f 13574 {
6455a49e 13575 set = XVECEXP (pat, 0, 0);
ab96de7e 13576 first = REGNO (SET_DEST (set));
6455a49e 13577 last = first + XVECLEN (pat, 0) - 1;
ab96de7e
AS
13578 offset = const0_rtx;
13579 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
13580 off = INTVAL (offset);
89ce1c8f 13581
ab96de7e
AS
13582 if (GET_CODE (base) != REG || off < 0)
13583 continue;
6455a49e 13584
22a707a4
AK
13585 if (cfun_frame_layout.first_restore_gpr != -1
13586 && (cfun_frame_layout.first_restore_gpr < first
13587 || cfun_frame_layout.last_restore_gpr > last))
13588 continue;
ab96de7e
AS
13589 if (REGNO (base) != STACK_POINTER_REGNUM
13590 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
13591 continue;
13592 if (first > BASE_REGNUM || last < BASE_REGNUM)
13593 continue;
3062825f 13594
ab96de7e
AS
13595 if (cfun_frame_layout.first_restore_gpr != -1)
13596 {
775c43d3 13597 rtx rpat = restore_gprs (base,
ab96de7e 13598 off + (cfun_frame_layout.first_restore_gpr
9602b6a1 13599 - first) * UNITS_PER_LONG,
ab96de7e
AS
13600 cfun_frame_layout.first_restore_gpr,
13601 cfun_frame_layout.last_restore_gpr);
0621cf3c
RS
13602
13603 /* Remove REG_CFA_RESTOREs for registers that we no
13604 longer need to save. */
775c43d3
DM
13605 REG_NOTES (rpat) = REG_NOTES (insn);
13606 for (rtx *ptr = &REG_NOTES (rpat); *ptr; )
0621cf3c
RS
13607 if (REG_NOTE_KIND (*ptr) == REG_CFA_RESTORE
13608 && ((int) REGNO (XEXP (*ptr, 0))
13609 < cfun_frame_layout.first_restore_gpr))
13610 *ptr = XEXP (*ptr, 1);
13611 else
13612 ptr = &XEXP (*ptr, 1);
775c43d3 13613 new_insn = emit_insn_before (rpat, insn);
0621cf3c 13614 RTX_FRAME_RELATED_P (new_insn) = 1;
ab96de7e
AS
13615 INSN_ADDRESSES_NEW (new_insn, -1);
13616 }
89ce1c8f 13617
ab96de7e
AS
13618 remove_insn (insn);
13619 continue;
89ce1c8f
JJ
13620 }
13621
22a707a4 13622 if (cfun_frame_layout.first_restore_gpr == -1
6455a49e
AK
13623 && GET_CODE (pat) == SET
13624 && GENERAL_REG_P (SET_DEST (pat))
13625 && GET_CODE (SET_SRC (pat)) == MEM)
3062825f 13626 {
6455a49e 13627 set = pat;
ab96de7e
AS
13628 first = REGNO (SET_DEST (set));
13629 offset = const0_rtx;
13630 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
13631 off = INTVAL (offset);
c7453384 13632
ab96de7e
AS
13633 if (GET_CODE (base) != REG || off < 0)
13634 continue;
6455a49e 13635
ab96de7e
AS
13636 if (REGNO (base) != STACK_POINTER_REGNUM
13637 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
13638 continue;
29742ba4 13639
ab96de7e
AS
13640 remove_insn (insn);
13641 continue;
13642 }
13643 }
29742ba4
HP
13644}
13645
65b1d8ea
AK
13646/* On z10 and later the dynamic branch prediction must see the
13647 backward jump within a certain window. If not, it falls back to
13648 the static prediction. This function rearranges the loop backward
13649 branch in a way which makes the static prediction always correct.
13650 The function returns true if it added an instruction. */
b0f86a7e 13651static bool
775c43d3 13652s390_fix_long_loop_prediction (rtx_insn *insn)
b0f86a7e
AK
13653{
13654 rtx set = single_set (insn);
9b2ea071 13655 rtx code_label, label_ref;
e60365d3 13656 rtx_insn *uncond_jump;
775c43d3 13657 rtx_insn *cur_insn;
b0f86a7e
AK
13658 rtx tmp;
13659 int distance;
13660
13661 /* This will exclude branch on count and branch on index patterns
13662 since these are correctly statically predicted. */
13663 if (!set
13664 || SET_DEST (set) != pc_rtx
13665 || GET_CODE (SET_SRC(set)) != IF_THEN_ELSE)
13666 return false;
13667
177bc204
RS
13668 /* Skip conditional returns. */
13669 if (ANY_RETURN_P (XEXP (SET_SRC (set), 1))
13670 && XEXP (SET_SRC (set), 2) == pc_rtx)
13671 return false;
13672
b0f86a7e
AK
13673 label_ref = (GET_CODE (XEXP (SET_SRC (set), 1)) == LABEL_REF ?
13674 XEXP (SET_SRC (set), 1) : XEXP (SET_SRC (set), 2));
13675
13676 gcc_assert (GET_CODE (label_ref) == LABEL_REF);
13677
13678 code_label = XEXP (label_ref, 0);
13679
13680 if (INSN_ADDRESSES (INSN_UID (code_label)) == -1
13681 || INSN_ADDRESSES (INSN_UID (insn)) == -1
13682 || (INSN_ADDRESSES (INSN_UID (insn))
65b1d8ea 13683 - INSN_ADDRESSES (INSN_UID (code_label)) < PREDICT_DISTANCE))
b0f86a7e
AK
13684 return false;
13685
13686 for (distance = 0, cur_insn = PREV_INSN (insn);
65b1d8ea 13687 distance < PREDICT_DISTANCE - 6;
b0f86a7e
AK
13688 distance += get_attr_length (cur_insn), cur_insn = PREV_INSN (cur_insn))
13689 if (!cur_insn || JUMP_P (cur_insn) || LABEL_P (cur_insn))
13690 return false;
13691
9b2ea071 13692 rtx_code_label *new_label = gen_label_rtx ();
b0f86a7e 13693 uncond_jump = emit_jump_insn_after (
f7df4a84 13694 gen_rtx_SET (pc_rtx,
b0f86a7e
AK
13695 gen_rtx_LABEL_REF (VOIDmode, code_label)),
13696 insn);
13697 emit_label_after (new_label, uncond_jump);
13698
13699 tmp = XEXP (SET_SRC (set), 1);
13700 XEXP (SET_SRC (set), 1) = XEXP (SET_SRC (set), 2);
13701 XEXP (SET_SRC (set), 2) = tmp;
13702 INSN_CODE (insn) = -1;
13703
13704 XEXP (label_ref, 0) = new_label;
13705 JUMP_LABEL (insn) = new_label;
13706 JUMP_LABEL (uncond_jump) = code_label;
13707
13708 return true;
13709}
13710
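/* Illustration (sketch; Jcc/J stand for generic conditional and
   unconditional jumps): for a loop whose backward branch lies more
   than PREDICT_DISTANCE away from its target, the transformation
   above rewrites

     code_label:                     code_label:
       ...                             ...
       Jcc  code_label     into        J!cc  new_label
                                       J     code_label
                                     new_label:

   The conditional jump is inverted into a short forward branch and
   the backward jump becomes unconditional, so the static prediction
   (backward taken, forward not taken) is always correct.  */
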
d277db6b
WG
13711/* Returns 1 if INSN reads the value of REG for purposes not related
13712 to addressing of memory, and 0 otherwise. */
13713static int
775c43d3 13714s390_non_addr_reg_read_p (rtx reg, rtx_insn *insn)
d277db6b
WG
13715{
13716 return reg_referenced_p (reg, PATTERN (insn))
13717 && !reg_used_in_mem_p (REGNO (reg), PATTERN (insn));
13718}
13719
e3cba5e5
AK
13720/* Starting from INSN, find_cond_jump looks downwards in the insn
13721 stream for a single jump insn which is the last user of the
13722 condition code set in INSN. */
775c43d3
DM
13723static rtx_insn *
13724find_cond_jump (rtx_insn *insn)
e3cba5e5
AK
13725{
13726 for (; insn; insn = NEXT_INSN (insn))
13727 {
13728 rtx ite, cc;
13729
13730 if (LABEL_P (insn))
13731 break;
13732
13733 if (!JUMP_P (insn))
13734 {
13735 if (reg_mentioned_p (gen_rtx_REG (CCmode, CC_REGNUM), insn))
13736 break;
13737 continue;
13738 }
13739
13740 /* This will be triggered by a return. */
13741 if (GET_CODE (PATTERN (insn)) != SET)
13742 break;
13743
13744 gcc_assert (SET_DEST (PATTERN (insn)) == pc_rtx);
13745 ite = SET_SRC (PATTERN (insn));
13746
13747 if (GET_CODE (ite) != IF_THEN_ELSE)
13748 break;
13749
13750 cc = XEXP (XEXP (ite, 0), 0);
13751 if (!REG_P (cc) || !CC_REGNO_P (REGNO (cc)))
13752 break;
13753
13754 if (find_reg_note (insn, REG_DEAD, cc))
13755 return insn;
13756 break;
13757 }
13758
775c43d3 13759 return NULL;
e3cba5e5
AK
13760}
13761
13762/* Swap the condition in COND and the operands in OP0 and OP1 so that
13763 the semantics do not change. If NULL_RTX is passed as COND, the
13764 function tries to find the conditional jump starting with INSN. */
13765static void
775c43d3 13766s390_swap_cmp (rtx cond, rtx *op0, rtx *op1, rtx_insn *insn)
e3cba5e5
AK
13767{
13768 rtx tmp = *op0;
13769
13770 if (cond == NULL_RTX)
13771 {
e8a54173
DM
13772 rtx_insn *jump = find_cond_jump (NEXT_INSN (insn));
13773 rtx set = jump ? single_set (jump) : NULL_RTX;
e3cba5e5 13774
e8a54173 13775 if (set == NULL_RTX)
e3cba5e5
AK
13776 return;
13777
e8a54173 13778 cond = XEXP (SET_SRC (set), 0);
e3cba5e5
AK
13779 }
13780
13781 *op0 = *op1;
13782 *op1 = tmp;
13783 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
13784}
d277db6b
WG
13785
13786/* On z10, instructions of the compare-and-branch family have the
13787 property of accessing the register occurring as second operand with
13788 its bits complemented. If such a compare is grouped with a second
13789 instruction that accesses the same register non-complemented, and
13790 if that register's value is delivered via a bypass, then the
13791 pipeline recycles, thereby causing significant performance decline.
13792 This function locates such situations and exchanges the two
b0f86a7e
AK
13793 operands of the compare. The function returns true whenever it
13794 added an insn. */
13795static bool
775c43d3 13796s390_z10_optimize_cmp (rtx_insn *insn)
d277db6b 13797{
775c43d3 13798 rtx_insn *prev_insn, *next_insn;
b0f86a7e
AK
13799 bool insn_added_p = false;
13800 rtx cond, *op0, *op1;
d277db6b 13801
b0f86a7e 13802 if (GET_CODE (PATTERN (insn)) == PARALLEL)
d277db6b 13803 {
b0f86a7e
AK
13804 /* Handle compare and branch and branch on count
13805 instructions. */
13806 rtx pattern = single_set (insn);
e3cba5e5 13807
b0f86a7e
AK
13808 if (!pattern
13809 || SET_DEST (pattern) != pc_rtx
13810 || GET_CODE (SET_SRC (pattern)) != IF_THEN_ELSE)
13811 return false;
d277db6b 13812
b0f86a7e
AK
13813 cond = XEXP (SET_SRC (pattern), 0);
13814 op0 = &XEXP (cond, 0);
13815 op1 = &XEXP (cond, 1);
13816 }
13817 else if (GET_CODE (PATTERN (insn)) == SET)
13818 {
13819 rtx src, dest;
d277db6b 13820
b0f86a7e
AK
13821 /* Handle normal compare instructions. */
13822 src = SET_SRC (PATTERN (insn));
13823 dest = SET_DEST (PATTERN (insn));
e3cba5e5 13824
b0f86a7e
AK
13825 if (!REG_P (dest)
13826 || !CC_REGNO_P (REGNO (dest))
13827 || GET_CODE (src) != COMPARE)
13828 return false;
e3cba5e5 13829
b0f86a7e
AK
13830 /* s390_swap_cmp will try to find the conditional
13831 jump when passing NULL_RTX as condition. */
13832 cond = NULL_RTX;
13833 op0 = &XEXP (src, 0);
13834 op1 = &XEXP (src, 1);
13835 }
13836 else
13837 return false;
e3cba5e5 13838
b0f86a7e
AK
13839 if (!REG_P (*op0) || !REG_P (*op1))
13840 return false;
e3cba5e5 13841
2dfdbf2b
AK
13842 if (GET_MODE_CLASS (GET_MODE (*op0)) != MODE_INT)
13843 return false;
13844
b0f86a7e
AK
13845 /* Swap the COMPARE arguments and its mask if there is a
13846 conflicting access in the previous insn. */
33ab2bd4 13847 prev_insn = prev_active_insn (insn);
b0f86a7e
AK
13848 if (prev_insn != NULL_RTX && INSN_P (prev_insn)
13849 && reg_referenced_p (*op1, PATTERN (prev_insn)))
13850 s390_swap_cmp (cond, op0, op1, insn);
13851
13852 /* Check if there is a conflict with the next insn. If there
13853 was no conflict with the previous insn, then swap the
13854 COMPARE arguments and its mask. If we already swapped
13855 the operands, or if swapping them would cause a conflict
13856 with the previous insn, issue a NOP after the COMPARE in
13857 order to separate the two instructions. */
33ab2bd4 13858 next_insn = next_active_insn (insn);
b0f86a7e
AK
13859 if (next_insn != NULL_RTX && INSN_P (next_insn)
13860 && s390_non_addr_reg_read_p (*op1, next_insn))
13861 {
e3cba5e5 13862 if (prev_insn != NULL_RTX && INSN_P (prev_insn)
b0f86a7e 13863 && s390_non_addr_reg_read_p (*op0, prev_insn))
e3cba5e5 13864 {
b0f86a7e
AK
13865 if (REGNO (*op1) == 0)
13866 emit_insn_after (gen_nop1 (), insn);
e3cba5e5 13867 else
b0f86a7e
AK
13868 emit_insn_after (gen_nop (), insn);
13869 insn_added_p = true;
d277db6b 13870 }
b0f86a7e
AK
13871 else
13872 s390_swap_cmp (cond, op0, op1, insn);
d277db6b 13873 }
b0f86a7e 13874 return insn_added_p;
d277db6b
WG
13875}
13876
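/* Illustration (sketch): given a compare (COMPARE (reg A) (reg B))
   guarded by condition LE, s390_swap_cmp above yields
   (COMPARE (reg B) (reg A)) guarded by GE (via swap_condition), which
   is semantically identical but moves the bypassed register out of
   the complemented second-operand slot.  If neither operand order
   avoids a conflict, the NOP emitted above separates the compare from
   the conflicting insn instead.  */
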
539405d5
AK
13877/* Number of INSNs to be scanned backward in the last BB of the loop
13878 and forward in the first BB of the loop. This usually should be a
13879 bit more than the number of INSNs which could go into one
13880 group. */
13881#define S390_OSC_SCAN_INSN_NUM 5
13882
13883/* Scan LOOP for static OSC (operand-store-compare) collisions and
13884 return true if an osc_break should be issued for this loop. */
13885static bool
13886s390_adjust_loop_scan_osc (struct loop *loop)
13888{
13889 HARD_REG_SET modregs, newregs;
13890 rtx_insn *insn, *store_insn = NULL;
13891 rtx set;
13892 struct s390_address addr_store, addr_load;
13893 subrtx_iterator::array_type array;
13894 int insn_count;
13895
13896 CLEAR_HARD_REG_SET (modregs);
13897
13898 insn_count = 0;
13899 FOR_BB_INSNS_REVERSE (loop->latch, insn)
13900 {
13901 if (!INSN_P (insn) || INSN_CODE (insn) <= 0)
13902 continue;
13903
13904 insn_count++;
13905 if (insn_count > S390_OSC_SCAN_INSN_NUM)
13906 return false;
13907
13908 find_all_hard_reg_sets (insn, &newregs, true);
13909 IOR_HARD_REG_SET (modregs, newregs);
13910
13911 set = single_set (insn);
13912 if (!set)
13913 continue;
13914
13915 if (MEM_P (SET_DEST (set))
13916 && s390_decompose_address (XEXP (SET_DEST (set), 0), &addr_store))
13917 {
13918 store_insn = insn;
13919 break;
13920 }
13921 }
13922
13923 if (store_insn == NULL_RTX)
13924 return false;
13925
13926 insn_count = 0;
13927 FOR_BB_INSNS (loop->header, insn)
13928 {
13929 if (!INSN_P (insn) || INSN_CODE (insn) <= 0)
13930 continue;
13931
13932 if (insn == store_insn)
13933 return false;
13934
13935 insn_count++;
13936 if (insn_count > S390_OSC_SCAN_INSN_NUM)
13937 return false;
13938
13939 find_all_hard_reg_sets (insn, &newregs, true);
13940 IOR_HARD_REG_SET (modregs, newregs);
13941
13942 set = single_set (insn);
13943 if (!set)
13944 continue;
13945
13946 /* An intermediate store disrupts static OSC checking
13947 anyway. */
13948 if (MEM_P (SET_DEST (set))
13949 && s390_decompose_address (XEXP (SET_DEST (set), 0), NULL))
13950 return false;
13951
13952 FOR_EACH_SUBRTX (iter, array, SET_SRC (set), NONCONST)
13953 if (MEM_P (*iter)
13954 && s390_decompose_address (XEXP (*iter, 0), &addr_load)
13955 && rtx_equal_p (addr_load.base, addr_store.base)
13956 && rtx_equal_p (addr_load.indx, addr_store.indx)
13957 && rtx_equal_p (addr_load.disp, addr_store.disp))
13958 {
13959 if ((addr_load.base != NULL_RTX
13960 && TEST_HARD_REG_BIT (modregs, REGNO (addr_load.base)))
13961 || (addr_load.indx != NULL_RTX
13962 && TEST_HARD_REG_BIT (modregs, REGNO (addr_load.indx))))
13963 return true;
13964 }
13965 }
13966 return false;
13967}
13968
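/* Illustration (hypothetical source loop, assumed for the example):

     do
       *p = f (*p);
     while (++p < end);

   The store through p at the bottom of one iteration and the load
   through p at the top of the next share the same base/index/
   displacement pattern while p is modified in between.  The hardware's
   static operand-store-compare prediction keys on the matching address
   pattern and stalls the load although the addresses differ, so an
   osc_break is inserted at the end of the latch to avoid the false
   prediction.  */
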
13969/* Look for adjustments which can be done on simple innermost
13970 loops. */
13971static void
13972s390_adjust_loops ()
13973{
13974 struct loop *loop = NULL;
13975
13976 df_analyze ();
13977 compute_bb_for_insn ();
13978
13979 /* Find the loops. */
13980 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
13981
13982 FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
13983 {
13984 if (dump_file)
13985 {
13986 flow_loop_dump (loop, dump_file, NULL, 0);
13987 fprintf (dump_file, ";; OSC loop scan Loop: ");
13988 }
13989 if (loop->latch == NULL
13990 || pc_set (BB_END (loop->latch)) == NULL_RTX
13991 || !s390_adjust_loop_scan_osc (loop))
13992 {
13993 if (dump_file)
13994 {
13995 if (loop->latch == NULL)
13996 fprintf (dump_file, " multiple backward jumps\n");
13997 else
13998 {
13999 fprintf (dump_file, " header insn: %d latch insn: %d ",
14000 INSN_UID (BB_HEAD (loop->header)),
14001 INSN_UID (BB_END (loop->latch)));
14002 if (pc_set (BB_END (loop->latch)) == NULL_RTX)
14003 fprintf (dump_file, " loop does not end with a jump\n");
14004 else
14005 fprintf (dump_file, " not instrumented\n");
14006 }
14007 }
14008 }
14009 else
14010 {
14011 rtx_insn *new_insn;
14012
14013 if (dump_file)
14014 fprintf (dump_file, " adding OSC break insn: ");
14015 new_insn = emit_insn_before (gen_osc_break (),
14016 BB_END (loop->latch));
14017 INSN_ADDRESSES_NEW (new_insn, -1);
14018 }
14019 }
14020
14021 loop_optimizer_finalize ();
14022
14023 df_finish_pass (false);
14024}
14025
ab96de7e 14026/* Perform machine-dependent processing. */
ed9676cf 14027
ab96de7e
AS
14028static void
14029s390_reorg (void)
ed9676cf 14030{
ab96de7e 14031 bool pool_overflow = false;
f8af0e30 14032 int hw_before, hw_after;
ed9676cf 14033
539405d5
AK
14034 if (s390_tune == PROCESSOR_2964_Z13)
14035 s390_adjust_loops ();
14036
ab96de7e
AS
14037 /* Make sure all splits have been performed; splits after
14038 machine_dependent_reorg might confuse insn length counts. */
14039 split_all_insns_noflow ();
38899e29 14040
ab96de7e
AS
14041 /* Install the main literal pool and the associated base
14042 register load insns.
38899e29 14043
ab96de7e
AS
14044 In addition, there are two problematic situations we need
14045 to correct:
ed9676cf 14046
ab96de7e
AS
14047 - the literal pool might be > 4096 bytes in size, so that
14048 some of its elements cannot be directly accessed
ed9676cf 14049
ab96de7e
AS
14050 - a branch target might be > 64K away from the branch, so that
14051 it is not possible to use a PC-relative instruction.
ed9676cf 14052
ab96de7e
AS
14053 To fix those, we split the single literal pool into multiple
14054 pool chunks, reloading the pool base register at various
14055 points throughout the function to ensure it always points to
14056 the pool chunk the following code expects, and/or replace
14057 PC-relative branches by absolute branches.
ed9676cf 14058
ab96de7e
AS
14059 However, the two problems are interdependent: splitting the
14060 literal pool can move a branch further away from its target,
14061 causing the 64K limit to overflow, and on the other hand,
14062 replacing a PC-relative branch by an absolute branch means
14063 we need to put the branch target address into the literal
14064 pool, possibly causing it to overflow.
ffdda752 14065
ab96de7e
AS
14066 So, we loop trying to fix up both problems until we manage
14067 to satisfy both conditions at the same time. Note that the
14068 loop is guaranteed to terminate as every pass of the loop
14069 strictly decreases the total number of PC-relative branches
14070 in the function. (This is not completely true as there
14071 might be branch-over-pool insns introduced by chunkify_start.
14072 Those never need to be split however.) */
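  /* Background on the two limits (derived from the instruction
     encodings): pool entries are addressed via base + displacement
     with a 12-bit unsigned displacement, hence the 4096-byte limit,
     while PC-relative branches use a 16-bit signed halfword offset,
     hence the +-64K limit.  */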
ffdda752 14073
ab96de7e
AS
14074 for (;;)
14075 {
14076 struct constant_pool *pool = NULL;
a628ab6d 14077
ab96de7e
AS
14078 /* Collect the literal pool. */
14079 if (!pool_overflow)
14080 {
14081 pool = s390_mainpool_start ();
14082 if (!pool)
14083 pool_overflow = true;
14084 }
a628ab6d 14085
ab96de7e
AS
14086 /* If literal pool overflowed, start to chunkify it. */
14087 if (pool_overflow)
14088 pool = s390_chunkify_start ();
a628ab6d 14089
ab96de7e
AS
14090 /* Split out-of-range branches. If this has created new
14091 literal pool entries, cancel current chunk list and
14092 recompute it. zSeries machines have large branch
14093 instructions, so we never need to split a branch. */
14094 if (!TARGET_CPU_ZARCH && s390_split_branches ())
14095 {
14096 if (pool_overflow)
14097 s390_chunkify_cancel (pool);
14098 else
14099 s390_mainpool_cancel (pool);
a628ab6d 14100
ab96de7e
AS
14101 continue;
14102 }
14103
14104 /* If we made it up to here, both conditions are satisfied.
14105 Finish up literal pool related changes. */
14106 if (pool_overflow)
14107 s390_chunkify_finish (pool);
14108 else
14109 s390_mainpool_finish (pool);
14110
14111 /* We're done splitting branches. */
14112 cfun->machine->split_branches_pending_p = false;
14113 break;
a628ab6d 14114 }
a628ab6d 14115
d24959df
UW
14116 /* Generate out-of-pool execute target insns. */
14117 if (TARGET_CPU_ZARCH)
14118 {
775c43d3
DM
14119 rtx_insn *insn, *target;
14120 rtx label;
d24959df
UW
14121
14122 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
14123 {
14124 label = s390_execute_label (insn);
14125 if (!label)
14126 continue;
14127
14128 gcc_assert (label != const0_rtx);
14129
14130 target = emit_label (XEXP (label, 0));
14131 INSN_ADDRESSES_NEW (target, -1);
14132
14133 target = emit_insn (s390_execute_target (insn));
14134 INSN_ADDRESSES_NEW (target, -1);
14135 }
14136 }
14137
14138 /* Try to optimize prologue and epilogue further. */
ab96de7e 14139 s390_optimize_prologue ();
d277db6b 14140
65b1d8ea 14141 /* Walk over the insns and do some >=z10 specific changes. */
bacf8ec3 14142 if (s390_tune >= PROCESSOR_2097_Z10)
b0f86a7e 14143 {
775c43d3 14144 rtx_insn *insn;
b0f86a7e
AK
14145 bool insn_added_p = false;
14146
14147 /* The insn lengths and addresses have to be up to date for the
14148 following manipulations. */
14149 shorten_branches (get_insns ());
14150
14151 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
14152 {
14153 if (!INSN_P (insn) || INSN_CODE (insn) <= 0)
14154 continue;
14155
14156 if (JUMP_P (insn))
65b1d8ea 14157 insn_added_p |= s390_fix_long_loop_prediction (insn);
b0f86a7e 14158
65b1d8ea
AK
14159 if ((GET_CODE (PATTERN (insn)) == PARALLEL
14160 || GET_CODE (PATTERN (insn)) == SET)
14161 && s390_tune == PROCESSOR_2097_Z10)
b0f86a7e
AK
14162 insn_added_p |= s390_z10_optimize_cmp (insn);
14163 }
14164
14165 /* Adjust branches if we added new instructions. */
14166 if (insn_added_p)
14167 shorten_branches (get_insns ());
14168 }
f8af0e30
DV
14169
14170 s390_function_num_hotpatch_hw (current_function_decl, &hw_before, &hw_after);
14171 if (hw_after > 0)
14172 {
14173 rtx_insn *insn;
14174
2d38d809 14175 /* Insert NOPs for hotpatching. */
f8af0e30 14176 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1f8d3e42
DV
14177 /* Emit NOPs
14178 1. inside the area covered by debug information to allow setting
14179 breakpoints at the NOPs,
14180 2. before any insn which results in an asm instruction,
14181 3. before in-function labels to avoid jumping to the NOPs, for
14182 example as part of a loop,
14183 4. before any barrier in case the function is completely empty
14184 (__builtin_unreachable ()) and has neither internal labels nor
14185 active insns.
14186 */
14187 if (active_insn_p (insn) || BARRIER_P (insn) || LABEL_P (insn))
14188 break;
14189 /* Output a series of NOPs before the first active insn. */
14190 while (insn && hw_after > 0)
f8af0e30
DV
14191 {
14192 if (hw_after >= 3 && TARGET_CPU_ZARCH)
14193 {
1f8d3e42 14194 emit_insn_before (gen_nop_6_byte (), insn);
f8af0e30
DV
14195 hw_after -= 3;
14196 }
14197 else if (hw_after >= 2)
14198 {
1f8d3e42 14199 emit_insn_before (gen_nop_4_byte (), insn);
f8af0e30
DV
14200 hw_after -= 2;
14201 }
14202 else
14203 {
1f8d3e42 14204 emit_insn_before (gen_nop_2_byte (), insn);
f8af0e30
DV
14205 hw_after -= 1;
14206 }
14207 }
f8af0e30 14208 }
ab96de7e 14209}
ed9676cf 14210
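/* Worked example (sketch): with hw_after == 5 and TARGET_CPU_ZARCH
   the loop above emits a six-byte NOP (3 halfwords) followed by a
   four-byte NOP (2 halfwords) in front of the first active insn,
   covering the requested 5 halfwords.  */
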
3a892e44
AK
14211/* Return true if INSN is a fp load insn writing register REGNO. */
14212static inline bool
647d790d 14213s390_fpload_toreg (rtx_insn *insn, unsigned int regno)
3a892e44
AK
14214{
14215 rtx set;
14216 enum attr_type flag = s390_safe_attr_type (insn);
14217
14218 if (flag != TYPE_FLOADSF && flag != TYPE_FLOADDF)
14219 return false;
14220
14221 set = single_set (insn);
14222
14223 if (set == NULL_RTX)
14224 return false;
14225
14226 if (!REG_P (SET_DEST (set)) || !MEM_P (SET_SRC (set)))
14227 return false;
14228
14229 if (REGNO (SET_DEST (set)) != regno)
14230 return false;
14231
14232 return true;
14233}
14234
14235/* This value describes the distance to be avoided between an
5764ee3c 14236 arithmetic fp instruction and an fp load writing the same register.
3a892e44
AK
14237 Z10_EARLYLOAD_DISTANCE - 1 as well as Z10_EARLYLOAD_DISTANCE + 1 is
14238 fine but the exact value has to be avoided. Otherwise the FP
14239 pipeline will throw an exception causing a major penalty. */
14240#define Z10_EARLYLOAD_DISTANCE 7
14241
14242/* Rearrange the ready list in order to avoid the situation described
14243 for Z10_EARLYLOAD_DISTANCE. A problematic load instruction is
14244 moved to the very end of the ready list. */
14245static void
ce1ce33a 14246s390_z10_prevent_earlyload_conflicts (rtx_insn **ready, int *nready_p)
3a892e44
AK
14247{
14248 unsigned int regno;
14249 int nready = *nready_p;
ce1ce33a 14250 rtx_insn *tmp;
3a892e44 14251 int i;
775c43d3 14252 rtx_insn *insn;
3a892e44
AK
14253 rtx set;
14254 enum attr_type flag;
14255 int distance;
14256
14257 /* Skip DISTANCE - 1 active insns. */
14258 for (insn = last_scheduled_insn, distance = Z10_EARLYLOAD_DISTANCE - 1;
14259 distance > 0 && insn != NULL_RTX;
14260 distance--, insn = prev_active_insn (insn))
14261 if (CALL_P (insn) || JUMP_P (insn))
14262 return;
14263
14264 if (insn == NULL_RTX)
14265 return;
14266
14267 set = single_set (insn);
14268
14269 if (set == NULL_RTX || !REG_P (SET_DEST (set))
14270 || GET_MODE_CLASS (GET_MODE (SET_DEST (set))) != MODE_FLOAT)
14271 return;
14272
14273 flag = s390_safe_attr_type (insn);
14274
14275 if (flag == TYPE_FLOADSF || flag == TYPE_FLOADDF)
14276 return;
14277
14278 regno = REGNO (SET_DEST (set));
14279 i = nready - 1;
14280
14281 while (!s390_fpload_toreg (ready[i], regno) && i > 0)
14282 i--;
14283
14284 if (!i)
14285 return;
14286
14287 tmp = ready[i];
ce1ce33a 14288 memmove (&ready[1], &ready[0], sizeof (rtx_insn *) * i);
3a892e44
AK
14289 ready[0] = tmp;
14290}
14291
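/* Worked example (sketch): with ready == {A, B, C, D}, where D is
   issued next, and the problematic fp load found at i == 2 (insn C),
   the memmove above shifts A and B up by one slot and C moves to the
   bottom of the queue:

     before:  {A, B, C, D}
     after:   {C, A, B, D}

   C is thus issued as late as possible while the relative order of
   the remaining insns is preserved.  */
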
22ac2c2f
AK
14292
14293/* The s390_sched_state variable tracks the state of the current or
14294 the last instruction group.
14295
14296 0,1,2 number of instructions scheduled in the current group
14297 3 the last group is complete - normal insns
14298 4 the last group was a cracked/expanded insn */
14299
14300static int s390_sched_state;
14301
23902021
AK
14302#define S390_SCHED_STATE_NORMAL 3
14303#define S390_SCHED_STATE_CRACKED 4
22ac2c2f 14304
23902021
AK
14305#define S390_SCHED_ATTR_MASK_CRACKED 0x1
14306#define S390_SCHED_ATTR_MASK_EXPANDED 0x2
14307#define S390_SCHED_ATTR_MASK_ENDGROUP 0x4
14308#define S390_SCHED_ATTR_MASK_GROUPALONE 0x8
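/* Illustration (derived from s390_sched_variable_issue below): the
   resulting state machine is

     normal insn:           0 -> 1 -> 2 -> S390_SCHED_STATE_NORMAL
                            S390_SCHED_STATE_NORMAL  -> 1
                            S390_SCHED_STATE_CRACKED -> S390_SCHED_STATE_NORMAL
     cracked/expanded:      any state -> S390_SCHED_STATE_CRACKED
     endgroup/groupalone:   any state -> S390_SCHED_STATE_NORMAL

   The attribute mask bits may combine, e.g. an insn that is cracked
   and also ends a group yields S390_SCHED_ATTR_MASK_CRACKED
   | S390_SCHED_ATTR_MASK_ENDGROUP == 0x5; the cracked property wins
   for the state transition.  */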
22ac2c2f
AK
14309
14310static unsigned int
84034c69 14311s390_get_sched_attrmask (rtx_insn *insn)
22ac2c2f
AK
14312{
14313 unsigned int mask = 0;
14314
23902021
AK
14315 switch (s390_tune)
14316 {
14317 case PROCESSOR_2827_ZEC12:
14318 if (get_attr_zEC12_cracked (insn))
14319 mask |= S390_SCHED_ATTR_MASK_CRACKED;
14320 if (get_attr_zEC12_expanded (insn))
14321 mask |= S390_SCHED_ATTR_MASK_EXPANDED;
14322 if (get_attr_zEC12_endgroup (insn))
14323 mask |= S390_SCHED_ATTR_MASK_ENDGROUP;
14324 if (get_attr_zEC12_groupalone (insn))
14325 mask |= S390_SCHED_ATTR_MASK_GROUPALONE;
14326 break;
14327 case PROCESSOR_2964_Z13:
2731a5b3 14328 case PROCESSOR_3906_Z14:
23902021
AK
14329 if (get_attr_z13_cracked (insn))
14330 mask |= S390_SCHED_ATTR_MASK_CRACKED;
14331 if (get_attr_z13_expanded (insn))
14332 mask |= S390_SCHED_ATTR_MASK_EXPANDED;
14333 if (get_attr_z13_endgroup (insn))
14334 mask |= S390_SCHED_ATTR_MASK_ENDGROUP;
14335 if (get_attr_z13_groupalone (insn))
14336 mask |= S390_SCHED_ATTR_MASK_GROUPALONE;
14337 break;
14338 default:
14339 gcc_unreachable ();
14340 }
14341 return mask;
14342}
14343
14344static unsigned int
14345s390_get_unit_mask (rtx_insn *insn, int *units)
14346{
14347 unsigned int mask = 0;
14348
14349 switch (s390_tune)
14350 {
14351 case PROCESSOR_2964_Z13:
2731a5b3 14352 case PROCESSOR_3906_Z14:
23902021
AK
14353 *units = 3;
14354 if (get_attr_z13_unit_lsu (insn))
14355 mask |= 1 << 0;
14356 if (get_attr_z13_unit_fxu (insn))
14357 mask |= 1 << 1;
14358 if (get_attr_z13_unit_vfu (insn))
14359 mask |= 1 << 2;
14360 break;
14361 default:
14362 gcc_unreachable ();
14363 }
22ac2c2f
AK
14364 return mask;
14365}
14366
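/* Example (sketch; the attribute values are assumed for
   illustration): on z13 an insn carrying the z13_unit_fxu and
   z13_unit_vfu attributes yields *units == 3 and
   mask == (1 << 1) | (1 << 2) == 6, with bit 0 standing for the LSU.
   The scoring code below uses these bits to favour insns whose units
   have been idle the longest.  */
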
14367/* Return the scheduling score for INSN. The higher the score the
14368 better. The score is calculated from the OOO scheduling attributes
14369 of INSN and the scheduling state s390_sched_state. */
14370static int
84034c69 14371s390_sched_score (rtx_insn *insn)
22ac2c2f
AK
14372{
14373 unsigned int mask = s390_get_sched_attrmask (insn);
14374 int score = 0;
14375
14376 switch (s390_sched_state)
14377 {
14378 case 0:
14379 /* Try to put insns into the first slot which would otherwise
14380 break a group. */
23902021
AK
14381 if ((mask & S390_SCHED_ATTR_MASK_CRACKED) != 0
14382 || (mask & S390_SCHED_ATTR_MASK_EXPANDED) != 0)
22ac2c2f 14383 score += 5;
23902021 14384 if ((mask & S390_SCHED_ATTR_MASK_GROUPALONE) != 0)
22ac2c2f 14385 score += 10;
1d92cba9 14386 /* fallthrough */
22ac2c2f
AK
14387 case 1:
14388 /* Prefer not cracked insns while trying to put together a
14389 group. */
23902021
AK
14390 if ((mask & S390_SCHED_ATTR_MASK_CRACKED) == 0
14391 && (mask & S390_SCHED_ATTR_MASK_EXPANDED) == 0
14392 && (mask & S390_SCHED_ATTR_MASK_GROUPALONE) == 0)
22ac2c2f 14393 score += 10;
23902021 14394 if ((mask & S390_SCHED_ATTR_MASK_ENDGROUP) == 0)
22ac2c2f
AK
14395 score += 5;
14396 break;
14397 case 2:
14398 /* Prefer not cracked insns while trying to put together a
14399 group. */
23902021
AK
14400 if ((mask & S390_SCHED_ATTR_MASK_CRACKED) == 0
14401 && (mask & S390_SCHED_ATTR_MASK_EXPANDED) == 0
14402 && (mask & S390_SCHED_ATTR_MASK_GROUPALONE) == 0)
22ac2c2f
AK
14403 score += 10;
14404 /* Prefer endgroup insns in the last slot. */
23902021 14405 if ((mask & S390_SCHED_ATTR_MASK_ENDGROUP) != 0)
22ac2c2f
AK
14406 score += 10;
14407 break;
23902021 14408 case S390_SCHED_STATE_NORMAL:
22ac2c2f 14409 /* Prefer not cracked insns if the last was not cracked. */
23902021
AK
14410 if ((mask & S390_SCHED_ATTR_MASK_CRACKED) == 0
14411 && (mask & S390_SCHED_ATTR_MASK_EXPANDED) == 0)
22ac2c2f 14412 score += 5;
23902021 14413 if ((mask & S390_SCHED_ATTR_MASK_GROUPALONE) != 0)
22ac2c2f
AK
14414 score += 10;
14415 break;
23902021 14416 case S390_SCHED_STATE_CRACKED:
22ac2c2f
AK
14417 /* Try to keep cracked insns together to prevent them from
14418 interrupting groups. */
23902021
AK
14419 if ((mask & S390_SCHED_ATTR_MASK_CRACKED) != 0
14420 || (mask & S390_SCHED_ATTR_MASK_EXPANDED) != 0)
22ac2c2f
AK
14421 score += 5;
14422 break;
14423 }
23902021 14424
6654e96f 14425 if (s390_tune >= PROCESSOR_2964_Z13)
23902021
AK
14426 {
14427 int units, i;
14428 unsigned unit_mask, m = 1;
14429
14430 unit_mask = s390_get_unit_mask (insn, &units);
14431 gcc_assert (units <= MAX_SCHED_UNITS);
14432
14433 /* Add a score in range 0..MAX_SCHED_MIX_SCORE depending on how long
14434 ago the last insn of this unit type got scheduled. This is
14435 supposed to help providing a proper instruction mix to the
14436 CPU. */
14437 for (i = 0; i < units; i++, m <<= 1)
14438 if (m & unit_mask)
14439 score += (last_scheduled_unit_distance[i] * MAX_SCHED_MIX_SCORE /
14440 MAX_SCHED_MIX_DISTANCE);
14441 }
22ac2c2f
AK
14442 return score;
14443}
14444
3a892e44 14445/* This function is called via hook TARGET_SCHED_REORDER before
631b20a7 14446 issuing one insn from list READY which contains *NREADYP entries.
3a892e44
AK
14447 For target z10 it reorders load instructions to avoid early load
14448 conflicts in the floating point pipeline. */
14449static int
22ac2c2f 14450s390_sched_reorder (FILE *file, int verbose,
ce1ce33a 14451 rtx_insn **ready, int *nreadyp, int clock ATTRIBUTE_UNUSED)
3a892e44 14452{
bacf8ec3
DV
14453 if (s390_tune == PROCESSOR_2097_Z10
14454 && reload_completed
14455 && *nreadyp > 1)
14456 s390_z10_prevent_earlyload_conflicts (ready, nreadyp);
3a892e44 14457
bacf8ec3 14458 if (s390_tune >= PROCESSOR_2827_ZEC12
22ac2c2f
AK
14459 && reload_completed
14460 && *nreadyp > 1)
14461 {
14462 int i;
14463 int last_index = *nreadyp - 1;
14464 int max_index = -1;
14465 int max_score = -1;
ce1ce33a 14466 rtx_insn *tmp;
22ac2c2f
AK
14467
14468 /* Just move the insn with the highest score to the top (the
14469 end) of the list. A full sort is not needed since a conflict
14470 in the hazard recognition cannot happen. So the top insn in
14471 the ready list will always be taken. */
14472 for (i = last_index; i >= 0; i--)
14473 {
14474 int score;
14475
14476 if (recog_memoized (ready[i]) < 0)
14477 continue;
14478
14479 score = s390_sched_score (ready[i]);
14480 if (score > max_score)
14481 {
14482 max_score = score;
14483 max_index = i;
14484 }
14485 }
14486
14487 if (max_index != -1)
14488 {
14489 if (max_index != last_index)
14490 {
14491 tmp = ready[max_index];
14492 ready[max_index] = ready[last_index];
14493 ready[last_index] = tmp;
14494
14495 if (verbose > 5)
14496 fprintf (file,
23902021 14497 ";;\t\tBACKEND: move insn %d to the top of list\n",
22ac2c2f
AK
14498 INSN_UID (ready[last_index]));
14499 }
14500 else if (verbose > 5)
14501 fprintf (file,
23902021 14502 ";;\t\tBACKEND: best insn %d already on top\n",
22ac2c2f
AK
14503 INSN_UID (ready[last_index]));
14504 }
14505
14506 if (verbose > 5)
14507 {
14508 fprintf (file, "ready list ooo attributes - sched state: %d\n",
14509 s390_sched_state);
14510
14511 for (i = last_index; i >= 0; i--)
14512 {
23902021
AK
14513 unsigned int sched_mask;
14514 rtx_insn *insn = ready[i];
14515
14516 if (recog_memoized (insn) < 0)
22ac2c2f 14517 continue;
23902021
AK
14518
14519 sched_mask = s390_get_sched_attrmask (insn);
14520 fprintf (file, ";;\t\tBACKEND: insn %d score: %d: ",
14521 INSN_UID (insn),
14522 s390_sched_score (insn));
14523#define PRINT_SCHED_ATTR(M, ATTR) fprintf (file, "%s ",\
14524 ((M) & sched_mask) ? #ATTR : "");
14525 PRINT_SCHED_ATTR (S390_SCHED_ATTR_MASK_CRACKED, cracked);
14526 PRINT_SCHED_ATTR (S390_SCHED_ATTR_MASK_EXPANDED, expanded);
14527 PRINT_SCHED_ATTR (S390_SCHED_ATTR_MASK_ENDGROUP, endgroup);
14528 PRINT_SCHED_ATTR (S390_SCHED_ATTR_MASK_GROUPALONE, groupalone);
14529#undef PRINT_SCHED_ATTR
6654e96f 14530 if (s390_tune >= PROCESSOR_2964_Z13)
23902021
AK
14531 {
14532 unsigned int unit_mask, m = 1;
14533 int units, j;
14534
14535 unit_mask = s390_get_unit_mask (insn, &units);
14536 fprintf (file, "(units:");
14537 for (j = 0; j < units; j++, m <<= 1)
14538 if (m & unit_mask)
14539 fprintf (file, " u%d", j);
14540 fprintf (file, ")");
14541 }
22ac2c2f
AK
14542 fprintf (file, "\n");
14543 }
14544 }
14545 }
14546
3a892e44
AK
14547 return s390_issue_rate ();
14548}
14549
22ac2c2f 14550
3a892e44
AK
14551/* This function is called via hook TARGET_SCHED_VARIABLE_ISSUE after
14552 the scheduler has issued INSN. It stores the last issued insn into
14553 last_scheduled_insn in order to make it available for
14554 s390_sched_reorder. */
14555static int
ac44248e 14556s390_sched_variable_issue (FILE *file, int verbose, rtx_insn *insn, int more)
3a892e44
AK
14557{
14558 last_scheduled_insn = insn;
14559
bacf8ec3 14560 if (s390_tune >= PROCESSOR_2827_ZEC12
22ac2c2f
AK
14561 && reload_completed
14562 && recog_memoized (insn) >= 0)
14563 {
14564 unsigned int mask = s390_get_sched_attrmask (insn);
14565
23902021
AK
14566 if ((mask & S390_SCHED_ATTR_MASK_CRACKED) != 0
14567 || (mask & S390_SCHED_ATTR_MASK_EXPANDED) != 0)
14568 s390_sched_state = S390_SCHED_STATE_CRACKED;
14569 else if ((mask & S390_SCHED_ATTR_MASK_ENDGROUP) != 0
14570 || (mask & S390_SCHED_ATTR_MASK_GROUPALONE) != 0)
14571 s390_sched_state = S390_SCHED_STATE_NORMAL;
22ac2c2f
AK
14572 else
14573 {
14574 /* Only normal insns are left (mask == 0). */
14575 switch (s390_sched_state)
14576 {
14577 case 0:
14578 case 1:
14579 case 2:
23902021
AK
14580 case S390_SCHED_STATE_NORMAL:
14581 if (s390_sched_state == S390_SCHED_STATE_NORMAL)
22ac2c2f
AK
14582 s390_sched_state = 1;
14583 else
14584 s390_sched_state++;
14585
14586 break;
23902021
AK
14587 case S390_SCHED_STATE_CRACKED:
14588 s390_sched_state = S390_SCHED_STATE_NORMAL;
22ac2c2f
AK
14589 break;
14590 }
14591 }
23902021 14592
6654e96f 14593 if (s390_tune >= PROCESSOR_2964_Z13)
23902021
AK
14594 {
14595 int units, i;
14596 unsigned unit_mask, m = 1;
14597
14598 unit_mask = s390_get_unit_mask (insn, &units);
14599 gcc_assert (units <= MAX_SCHED_UNITS);
14600
14601 for (i = 0; i < units; i++, m <<= 1)
14602 if (m & unit_mask)
14603 last_scheduled_unit_distance[i] = 0;
14604 else if (last_scheduled_unit_distance[i] < MAX_SCHED_MIX_DISTANCE)
14605 last_scheduled_unit_distance[i]++;
14606 }
14607
22ac2c2f
AK
14608 if (verbose > 5)
14609 {
23902021
AK
14610 unsigned int sched_mask;
14611
14612 sched_mask = s390_get_sched_attrmask (insn);
14613
14614 fprintf (file, ";;\t\tBACKEND: insn %d: ", INSN_UID (insn));
14615#define PRINT_SCHED_ATTR(M, ATTR) fprintf (file, "%s ", ((M) & sched_mask) ? #ATTR : "");
14616 PRINT_SCHED_ATTR (S390_SCHED_ATTR_MASK_CRACKED, cracked);
14617 PRINT_SCHED_ATTR (S390_SCHED_ATTR_MASK_EXPANDED, expanded);
14618 PRINT_SCHED_ATTR (S390_SCHED_ATTR_MASK_ENDGROUP, endgroup);
14619 PRINT_SCHED_ATTR (S390_SCHED_ATTR_MASK_GROUPALONE, groupalone);
14620#undef PRINT_SCHED_ATTR
14621
6654e96f 14622 if (s390_tune >= PROCESSOR_2964_Z13)
23902021
AK
14623 {
14624 unsigned int unit_mask, m = 1;
14625 int units, j;
14626
14627 unit_mask = s390_get_unit_mask (insn, &units);
14628 fprintf (file, "(units:");
14629 for (j = 0; j < units; j++, m <<= 1)
14630 if (m & unit_mask)
14631 fprintf (file, " %d", j);
14632 fprintf (file, ")");
14633 }
14634 fprintf (file, " sched state: %d\n", s390_sched_state);
14635
6654e96f 14636 if (s390_tune >= PROCESSOR_2964_Z13)
23902021
AK
14637 {
14638 int units, j;
14639
14640 s390_get_unit_mask (insn, &units);
14641
14642 fprintf (file, ";;\t\tBACKEND: units unused for: ");
14643 for (j = 0; j < units; j++)
14644 fprintf (file, "%d:%d ", j, last_scheduled_unit_distance[j]);
14645 fprintf (file, "\n");
14646 }
22ac2c2f
AK
14647 }
14648 }
14649
3a892e44
AK
14650 if (GET_CODE (PATTERN (insn)) != USE
14651 && GET_CODE (PATTERN (insn)) != CLOBBER)
14652 return more - 1;
14653 else
14654 return more;
14655}
ed9676cf 14656
244e6c5c
AK
14657static void
14658s390_sched_init (FILE *file ATTRIBUTE_UNUSED,
14659 int verbose ATTRIBUTE_UNUSED,
14660 int max_ready ATTRIBUTE_UNUSED)
14661{
775c43d3 14662 last_scheduled_insn = NULL;
23902021 14663 memset (last_scheduled_unit_distance, 0, MAX_SCHED_UNITS * sizeof (int));
22ac2c2f 14664 s390_sched_state = 0;
244e6c5c
AK
14665}
14666
40ac4f73 14667/* This target hook implementation for TARGET_LOOP_UNROLL_ADJUST calculates
65b1d8ea
AK
14668 the number of times struct loop *loop should be unrolled if tuned for
14669 cpus with a built-in stride prefetcher.
14670 The loop body is analyzed by counting its memory accesses. Depending
40ac4f73
CB
14671 on the loop_depth and the number of memory accesses a new unroll
14672 factor <= nunroll is returned to improve the
9c582551 14673 behavior of the hardware prefetch unit. */
40ac4f73
CB
14674static unsigned
14675s390_loop_unroll_adjust (unsigned nunroll, struct loop *loop)
14676{
14677 basic_block *bbs;
775c43d3 14678 rtx_insn *insn;
40ac4f73
CB
14679 unsigned i;
14680 unsigned mem_count = 0;
14681
bacf8ec3 14682 if (s390_tune < PROCESSOR_2097_Z10)
40ac4f73
CB
14683 return nunroll;
14684
14685 /* Count the number of memory references within the loop body. */
14686 bbs = get_loop_body (loop);
9dc7a9da 14687 subrtx_iterator::array_type array;
40ac4f73 14688 for (i = 0; i < loop->num_nodes; i++)
9dc7a9da
RS
14689 FOR_BB_INSNS (bbs[i], insn)
14690 if (INSN_P (insn) && INSN_CODE (insn) != -1)
14691 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
14692 if (MEM_P (*iter))
14693 mem_count += 1;
40ac4f73
CB
14694 free (bbs);
14695
14696 /* Prevent division by zero; with no memory accesses there is nothing to adjust. */
14697 if (mem_count == 0)
14698 return nunroll;
14699
14700 switch (loop_depth (loop))
14701 {
14702 case 1:
14703 return MIN (nunroll, 28 / mem_count);
14704 case 2:
14705 return MIN (nunroll, 22 / mem_count);
14706 default:
14707 return MIN (nunroll, 16 / mem_count);
14708 }
14709}
14710
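/* Worked example (sketch): a depth-1 loop body with 7 memory
   references and nunroll == 8 yields MIN (8, 28 / 7) == 4, i.e. the
   unroll factor is halved to keep the memory access stream within
   what the prefetch unit handles well.  */
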
ec47b086
DV
14711/* Restore the current options. This is a hook function and also called
14712 internally. */
14713
4099494d 14714static void
ec47b086
DV
14715s390_function_specific_restore (struct gcc_options *opts,
14716 struct cl_target_option *ptr ATTRIBUTE_UNUSED)
4099494d 14717{
ec47b086
DV
14718 opts->x_s390_cost_pointer = (long)processor_table[opts->x_s390_tune].cost;
14719}
4099494d 14720
ec47b086 14721static void
6638efce
AK
14722s390_option_override_internal (bool main_args_p,
14723 struct gcc_options *opts,
ec47b086
DV
14724 const struct gcc_options *opts_set)
14725{
6638efce
AK
14726 const char *prefix;
14727 const char *suffix;
14728
14729 /* Set up prefix/suffix so the error messages refer to either the command
14730 line argument, or the attribute(target). */
14731 if (main_args_p)
14732 {
14733 prefix = "-m";
14734 suffix = "";
14735 }
14736 else
14737 {
14738 prefix = "option(\"";
14739 suffix = "\")";
14740 }
14741
14742
4099494d 14743 /* Architecture mode defaults according to ABI. */
ec47b086 14744 if (!(opts_set->x_target_flags & MASK_ZARCH))
4099494d
RS
14745 {
14746 if (TARGET_64BIT)
ec47b086 14747 opts->x_target_flags |= MASK_ZARCH;
4099494d 14748 else
ec47b086 14749 opts->x_target_flags &= ~MASK_ZARCH;
4099494d
RS
14750 }
14751
ec47b086
DV
14752 /* Set the march default in case it hasn't been specified on cmdline. */
14753 if (!opts_set->x_s390_arch)
6638efce
AK
14754 opts->x_s390_arch = PROCESSOR_2064_Z900;
14755 else if (opts->x_s390_arch == PROCESSOR_9672_G5
14756 || opts->x_s390_arch == PROCESSOR_9672_G6)
14757 warning (OPT_Wdeprecated, "%sarch=%s%s is deprecated and will be removed "
14758 "in future releases; use at least %sarch=z900%s",
14759 prefix, opts->x_s390_arch == PROCESSOR_9672_G5 ? "g5" : "g6",
14760 suffix, prefix, suffix);
14761
ec47b086 14762 opts->x_s390_arch_flags = processor_flags_table[(int) opts->x_s390_arch];
4099494d
RS
14763
14764 /* Determine processor to tune for. */
ec47b086
DV
14765 if (!opts_set->x_s390_tune)
14766 opts->x_s390_tune = opts->x_s390_arch;
6638efce
AK
14767 else if (opts->x_s390_tune == PROCESSOR_9672_G5
14768 || opts->x_s390_tune == PROCESSOR_9672_G6)
14769 warning (OPT_Wdeprecated, "%stune=%s%s is deprecated and will be removed "
14770 "in future releases; use at least %stune=z900%s",
14771 prefix, opts->x_s390_tune == PROCESSOR_9672_G5 ? "g5" : "g6",
14772 suffix, prefix, suffix);
14773
ec47b086 14774 opts->x_s390_tune_flags = processor_flags_table[opts->x_s390_tune];
4099494d
RS
14775
14776 /* Sanity checks. */
ec47b086
DV
14777 if (opts->x_s390_arch == PROCESSOR_NATIVE
14778 || opts->x_s390_tune == PROCESSOR_NATIVE)
cb0edc39 14779 gcc_unreachable ();
ec47b086
DV
14780 if (TARGET_ZARCH_P (opts->x_target_flags) && !TARGET_CPU_ZARCH_P (opts))
14781 error ("z/Architecture mode not supported on %s",
14782 processor_table[(int)opts->x_s390_arch].name);
14783 if (TARGET_64BIT && !TARGET_ZARCH_P (opts->x_target_flags))
4099494d
RS
14784 error ("64-bit ABI not supported in ESA/390 mode");
14785
4099494d
RS
14786 /* Enable hardware transactions if available and not explicitly
14787 disabled by user. E.g. with -m31 -march=zEC12 -mzarch */
ec47b086
DV
14788 if (!TARGET_OPT_HTM_P (opts_set->x_target_flags))
14789 {
14790 if (TARGET_CPU_HTM_P (opts) && TARGET_ZARCH_P (opts->x_target_flags))
14791 opts->x_target_flags |= MASK_OPT_HTM;
14792 else
14793 opts->x_target_flags &= ~MASK_OPT_HTM;
14794 }
4099494d 14795
ec47b086 14796 if (TARGET_OPT_VX_P (opts_set->x_target_flags))
55ac540c 14797 {
ec47b086 14798 if (TARGET_OPT_VX_P (opts->x_target_flags))
55ac540c 14799 {
ec47b086 14800 if (!TARGET_CPU_VX_P (opts))
55ac540c 14801 error ("hardware vector support not available on %s",
ec47b086
DV
14802 processor_table[(int)opts->x_s390_arch].name);
14803 if (TARGET_SOFT_FLOAT_P (opts->x_target_flags))
55ac540c
AK
14804 error ("hardware vector support not available with -msoft-float");
14805 }
14806 }
ec47b086
DV
14807 else
14808 {
14809 if (TARGET_CPU_VX_P (opts))
14810 /* Enable vector support if available and not explicitly disabled
14811 by user. E.g. with -m31 -march=z13 -mzarch */
14812 opts->x_target_flags |= MASK_OPT_VX;
14813 else
14814 opts->x_target_flags &= ~MASK_OPT_VX;
14815 }
55ac540c 14816
ec47b086
DV
14817 /* Use hardware DFP if available and not explicitly disabled by
14818 user. E.g. with -m31 -march=z10 -mzarch */
14819 if (!TARGET_HARD_DFP_P (opts_set->x_target_flags))
14820 {
14821 if (TARGET_DFP_P (opts))
14822 opts->x_target_flags |= MASK_HARD_DFP;
14823 else
14824 opts->x_target_flags &= ~MASK_HARD_DFP;
14825 }
14826
14827 if (TARGET_HARD_DFP_P (opts->x_target_flags) && !TARGET_DFP_P (opts))
4099494d 14828 {
ec47b086 14829 if (TARGET_HARD_DFP_P (opts_set->x_target_flags))
4099494d 14830 {
ec47b086 14831 if (!TARGET_CPU_DFP_P (opts))
4099494d 14832 error ("hardware decimal floating point instructions"
ec47b086
DV
14833 " not available on %s",
14834 processor_table[(int)opts->x_s390_arch].name);
14835 if (!TARGET_ZARCH_P (opts->x_target_flags))
4099494d
RS
14836 error ("hardware decimal floating point instructions"
14837 " not available in ESA/390 mode");
14838 }
14839 else
ec47b086 14840 opts->x_target_flags &= ~MASK_HARD_DFP;
4099494d
RS
14841 }
14842
ec47b086
DV
14843 if (TARGET_SOFT_FLOAT_P (opts_set->x_target_flags)
14844 && TARGET_SOFT_FLOAT_P (opts->x_target_flags))
4099494d 14845 {
ec47b086
DV
14846 if (TARGET_HARD_DFP_P (opts_set->x_target_flags)
14847 && TARGET_HARD_DFP_P (opts->x_target_flags))
4099494d
RS
14848 error ("-mhard-dfp can%'t be used in conjunction with -msoft-float");
14849
ec47b086 14850 opts->x_target_flags &= ~MASK_HARD_DFP;
4099494d
RS
14851 }
14852
ec47b086
DV
14853 if (TARGET_BACKCHAIN_P (opts->x_target_flags)
14854 && TARGET_PACKED_STACK_P (opts->x_target_flags)
14855 && TARGET_HARD_FLOAT_P (opts->x_target_flags))
4099494d
RS
14856 error ("-mbackchain -mpacked-stack -mhard-float are not supported "
14857 "in combination");
14858
ec47b086 14859 if (opts->x_s390_stack_size)
4099494d 14860 {
ec47b086 14861 if (opts->x_s390_stack_guard >= opts->x_s390_stack_size)
4099494d 14862 error ("stack size must be greater than the stack guard value");
ec47b086 14863 else if (opts->x_s390_stack_size > 1 << 16)
4099494d
RS
14864 error ("stack size must not be greater than 64k");
14865 }
ec47b086 14866 else if (opts->x_s390_stack_guard)
4099494d
RS
14867 error ("-mstack-guard implies use of -mstack-size");
14868
14869#ifdef TARGET_DEFAULT_LONG_DOUBLE_128
ec47b086
DV
14870 if (!TARGET_LONG_DOUBLE_128_P (opts_set->x_target_flags))
14871 opts->x_target_flags |= MASK_LONG_DOUBLE_128;
4099494d
RS
14872#endif
14873
ec47b086 14874 if (opts->x_s390_tune >= PROCESSOR_2097_Z10)
4099494d
RS
14875 {
14876 maybe_set_param_value (PARAM_MAX_UNROLLED_INSNS, 100,
ec47b086
DV
14877 opts->x_param_values,
14878 opts_set->x_param_values);
4099494d 14879 maybe_set_param_value (PARAM_MAX_UNROLL_TIMES, 32,
ec47b086
DV
14880 opts->x_param_values,
14881 opts_set->x_param_values);
4099494d 14882 maybe_set_param_value (PARAM_MAX_COMPLETELY_PEELED_INSNS, 2000,
ec47b086
DV
14883 opts->x_param_values,
14884 opts_set->x_param_values);
4099494d 14885 maybe_set_param_value (PARAM_MAX_COMPLETELY_PEEL_TIMES, 64,
ec47b086
DV
14886 opts->x_param_values,
14887 opts_set->x_param_values);
4099494d
RS
14888 }
14889
14890 maybe_set_param_value (PARAM_MAX_PENDING_LIST_LENGTH, 256,
ec47b086
DV
14891 opts->x_param_values,
14892 opts_set->x_param_values);
4099494d
RS
14893 /* Values for loop prefetching. */
14894 maybe_set_param_value (PARAM_L1_CACHE_LINE_SIZE, 256,
ec47b086
DV
14895 opts->x_param_values,
14896 opts_set->x_param_values);
4099494d 14897 maybe_set_param_value (PARAM_L1_CACHE_SIZE, 128,
ec47b086
DV
14898 opts->x_param_values,
14899 opts_set->x_param_values);
4099494d
RS
14900 /* s390 has more than 2 cache levels and their sizes are much larger.
14901 Since we are always running virtualized, assume that we only get a
14902 small part of the caches above l1. */
14903 maybe_set_param_value (PARAM_L2_CACHE_SIZE, 1500,
ec47b086
DV
14904 opts->x_param_values,
14905 opts_set->x_param_values);
4099494d 14906 maybe_set_param_value (PARAM_PREFETCH_MIN_INSN_TO_MEM_RATIO, 2,
ec47b086
DV
14907 opts->x_param_values,
14908 opts_set->x_param_values);
4099494d 14909 maybe_set_param_value (PARAM_SIMULTANEOUS_PREFETCHES, 6,
ec47b086
DV
14910 opts->x_param_values,
14911 opts_set->x_param_values);
14912
14913 /* Use the alternative scheduling-pressure algorithm by default. */
14914 maybe_set_param_value (PARAM_SCHED_PRESSURE_ALGORITHM, 2,
14915 opts->x_param_values,
14916 opts_set->x_param_values);
14917
6cc61b5a
RD
14918 maybe_set_param_value (PARAM_MIN_VECT_LOOP_BOUND, 2,
14919 opts->x_param_values,
14920 opts_set->x_param_values);
14921
ec47b086
DV
14922 /* Call target specific restore function to do post-init work. At the moment,
14923 this just sets opts->x_s390_cost_pointer. */
14924 s390_function_specific_restore (opts, NULL);
14925}
14926
14927static void
14928s390_option_override (void)
14929{
14930 unsigned int i;
14931 cl_deferred_option *opt;
14932 vec<cl_deferred_option> *v =
14933 (vec<cl_deferred_option> *) s390_deferred_options;
14934
14935 if (v)
14936 FOR_EACH_VEC_ELT (*v, i, opt)
14937 {
14938 switch (opt->opt_index)
14939 {
14940 case OPT_mhotpatch_:
14941 {
14942 int val1;
14943 int val2;
14944 char s[256];
14945 char *t;
14946
14947 strncpy (s, opt->arg, 256);
14948 s[255] = 0;
14949 t = strchr (s, ',');
14950 if (t != NULL)
14951 {
14952 *t = 0;
14953 t++;
14954 val1 = integral_argument (s);
14955 val2 = integral_argument (t);
14956 }
14957 else
14958 {
14959 val1 = -1;
14960 val2 = -1;
14961 }
14962 if (val1 == -1 || val2 == -1)
14963 {
14964 /* Argument is not a plain number. */
14965 error ("arguments to %qs should be non-negative integers",
14966 "-mhotpatch=n,m");
14967 break;
14968 }
14969 else if (val1 > s390_hotpatch_hw_max
14970 || val2 > s390_hotpatch_hw_max)
14971 {
14972 error ("argument to %qs is too large (max. %d)",
14973 "-mhotpatch=n,m", s390_hotpatch_hw_max);
14974 break;
14975 }
14976 s390_hotpatch_hw_before_label = val1;
14977 s390_hotpatch_hw_after_label = val2;
14978 break;
14979 }
14980 default:
14981 gcc_unreachable ();
14982 }
14983 }
14984
14985 /* Set up function hooks. */
14986 init_machine_status = s390_init_machine_status;
14987
6638efce 14988 s390_option_override_internal (true, &global_options, &global_options_set);
ec47b086
DV
14989
14990 /* Save the initial options in case the user does function specific
14991 options. */
14992 target_option_default_node = build_target_option_node (&global_options);
14993 target_option_current_node = target_option_default_node;
4099494d
RS
14994
14995 /* This cannot reside in s390_option_optimization_table since HAVE_prefetch
14996 requires the arch flags to be evaluated already. Since prefetching
14997 is beneficial on s390, we enable it if available. */
14998 if (flag_prefetch_loop_arrays < 0 && HAVE_prefetch && optimize >= 3)
14999 flag_prefetch_loop_arrays = 1;
15000
935b5226
AK
15001 if (!s390_pic_data_is_text_relative && !flag_pic)
15002 error ("-mno-pic-data-is-text-relative cannot be used without -fpic/-fPIC");
15003
4099494d
RS
15004 if (TARGET_TPF)
15005 {
15006 /* Don't emit DWARF3/4 unless specifically selected. The TPF
15007 debuggers do not yet support DWARF 3/4. */
15008 if (!global_options_set.x_dwarf_strict)
15009 dwarf_strict = 1;
15010 if (!global_options_set.x_dwarf_version)
15011 dwarf_version = 2;
15012 }
15013
15014 /* Register a target-specific optimization-and-lowering pass
15015 to run immediately before prologue and epilogue generation.
15016
15017 Registering the pass must be done at start up. It's
15018 convenient to do it here. */
15019 opt_pass *new_pass = new pass_s390_early_mach (g);
15020 struct register_pass_info insert_pass_s390_early_mach =
15021 {
15022 new_pass, /* pass */
15023 "pro_and_epilogue", /* reference_pass_name */
15024 1, /* ref_pass_instance_number */
15025 PASS_POS_INSERT_BEFORE /* po_op */
15026 };
15027 register_pass (&insert_pass_s390_early_mach);
15028}
15029
ec47b086
DV
15030#if S390_USE_TARGET_ATTRIBUTE
15031/* Inner function to process the attribute((target(...))), take an argument and
15032 set the current options from the argument. If we have a list, recursively go
15033 over the list. */
15034
15035static bool
15036s390_valid_target_attribute_inner_p (tree args,
15037 struct gcc_options *opts,
15038 struct gcc_options *new_opts_set,
15039 bool force_pragma)
15040{
15041 char *next_optstr;
15042 bool ret = true;
15043
15044#define S390_ATTRIB(S,O,A) { S, sizeof (S)-1, O, A, 0 }
15045#define S390_PRAGMA(S,O,A) { S, sizeof (S)-1, O, A, 1 }
15046 static const struct
15047 {
15048 const char *string;
15049 size_t len;
15050 int opt;
15051 int has_arg;
15052 int only_as_pragma;
15053 } attrs[] = {
15054 /* enum options */
15055 S390_ATTRIB ("arch=", OPT_march_, 1),
15056 S390_ATTRIB ("tune=", OPT_mtune_, 1),
15057 /* uinteger options */
15058 S390_ATTRIB ("stack-guard=", OPT_mstack_guard_, 1),
15059 S390_ATTRIB ("stack-size=", OPT_mstack_size_, 1),
15060 S390_ATTRIB ("branch-cost=", OPT_mbranch_cost_, 1),
15061 S390_ATTRIB ("warn-framesize=", OPT_mwarn_framesize_, 1),
15062 /* flag options */
15063 S390_ATTRIB ("backchain", OPT_mbackchain, 0),
15064 S390_ATTRIB ("hard-dfp", OPT_mhard_dfp, 0),
15065 S390_ATTRIB ("hard-float", OPT_mhard_float, 0),
15066 S390_ATTRIB ("htm", OPT_mhtm, 0),
15067 S390_ATTRIB ("vx", OPT_mvx, 0),
15068 S390_ATTRIB ("packed-stack", OPT_mpacked_stack, 0),
15069 S390_ATTRIB ("small-exec", OPT_msmall_exec, 0),
15070 S390_ATTRIB ("soft-float", OPT_msoft_float, 0),
15071 S390_ATTRIB ("mvcle", OPT_mmvcle, 0),
15072 S390_PRAGMA ("zvector", OPT_mzvector, 0),
15073 /* boolean options */
15074 S390_ATTRIB ("warn-dynamicstack", OPT_mwarn_dynamicstack, 0),
15075 };
15076#undef S390_ATTRIB
15077#undef S390_PRAGMA
15078
15079 /* If this is a list, recurse to get the options. */
15080 if (TREE_CODE (args) == TREE_LIST)
15081 {
15082 bool ret = true;
15083 int num_pragma_values;
15084 int i;
15085
15086 /* Note: attribs.c:decl_attributes prepends the values from
15087 current_target_pragma to the list of target attributes. To determine
15088 whether we're looking at a value of the attribute or the pragma we
15089 assume that the first [list_length (current_target_pragma)] values in
15090 the list are the values from the pragma. */
15091 num_pragma_values = (!force_pragma && current_target_pragma != NULL)
15092 ? list_length (current_target_pragma) : 0;
15093 for (i = 0; args; args = TREE_CHAIN (args), i++)
15094 {
15095 bool is_pragma;
15096
15097 is_pragma = (force_pragma || i < num_pragma_values);
15098 if (TREE_VALUE (args)
15099 && !s390_valid_target_attribute_inner_p (TREE_VALUE (args),
15100 opts, new_opts_set,
15101 is_pragma))
15102 {
15103 ret = false;
15104 }
15105 }
15106 return ret;
15107 }
15108
15109 else if (TREE_CODE (args) != STRING_CST)
15110 {
15111 error ("attribute %<target%> argument not a string");
15112 return false;
15113 }
15114
15115 /* Handle multiple arguments separated by commas. */
15116 next_optstr = ASTRDUP (TREE_STRING_POINTER (args));
15117
15118 while (next_optstr && *next_optstr != '\0')
15119 {
15120 char *p = next_optstr;
15121 char *orig_p = p;
15122 char *comma = strchr (next_optstr, ',');
15123 size_t len, opt_len;
15124 int opt;
15125 bool opt_set_p;
15126 char ch;
15127 unsigned i;
15128 int mask = 0;
15129 enum cl_var_type var_type;
15130 bool found;
15131
15132 if (comma)
15133 {
15134 *comma = '\0';
15135 len = comma - next_optstr;
15136 next_optstr = comma + 1;
15137 }
15138 else
15139 {
15140 len = strlen (p);
15141 next_optstr = NULL;
15142 }
15143
15144 /* Recognize no-xxx. */
15145 if (len > 3 && p[0] == 'n' && p[1] == 'o' && p[2] == '-')
15146 {
15147 opt_set_p = false;
15148 p += 3;
15149 len -= 3;
15150 }
15151 else
15152 opt_set_p = true;
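      /* E.g. the token "no-htm" leaves opt_set_p == false with P pointing
	 at "htm", which then matches the OPT_mhtm entry in the table.  */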
15153
15154 /* Find the option. */
15155 ch = *p;
15156 found = false;
15157 for (i = 0; i < ARRAY_SIZE (attrs); i++)
15158 {
15159 opt_len = attrs[i].len;
15160 if (ch == attrs[i].string[0]
15161 && ((attrs[i].has_arg) ? len > opt_len : len == opt_len)
15162 && memcmp (p, attrs[i].string, opt_len) == 0)
15163 {
15164 opt = attrs[i].opt;
15165 if (!opt_set_p && cl_options[opt].cl_reject_negative)
15166 continue;
15167 mask = cl_options[opt].var_value;
15168 var_type = cl_options[opt].var_type;
15169 found = true;
15170 break;
15171 }
15172 }
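      /* E.g. the argument-taking entry "arch=" has opt_len == 5, so the
	 token "arch=z13" (len == 8) matches via the len > opt_len case,
	 and its value "z13" is later read from p + opt_len.  */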
15173
15174 /* Process the option. */
15175 if (!found)
15176 {
15177 error ("attribute(target(\"%s\")) is unknown", orig_p);
15178 return false;
15179 }
15180 else if (attrs[i].only_as_pragma && !force_pragma)
15181 {
15182 /* Value is not allowed for the target attribute. */
f3981e7e 15183 error ("value %qs is not supported by attribute %<target%>",
15184 attrs[i].string);
15185 return false;
15186 }
15187
15188 else if (var_type == CLVC_BIT_SET || var_type == CLVC_BIT_CLEAR)
15189 {
15190 if (var_type == CLVC_BIT_CLEAR)
15191 opt_set_p = !opt_set_p;
15192
15193 if (opt_set_p)
15194 opts->x_target_flags |= mask;
15195 else
15196 opts->x_target_flags &= ~mask;
15197 new_opts_set->x_target_flags |= mask;
15198 }
15199
15200 else if (cl_options[opt].var_type == CLVC_BOOLEAN)
15201 {
15202 int value;
15203
15204 if (cl_options[opt].cl_uinteger)
15205 {
15206 /* Unsigned integer argument. Code based on the function
15207 decode_cmdline_option () in opts-common.c. */
15208 value = integral_argument (p + opt_len);
15209 }
15210 else
15211 value = (opt_set_p) ? 1 : 0;
15212
15213 if (value != -1)
15214 {
15215 struct cl_decoded_option decoded;
15216
15217 /* Value range check; only implemented for numeric and boolean
15218 options at the moment. */
15219 generate_option (opt, NULL, value, CL_TARGET, &decoded);
15220 s390_handle_option (opts, new_opts_set, &decoded, input_location);
15221 set_option (opts, new_opts_set, opt, value,
15222 p + opt_len, DK_UNSPECIFIED, input_location,
15223 global_dc);
15224 }
15225 else
15226 {
15227 error ("attribute(target(\"%s\")) has an invalid value", orig_p);
15228 ret = false;
15229 }
15230 }
15231
15232 else if (cl_options[opt].var_type == CLVC_ENUM)
15233 {
15234 bool arg_ok;
15235 int value;
15236
15237 arg_ok = opt_enum_arg_to_value (opt, p + opt_len, &value, CL_TARGET);
15238 if (arg_ok)
15239 set_option (opts, new_opts_set, opt, value,
15240 p + opt_len, DK_UNSPECIFIED, input_location,
15241 global_dc);
15242 else
15243 {
15244 error ("attribute(target(\"%s\")) has an invalid value", orig_p);
15245 ret = false;
15246 }
15247 }
15248
15249 else
15250 gcc_unreachable ();
15251 }
15252 return ret;
15253}
15254
15255/* Return a TARGET_OPTION_NODE tree of the target options listed or NULL. */
15256
15257tree
15258s390_valid_target_attribute_tree (tree args,
15259 struct gcc_options *opts,
15260 const struct gcc_options *opts_set,
15261 bool force_pragma)
15262{
15263 tree t = NULL_TREE;
15264 struct gcc_options new_opts_set;
15265
15266 memset (&new_opts_set, 0, sizeof (new_opts_set));
15267
15268 /* Process each of the options on the chain. */
15269 if (! s390_valid_target_attribute_inner_p (args, opts, &new_opts_set,
15270 force_pragma))
15271 return error_mark_node;
15272
15273 /* If some option was set (even if it has not changed), rerun
15274 s390_option_override_internal, and then save the options away. */
15275 if (new_opts_set.x_target_flags
15276 || new_opts_set.x_s390_arch
15277 || new_opts_set.x_s390_tune
15278 || new_opts_set.x_s390_stack_guard
15279 || new_opts_set.x_s390_stack_size
15280 || new_opts_set.x_s390_branch_cost
15281 || new_opts_set.x_s390_warn_framesize
15282 || new_opts_set.x_s390_warn_dynamicstack_p)
15283 {
15284 const unsigned char *src = (const unsigned char *)opts_set;
15285 unsigned char *dest = (unsigned char *)&new_opts_set;
15286 unsigned int i;
15287
15288 /* Merge the original option flags into the new ones. */
15289 for (i = 0; i < sizeof(*opts_set); i++)
15290 dest[i] |= src[i];
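      /* Byte-wise ORing suffices here: the *_set structure records which
	 options were explicitly set, either as boolean markers or as bit
	 masks of set flags, and a byte-wise OR forms the union correctly
	 in both cases.  */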
15291
15292 /* Apply any overrides, such as arch=xxx or tune=xxx. */
6638efce 15293 s390_option_override_internal (false, opts, &new_opts_set);
15294 /* Save the current options unless we are validating options for
15295 #pragma. */
15296 t = build_target_option_node (opts);
15297 }
15298 return t;
15299}
15300
15301/* Hook to validate attribute((target("string"))). */
15302
15303static bool
15304s390_valid_target_attribute_p (tree fndecl,
15305 tree ARG_UNUSED (name),
15306 tree args,
15307 int ARG_UNUSED (flags))
15308{
15309 struct gcc_options func_options;
15310 tree new_target, new_optimize;
15311 bool ret = true;
15312
15313 /* attribute((target("default"))) does nothing, beyond
15314 affecting multi-versioning. */
15315 if (TREE_VALUE (args)
15316 && TREE_CODE (TREE_VALUE (args)) == STRING_CST
15317 && TREE_CHAIN (args) == NULL_TREE
15318 && strcmp (TREE_STRING_POINTER (TREE_VALUE (args)), "default") == 0)
15319 return true;
15320
15321 tree old_optimize = build_optimization_node (&global_options);
15322
15323 /* Get the optimization options of the current function. */
15324 tree func_optimize = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl);
15325
15326 if (!func_optimize)
15327 func_optimize = old_optimize;
15328
15329 /* Init func_options. */
15330 memset (&func_options, 0, sizeof (func_options));
15331 init_options_struct (&func_options, NULL);
15332 lang_hooks.init_options_struct (&func_options);
15333
15334 cl_optimization_restore (&func_options, TREE_OPTIMIZATION (func_optimize));
15335
15336 /* Initialize func_options to the default before its target options can
15337 be set. */
15338 cl_target_option_restore (&func_options,
15339 TREE_TARGET_OPTION (target_option_default_node));
15340
15341 new_target = s390_valid_target_attribute_tree (args, &func_options,
15342 &global_options_set,
15343 (args ==
15344 current_target_pragma));
15345 new_optimize = build_optimization_node (&func_options);
15346 if (new_target == error_mark_node)
15347 ret = false;
15348 else if (fndecl && new_target)
15349 {
15350 DECL_FUNCTION_SPECIFIC_TARGET (fndecl) = new_target;
15351 if (old_optimize != new_optimize)
15352 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = new_optimize;
15353 }
15354 return ret;
15355}
15356
15357/* Restore target globals from NEW_TREE and invalidate the
15358 s390_previous_fndecl cache.  */
15359
15360void
15361s390_activate_target_options (tree new_tree)
15362{
15363 cl_target_option_restore (&global_options, TREE_TARGET_OPTION (new_tree));
15364 if (TREE_TARGET_GLOBALS (new_tree))
15365 restore_target_globals (TREE_TARGET_GLOBALS (new_tree));
15366 else if (new_tree == target_option_default_node)
15367 restore_target_globals (&default_target_globals);
15368 else
15369 TREE_TARGET_GLOBALS (new_tree) = save_target_globals_default_opts ();
15370 s390_previous_fndecl = NULL_TREE;
15371}
15372
15373/* Establish appropriate back-end context for processing the function
15374 FNDECL. The argument might be NULL to indicate processing at top
15375 level, outside of any function scope. */
15376static void
15377s390_set_current_function (tree fndecl)
15378{
15379 /* Only change the context if the function changes. This hook is called
15380 several times in the course of compiling a function, and we don't want to
15381 slow things down too much or call target_reinit when it isn't safe. */
15382 if (fndecl == s390_previous_fndecl)
15383 return;
15384
15385 tree old_tree;
15386 if (s390_previous_fndecl == NULL_TREE)
15387 old_tree = target_option_current_node;
15388 else if (DECL_FUNCTION_SPECIFIC_TARGET (s390_previous_fndecl))
15389 old_tree = DECL_FUNCTION_SPECIFIC_TARGET (s390_previous_fndecl);
15390 else
15391 old_tree = target_option_default_node;
15392
15393 if (fndecl == NULL_TREE)
15394 {
15395 if (old_tree != target_option_current_node)
15396 s390_activate_target_options (target_option_current_node);
15397 return;
15398 }
15399
15400 tree new_tree = DECL_FUNCTION_SPECIFIC_TARGET (fndecl);
15401 if (new_tree == NULL_TREE)
15402 new_tree = target_option_default_node;
15403
15404 if (old_tree != new_tree)
15405 s390_activate_target_options (new_tree);
15406 s390_previous_fndecl = fndecl;
15407}
15408#endif
15409
15410/* Implement TARGET_USE_BY_PIECES_INFRASTRUCTURE_P. */
15411
15412static bool
445d7826 15413s390_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
15414 unsigned int align ATTRIBUTE_UNUSED,
15415 enum by_pieces_operation op ATTRIBUTE_UNUSED,
15416 bool speed_p ATTRIBUTE_UNUSED)
15417{
15418 return (size == 1 || size == 2
15419 || size == 4 || (TARGET_ZARCH && size == 8));
15420}
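/* So, for example, a constant 4-byte block move may be expanded inline by
   the generic by-pieces code, while a 6-byte move is left to the target's
   block-move patterns or a library call; the 8-byte case is only accepted
   in z/Architecture mode.  */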
15421
15422/* Implement TARGET_ATOMIC_ASSIGN_EXPAND_FENV hook. */
15423
15424static void
15425s390_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
15426{
15427 tree sfpc = s390_builtin_decls[S390_BUILTIN_s390_sfpc];
15428 tree efpc = s390_builtin_decls[S390_BUILTIN_s390_efpc];
35bc11c3 15429 tree call_efpc = build_call_expr (efpc, 0);
f2c0c243 15430 tree fenv_var = create_tmp_var_raw (unsigned_type_node);
15431
15432#define FPC_EXCEPTION_MASK HOST_WIDE_INT_UC (0xf8000000)
15433#define FPC_FLAGS_MASK HOST_WIDE_INT_UC (0x00f80000)
15434#define FPC_DXC_MASK HOST_WIDE_INT_UC (0x0000ff00)
15435#define FPC_EXCEPTION_MASK_SHIFT HOST_WIDE_INT_UC (24)
15436#define FPC_FLAGS_SHIFT HOST_WIDE_INT_UC (16)
15437#define FPC_DXC_SHIFT HOST_WIDE_INT_UC (8)
15438
15439 /* Generates the equivalent of feholdexcept (&fenv_var)
15440
15441 fenv_var = __builtin_s390_efpc ();
15442 __builtin_s390_sfpc (fenv_var & mask) */
15443 tree old_fpc = build2 (MODIFY_EXPR, unsigned_type_node, fenv_var, call_efpc);
15444 tree new_fpc =
15445 build2 (BIT_AND_EXPR, unsigned_type_node, fenv_var,
15446 build_int_cst (unsigned_type_node,
15447 ~(FPC_DXC_MASK | FPC_FLAGS_MASK |
15448 FPC_EXCEPTION_MASK)));
15449 tree set_new_fpc = build_call_expr (sfpc, 1, new_fpc);
15450 *hold = build2 (COMPOUND_EXPR, void_type_node, old_fpc, set_new_fpc);
15451
15452 /* Generates the equivalent of feclearexcept (FE_ALL_EXCEPT)
15453
15454 __builtin_s390_sfpc (__builtin_s390_efpc () & mask) */
15455 new_fpc = build2 (BIT_AND_EXPR, unsigned_type_node, call_efpc,
15456 build_int_cst (unsigned_type_node,
15457 ~(FPC_DXC_MASK | FPC_FLAGS_MASK)));
15458 *clear = build_call_expr (sfpc, 1, new_fpc);
15459
15460 /* Generates the equivalent of feupdateenv (fenv_var)
15461
15462 old_fpc = __builtin_s390_efpc ();
15463 __builtin_s390_sfpc (fenv_var);
15464 __atomic_feraiseexcept ((old_fpc & FPC_FLAGS_MASK) >> FPC_FLAGS_SHIFT); */
15465
f2c0c243 15466 old_fpc = create_tmp_var_raw (unsigned_type_node);
15467 tree store_old_fpc = build2 (MODIFY_EXPR, void_type_node,
15468 old_fpc, call_efpc);
15469
15470 set_new_fpc = build_call_expr (sfpc, 1, fenv_var);
15471
15472 tree raise_old_except = build2 (BIT_AND_EXPR, unsigned_type_node, old_fpc,
15473 build_int_cst (unsigned_type_node,
15474 FPC_FLAGS_MASK));
15475 raise_old_except = build2 (RSHIFT_EXPR, unsigned_type_node, raise_old_except,
15476 build_int_cst (unsigned_type_node,
15477 FPC_FLAGS_SHIFT));
15478 tree atomic_feraiseexcept
15479 = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
15480 raise_old_except = build_call_expr (atomic_feraiseexcept,
15481 1, raise_old_except);
15482
15483 *update = build2 (COMPOUND_EXPR, void_type_node,
15484 build2 (COMPOUND_EXPR, void_type_node,
15485 store_old_fpc, set_new_fpc),
15486 raise_old_except);
15487
15488#undef FPC_EXCEPTION_MASK
15489#undef FPC_FLAGS_MASK
15490#undef FPC_DXC_MASK
15491#undef FPC_EXCEPTION_MASK_SHIFT
15492#undef FPC_FLAGS_SHIFT
15493#undef FPC_DXC_SHIFT
15494}
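/* The three sequences built above are wrapped by the middle end around a
   C11 atomic compound assignment involving floating point, e.g.

     _Atomic double d;
     d += x;

   roughly as: *HOLD before the compare-and-swap loop, *CLEAR when an
   iteration fails and the operation is retried, and *UPDATE once the
   result has been stored successfully.  */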
15495
15496/* Return the vector mode to be used for inner mode MODE when doing
15497 vectorization. */
15498static machine_mode
15499s390_preferred_simd_mode (machine_mode mode)
15500{
15501 if (TARGET_VX)
15502 switch (mode)
15503 {
4e10a5a7 15504 case E_DFmode:
085261c8 15505 return V2DFmode;
4e10a5a7 15506 case E_DImode:
085261c8 15507 return V2DImode;
4e10a5a7 15508 case E_SImode:
085261c8 15509 return V4SImode;
4e10a5a7 15510 case E_HImode:
085261c8 15511 return V8HImode;
4e10a5a7 15512 case E_QImode:
15513 return V16QImode;
15514 default:;
15515 }
15516 return word_mode;
15517}
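/* E.g. when vectorizing a loop over 32-bit ints with the vector facility
   enabled (say, -march=z13), the vectorizer is steered towards V4SImode,
   i.e. four lanes per 128-bit vector register; without TARGET_VX it only
   gets word_mode and will normally not vectorize.  */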
15518
15519/* Our hardware does not require vectors to be strictly aligned. */
15520static bool
15521s390_support_vector_misalignment (machine_mode mode ATTRIBUTE_UNUSED,
15522 const_tree type ATTRIBUTE_UNUSED,
15523 int misalignment ATTRIBUTE_UNUSED,
15524 bool is_packed ATTRIBUTE_UNUSED)
15525{
15526 if (TARGET_VX)
15527 return true;
15528
15529 return default_builtin_support_vector_misalignment (mode, type, misalignment,
15530 is_packed);
15531}
15532
15533/* The vector ABI requires vector types to be aligned on an 8 byte
15534 boundary (our stack alignment).  However, we allow this to be
15535 overridden by the user, although doing so breaks the ABI.  */
15536static HOST_WIDE_INT
15537s390_vector_alignment (const_tree type)
15538{
15539 if (!TARGET_VX_ABI)
15540 return default_vector_alignment (type);
15541
15542 if (TYPE_USER_ALIGN (type))
15543 return TYPE_ALIGN (type);
15544
15545 return MIN (64, tree_to_shwi (TYPE_SIZE (type)));
15546}
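/* E.g. a (hypothetical) 16-byte vector type

     typedef int v4si __attribute__ ((vector_size (16)));

   gets MIN (64, 128) = 64 bit alignment under the vector ABI, whereas the
   default would be its full 128-bit size; an explicit user alignment on
   the type still takes precedence.  */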
15547
15548#ifdef HAVE_AS_MACHINE_MACHINEMODE
15549/* Implement TARGET_ASM_FILE_START. */
15550static void
15551s390_asm_file_start (void)
15552{
587b7f7a 15553 default_file_start ();
15554 s390_asm_output_machine_for_arch (asm_out_file);
15555}
15556#endif
15557
15558/* Implement TARGET_ASM_FILE_END. */
15559static void
15560s390_asm_file_end (void)
15561{
15562#ifdef HAVE_AS_GNU_ATTRIBUTE
15563 varpool_node *vnode;
15564 cgraph_node *cnode;
15565
15566 FOR_EACH_VARIABLE (vnode)
15567 if (TREE_PUBLIC (vnode->decl))
15568 s390_check_type_for_vector_abi (TREE_TYPE (vnode->decl), false, false);
15569
15570 FOR_EACH_FUNCTION (cnode)
15571 if (TREE_PUBLIC (cnode->decl))
15572 s390_check_type_for_vector_abi (TREE_TYPE (cnode->decl), false, false);
15573
15574
15575 if (s390_vector_abi != 0)
15576 fprintf (asm_out_file, "\t.gnu_attribute 8, %d\n",
15577 s390_vector_abi);
15578#endif
15579 file_end_indicate_exec_stack ();
15580
15581 if (flag_split_stack)
15582 file_end_indicate_split_stack ();
45901378 15583}
085261c8 15584
15585/* Return true if TYPE is a vector bool type. */
15586static inline bool
15587s390_vector_bool_type_p (const_tree type)
15588{
15589 return TYPE_VECTOR_OPAQUE (type);
15590}
15591
15592/* Return the diagnostic message string if the binary operation OP is
15593 not permitted on TYPE1 and TYPE2, NULL otherwise. */
15594static const char*
15595s390_invalid_binary_op (int op ATTRIBUTE_UNUSED, const_tree type1, const_tree type2)
15596{
15597 bool bool1_p, bool2_p;
15598 bool plusminus_p;
15599 bool muldiv_p;
15600 bool compare_p;
15601 machine_mode mode1, mode2;
15602
15603 if (!TARGET_ZVECTOR)
15604 return NULL;
15605
15606 if (!VECTOR_TYPE_P (type1) || !VECTOR_TYPE_P (type2))
15607 return NULL;
15608
15609 bool1_p = s390_vector_bool_type_p (type1);
15610 bool2_p = s390_vector_bool_type_p (type2);
15611
15612 /* Mixing signed and unsigned types is forbidden for all
15613 operators. */
15614 if (!bool1_p && !bool2_p
15615 && TYPE_UNSIGNED (type1) != TYPE_UNSIGNED (type2))
bd2c6270 15616 return N_("types differ in signedness");
15617
15618 plusminus_p = (op == PLUS_EXPR || op == MINUS_EXPR);
15619 muldiv_p = (op == MULT_EXPR || op == RDIV_EXPR || op == TRUNC_DIV_EXPR
15620 || op == CEIL_DIV_EXPR || op == FLOOR_DIV_EXPR
15621 || op == ROUND_DIV_EXPR);
15622 compare_p = (op == LT_EXPR || op == LE_EXPR || op == GT_EXPR || op == GE_EXPR
15623 || op == EQ_EXPR || op == NE_EXPR);
15624
15625 if (bool1_p && bool2_p && (plusminus_p || muldiv_p))
15626 return N_("binary operator does not support two vector bool operands");
15627
15628 if (bool1_p != bool2_p && (muldiv_p || compare_p))
15629 return N_("binary operator does not support vector bool operand");
15630
15631 mode1 = TYPE_MODE (type1);
15632 mode2 = TYPE_MODE (type2);
15633
15634 if (bool1_p != bool2_p && plusminus_p
15635 && (GET_MODE_CLASS (mode1) == MODE_VECTOR_FLOAT
15636 || GET_MODE_CLASS (mode2) == MODE_VECTOR_FLOAT))
15637 return N_("binary operator does not support mixing vector "
15638 "bool with floating point vector operands");
15639
15640 return NULL;
15641}
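/* E.g. with the zvector language extension enabled, adding a vector bool
   operand to a vector float operand is rejected with the last message
   above, while adding vector signed int to vector unsigned int yields the
   signedness diagnostic.  */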
15642
15643/* Implement TARGET_C_EXCESS_PRECISION.
15644
15645 FIXME: For historical reasons, float_t and double_t are typedef'ed to
15646 double on s390, causing operations on float_t to operate in a higher
15647 precision than is necessary. However, it is not the case that SFmode
15648 operations have implicit excess precision, and we generate more optimal
15649 code if we let the compiler know no implicit extra precision is added.
15650
15651 That means when we are compiling with -fexcess-precision=fast, the value
15652 we set for FLT_EVAL_METHOD will be out of line with the actual precision of
15653 float_t (though they would be correct for -fexcess-precision=standard).
15654
15655 A complete fix would modify glibc to remove the unnecessary typedef
15656 of float_t to double. */
15657
15658static enum flt_eval_method
15659s390_excess_precision (enum excess_precision_type type)
15660{
15661 switch (type)
15662 {
15663 case EXCESS_PRECISION_TYPE_IMPLICIT:
15664 case EXCESS_PRECISION_TYPE_FAST:
15665 /* The fastest type to promote to will always be the native type,
15666 whether that occurs with implicit excess precision or
15667 otherwise. */
15668 return FLT_EVAL_METHOD_PROMOTE_TO_FLOAT;
15669 case EXCESS_PRECISION_TYPE_STANDARD:
15670 /* Otherwise, when we are in a standards compliant mode, to
15671 ensure consistency with the implementation in glibc, report that
15672 float is evaluated to the range and precision of double. */
15673 return FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE;
15674 default:
15675 gcc_unreachable ();
15676 }
15677 return FLT_EVAL_METHOD_UNPREDICTABLE;
15678}
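/* E.g. for

     float a, b, c;
     c = a * b;

   -fexcess-precision=standard evaluates the product in double precision,
   matching glibc's definition of float_t as double, whereas
   -fexcess-precision=fast keeps it in single precision.  */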
15679
15680/* Implement the TARGET_ASAN_SHADOW_OFFSET hook. */
15681
15682static unsigned HOST_WIDE_INT
15683s390_asan_shadow_offset (void)
15684{
15685 return TARGET_64BIT ? HOST_WIDE_INT_1U << 52 : HOST_WIDE_INT_UC (0x20000000);
15686}
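/* With this offset, AddressSanitizer's usual shadow mapping

     shadow = (addr >> 3) + shadow_offset

   becomes roughly (addr >> 3) + (1UL << 52) for 64-bit code, while 31-bit
   code uses the fixed 0x20000000 base.  */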
15687
ab96de7e 15688/* Initialize GCC target structure. */
38899e29 15689
15690#undef TARGET_ASM_ALIGNED_HI_OP
15691#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
15692#undef TARGET_ASM_ALIGNED_DI_OP
15693#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
15694#undef TARGET_ASM_INTEGER
15695#define TARGET_ASM_INTEGER s390_assemble_integer
ed9676cf 15696
15697#undef TARGET_ASM_OPEN_PAREN
15698#define TARGET_ASM_OPEN_PAREN ""
38899e29 15699
15700#undef TARGET_ASM_CLOSE_PAREN
15701#define TARGET_ASM_CLOSE_PAREN ""
ed9676cf 15702
15703#undef TARGET_OPTION_OVERRIDE
15704#define TARGET_OPTION_OVERRIDE s390_option_override
15705
15706#ifdef TARGET_THREAD_SSP_OFFSET
15707#undef TARGET_STACK_PROTECT_GUARD
15708#define TARGET_STACK_PROTECT_GUARD hook_tree_void_null
15709#endif
15710
15711#undef TARGET_ENCODE_SECTION_INFO
15712#define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
ed9676cf 15713
15714#undef TARGET_SCALAR_MODE_SUPPORTED_P
15715#define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p
15716
15717#ifdef HAVE_AS_TLS
15718#undef TARGET_HAVE_TLS
15719#define TARGET_HAVE_TLS true
15720#endif
15721#undef TARGET_CANNOT_FORCE_CONST_MEM
15722#define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
ed9676cf 15723
15724#undef TARGET_DELEGITIMIZE_ADDRESS
15725#define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
ed9676cf 15726
15727#undef TARGET_LEGITIMIZE_ADDRESS
15728#define TARGET_LEGITIMIZE_ADDRESS s390_legitimize_address
15729
15730#undef TARGET_RETURN_IN_MEMORY
15731#define TARGET_RETURN_IN_MEMORY s390_return_in_memory
38899e29 15732
15733#undef TARGET_INIT_BUILTINS
15734#define TARGET_INIT_BUILTINS s390_init_builtins
15735#undef TARGET_EXPAND_BUILTIN
15736#define TARGET_EXPAND_BUILTIN s390_expand_builtin
15737#undef TARGET_BUILTIN_DECL
15738#define TARGET_BUILTIN_DECL s390_builtin_decl
5a3fe9b6 15739
15740#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
15741#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA s390_output_addr_const_extra
15742
15743#undef TARGET_ASM_OUTPUT_MI_THUNK
15744#define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
15745#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
3101faab 15746#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
ed9676cf 15747
638108bd
JG
15748#undef TARGET_C_EXCESS_PRECISION
15749#define TARGET_C_EXCESS_PRECISION s390_excess_precision
15750
15751#undef TARGET_SCHED_ADJUST_PRIORITY
15752#define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
15753#undef TARGET_SCHED_ISSUE_RATE
15754#define TARGET_SCHED_ISSUE_RATE s390_issue_rate
15755#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
15756#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
38899e29 15757
15758#undef TARGET_SCHED_VARIABLE_ISSUE
15759#define TARGET_SCHED_VARIABLE_ISSUE s390_sched_variable_issue
15760#undef TARGET_SCHED_REORDER
15761#define TARGET_SCHED_REORDER s390_sched_reorder
15762#undef TARGET_SCHED_INIT
15763#define TARGET_SCHED_INIT s390_sched_init
3a892e44 15764
15765#undef TARGET_CANNOT_COPY_INSN_P
15766#define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
15767#undef TARGET_RTX_COSTS
15768#define TARGET_RTX_COSTS s390_rtx_costs
15769#undef TARGET_ADDRESS_COST
15770#define TARGET_ADDRESS_COST s390_address_cost
15771#undef TARGET_REGISTER_MOVE_COST
15772#define TARGET_REGISTER_MOVE_COST s390_register_move_cost
15773#undef TARGET_MEMORY_MOVE_COST
15774#define TARGET_MEMORY_MOVE_COST s390_memory_move_cost
15775#undef TARGET_VECTORIZE_BUILTIN_VECTORIZATION_COST
15776#define TARGET_VECTORIZE_BUILTIN_VECTORIZATION_COST \
15777 s390_builtin_vectorization_cost
38899e29 15778
15779#undef TARGET_MACHINE_DEPENDENT_REORG
15780#define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
8a512b77 15781
15782#undef TARGET_VALID_POINTER_MODE
15783#define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
38899e29 15784
15785#undef TARGET_BUILD_BUILTIN_VA_LIST
15786#define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
15787#undef TARGET_EXPAND_BUILTIN_VA_START
15788#define TARGET_EXPAND_BUILTIN_VA_START s390_va_start
15789#undef TARGET_ASAN_SHADOW_OFFSET
15790#define TARGET_ASAN_SHADOW_OFFSET s390_asan_shadow_offset
15791#undef TARGET_GIMPLIFY_VA_ARG_EXPR
15792#define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg
4798630c 15793
15794#undef TARGET_PROMOTE_FUNCTION_MODE
15795#define TARGET_PROMOTE_FUNCTION_MODE s390_promote_function_mode
15796#undef TARGET_PASS_BY_REFERENCE
15797#define TARGET_PASS_BY_REFERENCE s390_pass_by_reference
4798630c 15798
15799#undef TARGET_FUNCTION_OK_FOR_SIBCALL
15800#define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
15801#undef TARGET_FUNCTION_ARG
15802#define TARGET_FUNCTION_ARG s390_function_arg
15803#undef TARGET_FUNCTION_ARG_ADVANCE
15804#define TARGET_FUNCTION_ARG_ADVANCE s390_function_arg_advance
15805#undef TARGET_FUNCTION_VALUE
15806#define TARGET_FUNCTION_VALUE s390_function_value
15807#undef TARGET_LIBCALL_VALUE
15808#define TARGET_LIBCALL_VALUE s390_libcall_value
15809#undef TARGET_STRICT_ARGUMENT_NAMING
15810#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
4798630c 15811
15812#undef TARGET_KEEP_LEAF_WHEN_PROFILED
15813#define TARGET_KEEP_LEAF_WHEN_PROFILED s390_keep_leaf_when_profiled
15814
15815#undef TARGET_FIXED_CONDITION_CODE_REGS
15816#define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs
4798630c 15817
15818#undef TARGET_CC_MODES_COMPATIBLE
15819#define TARGET_CC_MODES_COMPATIBLE s390_cc_modes_compatible
4798630c 15820
e7e64a25 15821#undef TARGET_INVALID_WITHIN_DOLOOP
ac44248e 15822#define TARGET_INVALID_WITHIN_DOLOOP hook_constcharptr_const_rtx_insn_null
c08b81aa 15823
15824#ifdef HAVE_AS_TLS
15825#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
15826#define TARGET_ASM_OUTPUT_DWARF_DTPREL s390_output_dwarf_dtprel
15827#endif
15828
15829#undef TARGET_DWARF_FRAME_REG_MODE
15830#define TARGET_DWARF_FRAME_REG_MODE s390_dwarf_frame_reg_mode
15831
7269aee7 15832#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
15833#undef TARGET_MANGLE_TYPE
15834#define TARGET_MANGLE_TYPE s390_mangle_type
15835#endif
15836
15837#undef TARGET_SCALAR_MODE_SUPPORTED_P
15838#define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p
15839
15840#undef TARGET_VECTOR_MODE_SUPPORTED_P
15841#define TARGET_VECTOR_MODE_SUPPORTED_P s390_vector_mode_supported_p
15842
15843#undef TARGET_PREFERRED_RELOAD_CLASS
15844#define TARGET_PREFERRED_RELOAD_CLASS s390_preferred_reload_class
15845
15846#undef TARGET_SECONDARY_RELOAD
15847#define TARGET_SECONDARY_RELOAD s390_secondary_reload
15848
15849#undef TARGET_LIBGCC_CMP_RETURN_MODE
15850#define TARGET_LIBGCC_CMP_RETURN_MODE s390_libgcc_cmp_return_mode
15851
15852#undef TARGET_LIBGCC_SHIFT_COUNT_MODE
15853#define TARGET_LIBGCC_SHIFT_COUNT_MODE s390_libgcc_shift_count_mode
15854
15855#undef TARGET_LEGITIMATE_ADDRESS_P
15856#define TARGET_LEGITIMATE_ADDRESS_P s390_legitimate_address_p
15857
15858#undef TARGET_LEGITIMATE_CONSTANT_P
15859#define TARGET_LEGITIMATE_CONSTANT_P s390_legitimate_constant_p
15860
15861#undef TARGET_LRA_P
15862#define TARGET_LRA_P s390_lra_p
15863
15864#undef TARGET_CAN_ELIMINATE
15865#define TARGET_CAN_ELIMINATE s390_can_eliminate
15866
15867#undef TARGET_CONDITIONAL_REGISTER_USAGE
15868#define TARGET_CONDITIONAL_REGISTER_USAGE s390_conditional_register_usage
15869
15870#undef TARGET_LOOP_UNROLL_ADJUST
15871#define TARGET_LOOP_UNROLL_ADJUST s390_loop_unroll_adjust
15872
15873#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
15874#define TARGET_ASM_TRAMPOLINE_TEMPLATE s390_asm_trampoline_template
15875#undef TARGET_TRAMPOLINE_INIT
15876#define TARGET_TRAMPOLINE_INIT s390_trampoline_init
15877
15878/* PR 79421 */
15879#undef TARGET_CUSTOM_FUNCTION_DESCRIPTORS
15880#define TARGET_CUSTOM_FUNCTION_DESCRIPTORS 1
15881
15882#undef TARGET_UNWIND_WORD_MODE
15883#define TARGET_UNWIND_WORD_MODE s390_unwind_word_mode
15884
15885#undef TARGET_CANONICALIZE_COMPARISON
15886#define TARGET_CANONICALIZE_COMPARISON s390_canonicalize_comparison
15887
15888#undef TARGET_HARD_REGNO_SCRATCH_OK
15889#define TARGET_HARD_REGNO_SCRATCH_OK s390_hard_regno_scratch_ok
15890
15891#undef TARGET_ATTRIBUTE_TABLE
15892#define TARGET_ATTRIBUTE_TABLE s390_attribute_table
15893
15894#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
15895#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P hook_bool_const_tree_true
d0de9e13 15896
15897#undef TARGET_SET_UP_BY_PROLOGUE
15898#define TARGET_SET_UP_BY_PROLOGUE s300_set_up_by_prologue
15899
15900#undef TARGET_EXTRA_LIVE_ON_ENTRY
15901#define TARGET_EXTRA_LIVE_ON_ENTRY s390_live_on_entry
15902
15903#undef TARGET_USE_BY_PIECES_INFRASTRUCTURE_P
15904#define TARGET_USE_BY_PIECES_INFRASTRUCTURE_P \
15905 s390_use_by_pieces_infrastructure_p
15906
15907#undef TARGET_ATOMIC_ASSIGN_EXPAND_FENV
15908#define TARGET_ATOMIC_ASSIGN_EXPAND_FENV s390_atomic_assign_expand_fenv
15909
15910#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
15911#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN s390_invalid_arg_for_unprototyped_fn
15912
15913#undef TARGET_VECTORIZE_PREFERRED_SIMD_MODE
15914#define TARGET_VECTORIZE_PREFERRED_SIMD_MODE s390_preferred_simd_mode
15915
15916#undef TARGET_VECTORIZE_SUPPORT_VECTOR_MISALIGNMENT
15917#define TARGET_VECTORIZE_SUPPORT_VECTOR_MISALIGNMENT s390_support_vector_misalignment
15918
15919#undef TARGET_VECTOR_ALIGNMENT
15920#define TARGET_VECTOR_ALIGNMENT s390_vector_alignment
15921
15922#undef TARGET_INVALID_BINARY_OP
15923#define TARGET_INVALID_BINARY_OP s390_invalid_binary_op
15924
15925#ifdef HAVE_AS_MACHINE_MACHINEMODE
15926#undef TARGET_ASM_FILE_START
15927#define TARGET_ASM_FILE_START s390_asm_file_start
15928#endif
15929
15930#undef TARGET_ASM_FILE_END
15931#define TARGET_ASM_FILE_END s390_asm_file_end
15932
15933#if S390_USE_TARGET_ATTRIBUTE
15934#undef TARGET_SET_CURRENT_FUNCTION
15935#define TARGET_SET_CURRENT_FUNCTION s390_set_current_function
15936
15937#undef TARGET_OPTION_VALID_ATTRIBUTE_P
15938#define TARGET_OPTION_VALID_ATTRIBUTE_P s390_valid_target_attribute_p
15939#endif
15940
15941#undef TARGET_OPTION_RESTORE
15942#define TARGET_OPTION_RESTORE s390_function_specific_restore
15943
ab96de7e 15944struct gcc_target targetm = TARGET_INITIALIZER;
38899e29 15945
29742ba4 15946#include "gt-s390.h"