;; Predicate definitions for S/390 and zSeries.
;; Copyright (C) 2005-2021 Free Software Foundation, Inc.
;; Contributed by Hartmut Penner (hpenner@de.ibm.com) and
;;                Ulrich Weigand (uweigand@de.ibm.com).
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

;; OP is the current operation.
;; MODE is the current operation mode.

;; operands --------------------------------------------------------------

;; Return true if OP is a const 0 operand (int/float/vector).
(define_predicate "const0_operand"
  (and (match_code "const_int,const_wide_int,const_double,const_vector")
       (match_test "op == CONST0_RTX (mode)")))

;; Return true if OP is an all-ones operand (int/vector).
(define_predicate "all_ones_operand"
  (and (match_code "const_int, const_wide_int, const_vector")
       (match_test "INTEGRAL_MODE_P (GET_MODE (op))")
       (match_test "op == CONSTM1_RTX (mode)")))

;; Return true if OP is a 4-bit mask operand,
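;; i.e. a const_int with a value in the range 0 .. 15.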
(define_predicate "const_mask_operand"
  (and (match_code "const_int")
       (match_test "UINTVAL (op) < 16")))

;; Return true if OP is constant.

(define_special_predicate "consttable_operand"
  (and (match_code "symbol_ref, label_ref, const, const_int, const_wide_int, const_double, const_vector")
       (match_test "CONSTANT_P (op)")))

; An operand used as a vector permutation pattern.

; This in particular accepts constants which would otherwise be
; rejected.  These constants require special post-reload handling.

(define_special_predicate "permute_pattern_operand"
  (and (match_code "const_vector,mem,reg,subreg")
       (match_test "GET_MODE (op) == V16QImode")
       (match_test "!MEM_P (op) || s390_mem_constraint (\"R\", op)")))

;; Return true if OP is a valid S-type operand.

(define_predicate "s_operand"
  (and (match_code "subreg, mem")
       (match_operand 0 "general_operand"))
{
  /* Just like memory_operand, allow (subreg (mem ...))
     after reload.  */
  if (reload_completed
      && GET_CODE (op) == SUBREG
      && GET_CODE (SUBREG_REG (op)) == MEM)
    op = SUBREG_REG (op);

  if (GET_CODE (op) != MEM)
    return false;
  if (!s390_legitimate_address_without_index_p (op))
    return false;

  return true;
})

;; Return true if the address of the mem operand plus 16 is still a
;; valid Q constraint address.

(define_predicate "plus16_Q_operand"
  (and (match_code "mem")
       (match_operand 0 "general_operand"))
{
  rtx addr = XEXP (op, 0);
  if (REG_P (addr))
    return true;

  if (GET_CODE (addr) != PLUS
      || !REG_P (XEXP (addr, 0))
      || !CONST_INT_P (XEXP (addr, 1)))
    return false;

  return SHORT_DISP_IN_RANGE (INTVAL (XEXP (addr, 1)) + 16);
})

;; Return true if OP is a valid operand for the BRAS instruction.
;; Allow SYMBOL_REFs and @PLT stubs.

(define_special_predicate "bras_sym_operand"
  (ior (and (match_code "symbol_ref")
            (match_test "!flag_pic || SYMBOL_REF_LOCAL_P (op)"))
       (and (match_code "const")
            (and (match_test "GET_CODE (XEXP (op, 0)) == UNSPEC")
                 (match_test "XINT (XEXP (op, 0), 1) == UNSPEC_PLT")))))

;; Return true if OP is a PLUS that is not a legitimate
;; operand for the LA instruction.

(define_predicate "s390_plus_operand"
  (and (match_code "plus")
       (and (match_test "mode == Pmode")
            (match_test "!legitimate_la_operand_p (op)"))))

;; Return true if OP is a valid operand as a scalar shift count or for setmem.

(define_predicate "setmem_operand"
  (match_code "reg, subreg, plus, const_int")
{
  HOST_WIDE_INT offset;
  rtx base;

  if (GET_MODE (op) != VOIDmode
      && GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
    return false;

  /* Extract base register and offset.  */
  if (!s390_decompose_addrstyle_without_index (op, &base, &offset))
    return false;

  /* Don't allow any non-base hard registers.  Doing so without
     confusing reload and/or regrename would be tricky, and doesn't
     buy us much anyway.  */
  if (base && REGNO (base) < FIRST_PSEUDO_REGISTER && !ADDR_REG_P (base))
    return false;

  /* Unfortunately we have to reject constants that are invalid
     for an address, or else reload will get confused.  */
  if (!DISP_IN_RANGE (offset))
    return false;

  return true;
})

; An integer operand with the lowest-order 6 bits all ones.
(define_predicate "const_int_6bitset_operand"
  (and (match_code "const_int")
       (match_test "(INTVAL (op) & 63) == 63")))
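
; A nonzero constant shift count, i.e. a const_int strictly between 0
; and the number of bits in the mode.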
(define_predicate "nonzero_shift_count_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, GET_MODE_BITSIZE (mode) - 1)")))

;; Return true if OP is a valid operand for the LARL instruction.

(define_predicate "larl_operand"
  (match_code "label_ref, symbol_ref, const")
{
  /* Allow labels and local symbols.  */
  if (GET_CODE (op) == LABEL_REF)
    return true;
  if (SYMBOL_REF_P (op))
    return (!SYMBOL_FLAG_NOTALIGN2_P (op)
            && SYMBOL_REF_TLS_MODEL (op) == 0
            && s390_rel_address_ok_p (op));

  /* Everything else must have a CONST, so strip it.  */
  if (GET_CODE (op) != CONST)
    return false;
  op = XEXP (op, 0);

  /* Allow adding *even* in-range constants.  */
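  /* LARL specifies its target relative to the instruction in halfwords,
     so only even offsets keep the 2-byte alignment it can reach.  */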
  if (GET_CODE (op) == PLUS)
    {
      if (GET_CODE (XEXP (op, 1)) != CONST_INT
          || (INTVAL (XEXP (op, 1)) & 1) != 0)
        return false;
      if (INTVAL (XEXP (op, 1)) >= HOST_WIDE_INT_1 << 31
          || INTVAL (XEXP (op, 1)) < -(HOST_WIDE_INT_1 << 31))
        return false;
      op = XEXP (op, 0);
    }

  /* Labels and local symbols allowed here as well.  */
  if (GET_CODE (op) == LABEL_REF)
    return true;
  if (SYMBOL_REF_P (op))
    return (!SYMBOL_FLAG_NOTALIGN2_P (op)
            && SYMBOL_REF_TLS_MODEL (op) == 0
            && s390_rel_address_ok_p (op));

  /* Now we must have a @GOTENT offset or @PLT stub
     or an @INDNTPOFF TLS offset.  */
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_GOTENT)
    return true;
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_PLT)
    return true;
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_INDNTPOFF)
    return true;

  return false;
})

; A contiguous bitmask operand; the range of one-bits is allowed to
; wrap around.
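; For example, in SImode 0x00ff0000 is accepted by both variants, while
; a mask like 0xff0000ff (the one-bits wrap around the ends of the
; value) is accepted here but rejected by the _nowrap variant below.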
(define_predicate "contiguous_bitmask_operand"
  (match_code "const_int")
{
  return s390_contiguous_bitmask_p (INTVAL (op), true,
                                    GET_MODE_BITSIZE (mode), NULL, NULL);
})

; Same without wraparound.
(define_predicate "contiguous_bitmask_nowrap_operand"
  (match_code "const_int")
{
  return s390_contiguous_bitmask_p
    (INTVAL (op), false, GET_MODE_BITSIZE (mode), NULL, NULL);
})

;; Return true if OP is legitimate for any LOC instruction.
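;; Immediates are limited to the signed 16-bit range below so that they
;; fit the halfword-immediate forms (LOCHI and friends).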

(define_predicate "loc_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (and (match_code "const_int")
            (match_test "INTVAL (op) <= 32767 && INTVAL (op) >= -32768"))))

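;; Return true if OP is a const_wide_int that is acceptable as a reload
;; constant (see legitimate_reload_constant_p).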
(define_predicate "reload_const_wide_int_operand"
  (and (match_code "const_wide_int")
       (match_test "legitimate_reload_constant_p (op)")))


;; operators --------------------------------------------------------------

;; Return nonzero if OP is a valid comparison operator
;; for a branch condition.

(define_predicate "s390_comparison"
  (match_code "eq, ne, lt, gt, le, ge, ltu, gtu, leu, geu,
               uneq, unlt, ungt, unle, unge, ltgt,
               unordered, ordered")
{
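  /* The first operand must be the CC register.  With CCRAWmode the
     second operand may be an explicit condition code mask in the
     range 0 .. 15 instead of (const_int 0).  */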
  if (GET_CODE (XEXP (op, 0)) != REG
      || REGNO (XEXP (op, 0)) != CC_REGNUM
      || (XEXP (op, 1) != const0_rtx
          && !(CONST_INT_P (XEXP (op, 1))
               && GET_MODE (XEXP (op, 0)) == CCRAWmode
               && INTVAL (XEXP (op, 1)) >= 0
               && INTVAL (XEXP (op, 1)) <= 15)))
    return false;

  return (s390_branch_condition_mask (op) >= 0);
})

;; Return true if OP is the CC register.
(define_predicate "cc_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == CC_REGNUM")))

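;; Return nonzero if OP is a signed integer comparison whose condition
;; can be encoded in a compare-and-branch style instruction.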
(define_predicate "s390_signed_integer_comparison"
  (match_code "eq, ne, lt, gt, le, ge")
{
  return (s390_compare_and_branch_condition_mask (op) >= 0);
})

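;; Likewise for unsigned integer comparisons.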
(define_predicate "s390_unsigned_integer_comparison"
  (match_code "eq, ne, ltu, gtu, leu, geu")
{
  return (s390_compare_and_branch_condition_mask (op) >= 0);
})

;; Return nonzero if OP is a valid comparison operator for the
;; cstore expanders -- respectively cstorecc4 and integer cstore.
(define_predicate "s390_eqne_operator"
  (match_code "eq, ne"))

(define_predicate "s390_scond_operator"
  (match_code "ltu, gtu, leu, geu"))

(define_predicate "s390_brx_operator"
  (match_code "le, gt"))

;; Return nonzero if OP is a valid comparison operator
;; for an ALC condition.

(define_predicate "s390_alc_comparison"
  (match_code "zero_extend, sign_extend, ltu, gtu, leu, geu")
{
  while (GET_CODE (op) == ZERO_EXTEND || GET_CODE (op) == SIGN_EXTEND)
    op = XEXP (op, 0);

  if (!COMPARISON_P (op))
    return false;

  if (GET_CODE (XEXP (op, 0)) != REG
      || REGNO (XEXP (op, 0)) != CC_REGNUM
      || (XEXP (op, 1) != const0_rtx
          && !(CONST_INT_P (XEXP (op, 1))
               && GET_MODE (XEXP (op, 0)) == CCRAWmode
               && INTVAL (XEXP (op, 1)) >= 0
               && INTVAL (XEXP (op, 1)) <= 15)))
    return false;

  switch (GET_MODE (XEXP (op, 0)))
    {
    case E_CCL1mode:
      return GET_CODE (op) == LTU;

    case E_CCL2mode:
      return GET_CODE (op) == LEU;

    case E_CCL3mode:
      return GET_CODE (op) == GEU;

    case E_CCUmode:
      return GET_CODE (op) == GTU;

    case E_CCURmode:
      return GET_CODE (op) == LTU;

    case E_CCSmode:
      return GET_CODE (op) == UNGT;

    case E_CCSRmode:
      return GET_CODE (op) == UNLT;

    default:
      return false;
    }
})

;; Return nonzero if OP is a valid comparison operator
;; for an SLB condition.

(define_predicate "s390_slb_comparison"
  (match_code "zero_extend, sign_extend, ltu, gtu, leu, geu")
{
  while (GET_CODE (op) == ZERO_EXTEND || GET_CODE (op) == SIGN_EXTEND)
    op = XEXP (op, 0);

  if (!COMPARISON_P (op))
    return false;

  if (GET_CODE (XEXP (op, 0)) != REG
      || REGNO (XEXP (op, 0)) != CC_REGNUM
      || XEXP (op, 1) != const0_rtx)
    return false;

  switch (GET_MODE (XEXP (op, 0)))
    {
    case E_CCL1mode:
      return GET_CODE (op) == GEU;

    case E_CCL2mode:
      return GET_CODE (op) == GTU;

    case E_CCL3mode:
      return GET_CODE (op) == LTU;

    case E_CCUmode:
      return GET_CODE (op) == LEU;

    case E_CCURmode:
      return GET_CODE (op) == GEU;

    case E_CCSmode:
      return GET_CODE (op) == LE;

    case E_CCSRmode:
      return GET_CODE (op) == GE;

    default:
      return false;
    }
})

;; Return true if OP is a load multiple operation.  It is known to be a
;; PARALLEL and the first section will be tested.

(define_special_predicate "load_multiple_operation"
  (match_code "parallel")
{
  machine_mode elt_mode;
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  int i, off;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return false;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
  elt_mode = GET_MODE (SET_DEST (XVECEXP (op, 0, 0)));

  /* Check whether the address is a plain base register or
     base + displacement.  */

  if (GET_CODE (src_addr) == REG)
    off = 0;
  else if (GET_CODE (src_addr) == PLUS
           && GET_CODE (XEXP (src_addr, 0)) == REG
           && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (src_addr, 1));
      src_addr = XEXP (src_addr, 0);
    }
  else
    return false;

  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_DEST (elt)) != REG
          || GET_MODE (SET_DEST (elt)) != elt_mode
          || REGNO (SET_DEST (elt)) != dest_regno + i
          || GET_CODE (SET_SRC (elt)) != MEM
          || GET_MODE (SET_SRC (elt)) != elt_mode
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
          || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
          || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
             != off + i * GET_MODE_SIZE (elt_mode))
        return false;
    }

  return true;
})

;; For an execute pattern the target instruction is embedded into the
;; RTX but will not get checked for validity by recog automatically.
;; The execute_operation predicate extracts the target RTX and invokes
;; recog.
(define_special_predicate "execute_operation"
  (match_code "parallel")
{
  rtx pattern = op;
  rtx_insn *insn;
  int icode;

  /* This is redundant but since this predicate is evaluated
     first when recognizing the insn we can prevent the more
     expensive code below from being executed for many cases.  */
  if (GET_CODE (XVECEXP (pattern, 0, 0)) != UNSPEC
      || XINT (XVECEXP (pattern, 0, 0), 1) != UNSPEC_EXECUTE)
    return false;

  /* Keep in sync with s390_execute_target.  */
  if (XVECLEN (pattern, 0) == 2)
    {
      pattern = copy_rtx (XVECEXP (pattern, 0, 1));
    }
  else
    {
      rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
      int i;

      for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
        RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));

      pattern = gen_rtx_PARALLEL (VOIDmode, vec);
    }

  /* Since we do not have the wrapping insn here we have to build one.  */
  insn = make_insn_raw (pattern);
  icode = recog_memoized (insn);
  if (icode < 0)
    return false;

  extract_constrain_insn (insn);

  return which_alternative >= 0;
})

;; Return true if OP is a store multiple operation.  It is known to be a
;; PARALLEL and the first section will be tested.

(define_special_predicate "store_multiple_operation"
  (match_code "parallel")
{
  machine_mode elt_mode;
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  int i, off;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return false;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
  elt_mode = GET_MODE (SET_SRC (XVECEXP (op, 0, 0)));

  /* Check whether the address is a plain base register or
     base + displacement.  */

  if (GET_CODE (dest_addr) == REG)
    off = 0;
  else if (GET_CODE (dest_addr) == PLUS
           && GET_CODE (XEXP (dest_addr, 0)) == REG
           && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (dest_addr, 1));
      dest_addr = XEXP (dest_addr, 0);
    }
  else
    return false;

  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_SRC (elt)) != REG
          || GET_MODE (SET_SRC (elt)) != elt_mode
          || REGNO (SET_SRC (elt)) != src_regno + i
          || GET_CODE (SET_DEST (elt)) != MEM
          || GET_MODE (SET_DEST (elt)) != elt_mode
          || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
          || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
          || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
             != off + i * GET_MODE_SIZE (elt_mode))
        return false;
    }
  return true;
})

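;; Return true if OP is a constant shift count for a shift-by-byte
;; operation: a multiple of 8 that does not exceed 128.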
(define_predicate "const_shift_by_byte_operand"
  (match_code "const_int")
{
  unsigned HOST_WIDE_INT val = INTVAL (op);
  return val <= 128 && val % 8 == 0;
})

;; Certain operations (e.g. CS) cannot access a SYMBOL_REF directly; it
;; needs to be loaded into some register first.  In theory, if we put a
;; SYMBOL_REF into a corresponding insn anyway, reload will generate a
;; load for it, but, when coupled with constant propagation, this leads
;; to inefficient code (see PR 80080).

(define_predicate "nonsym_memory_operand"
  (match_code "mem")
{
  return memory_operand (op, mode) && !contains_symbol_ref_p (op);
})

;; Check for a valid shift count operand with an implicit
;; shift truncation mask of 63.
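;; A mask of 63 means that only the low six bits of the count are
;; significant, matching how the 64-bit shift instructions use only the
;; rightmost six bits of the effective address.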

(define_predicate "shift_count_operand"
  (and (match_code "reg, subreg, and, plus, const_int")
       (match_test "CONST_INT_P (op) || GET_MODE (op) == E_QImode"))
{
  return s390_valid_shift_count (op, 63);
}
)

;; This is used as an operand predicate.  As we do not know
;; the mode of the first operand here and the shift truncation
;; mask depends on the mode, we cannot check the mask.
;; This is supposed to happen in the insn condition, which
;; calls s390_valid_shift_count with the proper mode size.
;; We need two separate predicates for non-vector and vector
;; shifts since the (less restrictive) insn condition is checked
;; after the more restrictive operand predicate, which will
;; disallow the operand before we can check the condition.

(define_predicate "shift_count_operand_vec"
  (and (match_code "reg, subreg, and, plus, const_int")
       (match_test "CONST_INT_P (op) || GET_MODE (op) == E_QImode"))
{
  return s390_valid_shift_count (op, 0);
}
)

; An integer constant which can be used in a signed add with overflow
; pattern without being reloaded.
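; The range corresponds to the signed 16-bit immediate accepted by the
; halfword-immediate add instructions (e.g. AHI/AGHI).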
(define_predicate "addv_const_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) >= -32768 && INTVAL (op) <= 32767")))