;; Predicate definitions for IA-32 and x86-64.
;; Copyright (C) 2004-2022 Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

;; Return true if OP is either an i387 or SSE fp register.
(define_predicate "any_fp_register_operand"
  (and (match_code "reg")
       (match_test "ANY_FP_REGNO_P (REGNO (op))")))

;; Return true if OP is an i387 fp register.
(define_predicate "fp_register_operand"
  (and (match_code "reg")
       (match_test "STACK_REGNO_P (REGNO (op))")))

;; True if the operand is a GENERAL class register.
(define_predicate "general_reg_operand"
  (and (match_code "reg")
       (match_test "GENERAL_REGNO_P (REGNO (op))")))

;; True if the operand is a nonimmediate operand with a GENERAL class register.
(define_predicate "nonimmediate_gr_operand"
  (if_then_else (match_code "reg")
    (match_test "GENERAL_REGNO_P (REGNO (op))")
    (match_operand 0 "nonimmediate_operand")))

;; True if the operand is a general operand with a GENERAL class register.
(define_predicate "general_gr_operand"
  (if_then_else (match_code "reg")
    (match_test "GENERAL_REGNO_P (REGNO (op))")
    (match_operand 0 "general_operand")))

;; True if the operand is an MMX register.
(define_predicate "mmx_reg_operand"
  (and (match_code "reg")
       (match_test "MMX_REGNO_P (REGNO (op))")))

;; Match register operands, but include memory operands for
;; !TARGET_MMX_WITH_SSE.
(define_predicate "register_mmxmem_operand"
  (ior (match_operand 0 "register_operand")
       (and (not (match_test "TARGET_MMX_WITH_SSE"))
            (match_operand 0 "memory_operand"))))

;; True if the operand is an SSE register.
(define_predicate "sse_reg_operand"
  (and (match_code "reg")
       (match_test "SSE_REGNO_P (REGNO (op))")))

;; Return true if op is a QImode register.
(define_predicate "any_QIreg_operand"
  (and (match_code "reg")
       (match_test "ANY_QI_REGNO_P (REGNO (op))")))

;; Return true if op is one of the QImode registers %[abcd][hl].
(define_predicate "QIreg_operand"
  (and (match_code "reg")
       (match_test "QI_REGNO_P (REGNO (op))")))

;; Return true if op is a QImode register operand other than %[abcd][hl].
(define_predicate "ext_QIreg_operand"
  (and (match_test "TARGET_64BIT")
       (match_code "reg")
       (not (match_test "QI_REGNO_P (REGNO (op))"))))

;; Return true if op is the AX register.
(define_predicate "ax_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == AX_REG")))

;; Return true if op is the flags register.
(define_predicate "flags_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == FLAGS_REG")))

;; True if the operand is a MASK register.
(define_predicate "mask_reg_operand"
  (and (match_code "reg")
       (match_test "MASK_REGNO_P (REGNO (op))")))

;; Match a DI, SI, HI or QImode nonimmediate_operand.
(define_special_predicate "int_nonimmediate_operand"
  (and (match_operand 0 "nonimmediate_operand")
       (ior (and (match_test "TARGET_64BIT")
                 (match_test "GET_MODE (op) == DImode"))
            (match_test "GET_MODE (op) == SImode")
            (match_test "GET_MODE (op) == HImode")
            (match_test "GET_MODE (op) == QImode"))))

;; Match register operands, but include memory operands for TARGET_SSE_MATH.
(define_predicate "register_ssemem_operand"
  (if_then_else
    (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

;; Match nonimmediate operands, but exclude memory operands
;; for TARGET_SSE_MATH if TARGET_MIX_SSE_I387 is not enabled.
(define_predicate "nonimm_ssenomem_operand"
  (if_then_else
    (and (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
         (not (match_test "TARGET_MIX_SSE_I387")))
    (match_operand 0 "register_operand")
    (match_operand 0 "nonimmediate_operand")))

;; The above predicate, suitable for x87 arithmetic operators.
(define_predicate "x87nonimm_ssenomem_operand"
  (if_then_else
    (and (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
         (not (match_test "TARGET_MIX_SSE_I387 && X87_ENABLE_ARITH (mode)")))
    (match_operand 0 "register_operand")
    (match_operand 0 "nonimmediate_operand")))

;; Match register operands, but include memory operands for TARGET_SSE4_1.
(define_predicate "register_sse4nonimm_operand"
  (if_then_else (match_test "TARGET_SSE4_1")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

;; Return true if VALUE is a symbol reference.
(define_predicate "symbol_operand"
  (match_code "symbol_ref"))

;; Return true if VALUE is an ENDBR opcode in the immediate field.
(define_predicate "ix86_endbr_immediate_operand"
  (match_code "const_int")
{
  if (flag_cf_protection & CF_BRANCH)
    {
      unsigned HOST_WIDE_INT imm = UINTVAL (op);
      unsigned HOST_WIDE_INT val = TARGET_64BIT ? 0xfa1e0ff3 : 0xfb1e0ff3;

      if (imm == val)
	return true;

      /* NB: Encoding is byte based.  */
      if (TARGET_64BIT)
	for (; imm >= val; imm >>= 8)
	  if (imm == val)
	    return true;
    }

  return false;
})
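;; For example, with -fcf-protection=branch on x86-64 this predicate accepts
;; (const_int 0xfa1e0ff3) -- the ENDBR64 encoding -- as well as wider
;; constants whose upper bytes contain that encoding, so such values are
;; kept out of instruction immediates.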

;; Return true if VALUE can be stored in a sign extended immediate field.
(define_predicate "x86_64_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  if (ix86_endbr_immediate_operand (op, VOIDmode))
    return false;

  if (!TARGET_64BIT)
    return immediate_operand (op, mode);

  switch (GET_CODE (op))
    {
    case CONST_INT:
      {
	HOST_WIDE_INT val = INTVAL (op);
	return trunc_int_for_mode (val, SImode) == val;
      }
    case SYMBOL_REF:
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
	return false;

      /* Load the external function address via the GOT slot.  */
      if (ix86_force_load_from_GOT_p (op))
	return false;

      /* For certain code models, the symbolic references are known to fit.
	 In CM_SMALL_PIC model we know it fits if it is local to the shared
	 library.  Don't count TLS SYMBOL_REFs here, since they should fit
	 only if inside of UNSPEC handled below.  */
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
	      || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
	      || ix86_cmodel == CM_KERNEL);

    case CONST:
      /* We also may accept the offsetted memory references in certain
	 special cases.  */
      if (GET_CODE (XEXP (op, 0)) == UNSPEC)
	switch (XINT (XEXP (op, 0), 1))
	  {
	  case UNSPEC_GOTPCREL:
	  case UNSPEC_DTPOFF:
	  case UNSPEC_GOTNTPOFF:
	  case UNSPEC_NTPOFF:
	    return true;
	  default:
	    break;
	  }

      if (GET_CODE (XEXP (op, 0)) == PLUS)
	{
	  rtx op1 = XEXP (XEXP (op, 0), 0);
	  rtx op2 = XEXP (XEXP (op, 0), 1);

	  if (ix86_cmodel == CM_LARGE && GET_CODE (op1) != UNSPEC)
	    return false;
	  if (!CONST_INT_P (op2))
	    return false;

	  HOST_WIDE_INT offset = INTVAL (op2);
	  if (trunc_int_for_mode (offset, SImode) != offset)
	    return false;

	  switch (GET_CODE (op1))
	    {
	    case SYMBOL_REF:
	      /* TLS symbols are not constant.  */
	      if (SYMBOL_REF_TLS_MODEL (op1))
		return false;

	      /* Load the external function address via the GOT slot.  */
	      if (ix86_force_load_from_GOT_p (op1))
		return false;

	      /* For CM_SMALL assume that the latest object is 16MB before
		 the end of the 31-bit boundary.  We may also accept pretty
		 large negative constants, knowing that all objects are in
		 the positive half of the address space.  */
	      if ((ix86_cmodel == CM_SMALL
		   || (ix86_cmodel == CM_MEDIUM
		       && !SYMBOL_REF_FAR_ADDR_P (op1)))
		  && offset < 16*1024*1024)
		return true;
	      /* For CM_KERNEL we know that all objects reside in the
		 negative half of the 32-bit address space.  We may not
		 accept negative offsets, since they may be just off,
		 and we may accept pretty large positive ones.  */
	      if (ix86_cmodel == CM_KERNEL
		  && offset > 0)
		return true;
	      break;

	    case LABEL_REF:
	      /* These conditions are similar to SYMBOL_REF ones, just the
		 constraints for code models differ.  */
	      if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
		  && offset < 16*1024*1024)
		return true;
	      if (ix86_cmodel == CM_KERNEL
		  && offset > 0)
		return true;
	      break;

	    case UNSPEC:
	      switch (XINT (op1, 1))
		{
		case UNSPEC_DTPOFF:
		case UNSPEC_NTPOFF:
		  return true;
		}
	      break;

	    default:
	      break;
	    }
	}
      break;

    default:
      gcc_unreachable ();
    }

  return false;
})
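;; For example, on x86-64 (const_int -1) and (const_int 0x7fffffff) satisfy
;; this predicate, while (const_int 0x80000000) does not, since the latter
;; cannot be represented as a sign-extended 32-bit immediate.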

;; Return true if VALUE can be stored in the zero extended immediate field.
(define_predicate "x86_64_zext_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  if (ix86_endbr_immediate_operand (op, VOIDmode))
    return false;

  switch (GET_CODE (op))
    {
    case CONST_INT:
      return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);

    case SYMBOL_REF:
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
	return false;

      /* Load the external function address via the GOT slot.  */
      if (ix86_force_load_from_GOT_p (op))
	return false;

      /* For certain code models, the symbolic references are known to fit.  */
      return (ix86_cmodel == CM_SMALL
	      || (ix86_cmodel == CM_MEDIUM
		  && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;

    case CONST:
      /* We also may accept the offsetted memory references in certain
	 special cases.  */
      if (GET_CODE (XEXP (op, 0)) == PLUS)
	{
	  rtx op1 = XEXP (XEXP (op, 0), 0);
	  rtx op2 = XEXP (XEXP (op, 0), 1);

	  if (ix86_cmodel == CM_LARGE)
	    return false;
	  if (!CONST_INT_P (op2))
	    return false;

	  HOST_WIDE_INT offset = INTVAL (op2);
	  if (trunc_int_for_mode (offset, SImode) != offset)
	    return false;

	  switch (GET_CODE (op1))
	    {
	    case SYMBOL_REF:
	      /* TLS symbols are not constant.  */
	      if (SYMBOL_REF_TLS_MODEL (op1))
		return false;

	      /* Load the external function address via the GOT slot.  */
	      if (ix86_force_load_from_GOT_p (op1))
		return false;

	      /* For the small code model we may accept pretty large positive
		 offsets, since one bit is available for free.  Negative
		 offsets are limited by the size of the NULL pointer area
		 specified by the ABI.  */
	      if ((ix86_cmodel == CM_SMALL
		   || (ix86_cmodel == CM_MEDIUM
		       && !SYMBOL_REF_FAR_ADDR_P (op1)))
		  && offset > -0x10000)
		return true;
	      /* ??? For the kernel, we may accept adjustment of
		 -0x10000000, since we know that it will just convert
		 negative address space to positive, but perhaps this
		 is not worthwhile.  */
	      break;

	    case LABEL_REF:
	      /* These conditions are similar to SYMBOL_REF ones, just the
		 constraints for code models differ.  */
	      if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
		  && offset > -0x10000)
		return true;
	      break;

	    default:
	      return false;
	    }
	}
      break;

    default:
      gcc_unreachable ();
    }
  return false;
})
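;; For example, (const_int 0xffffffff) satisfies this predicate (it fits
;; when zero extended), whereas (const_int -1) does not.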

;; Return true if VALUE is a constant integer whose low and high words satisfy
;; x86_64_immediate_operand.
(define_predicate "x86_64_hilo_int_operand"
  (match_code "const_int,const_wide_int")
{
  switch (GET_CODE (op))
    {
    case CONST_INT:
      return x86_64_immediate_operand (op, mode);

    case CONST_WIDE_INT:
      gcc_assert (CONST_WIDE_INT_NUNITS (op) == 2);
      return (x86_64_immediate_operand (GEN_INT (CONST_WIDE_INT_ELT (op, 0)),
					DImode)
	      && x86_64_immediate_operand (GEN_INT (CONST_WIDE_INT_ELT (op,
									 1)),
					   DImode));

    default:
      gcc_unreachable ();
    }
})

;; Return true if VALUE is a constant integer whose value is an
;; x86_64_immediate_operand value zero-extended from word mode to mode.
(define_predicate "x86_64_dwzext_immediate_operand"
  (match_code "const_int,const_wide_int")
{
  if (ix86_endbr_immediate_operand (op, VOIDmode))
    return false;

  switch (GET_CODE (op))
    {
    case CONST_INT:
      if (!TARGET_64BIT)
	return UINTVAL (op) <= HOST_WIDE_INT_UC (0xffffffff);
      return UINTVAL (op) <= HOST_WIDE_INT_UC (0x7fffffff);

    case CONST_WIDE_INT:
      if (!TARGET_64BIT)
	return false;
      return (CONST_WIDE_INT_NUNITS (op) == 2
	      && CONST_WIDE_INT_ELT (op, 1) == 0
	      && (trunc_int_for_mode (CONST_WIDE_INT_ELT (op, 0), SImode)
		  == (HOST_WIDE_INT) CONST_WIDE_INT_ELT (op, 0)));

    default:
      gcc_unreachable ();
    }
})
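;; For example, on x86-64 (const_int 0x7fffffff) matches, while
;; (const_int 0x80000000) does not; a two-word CONST_WIDE_INT matches only
;; when its upper half is zero and its lower half fits a sign-extended
;; 32-bit immediate.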

;; Return true if the size of VALUE can be stored in a sign
;; extended immediate field.
(define_predicate "x86_64_immediate_size_operand"
  (and (match_code "symbol_ref")
       (ior (not (match_test "TARGET_64BIT"))
	    (match_test "ix86_cmodel == CM_SMALL")
	    (match_test "ix86_cmodel == CM_KERNEL"))))

;; Return true if OP is a general operand representable on x86_64.
(define_predicate "x86_64_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return true if both of OP's words are general operands representable
;; on x86_64.
(define_predicate "x86_64_hilo_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_hilo_int_operand"))
    (match_operand 0 "general_operand")))

;; Return true if OP is a non-VOIDmode general operand representable
;; on x86_64.  This predicate is used in sign-extending conversion
;; operations that require non-VOIDmode immediate operands.
(define_predicate "x86_64_sext_operand"
  (and (match_test "GET_MODE (op) != VOIDmode")
       (match_operand 0 "x86_64_general_operand")))

;; Return true if OP is a non-VOIDmode general operand.  This predicate
;; is used in sign-extending conversion operations that require
;; non-VOIDmode immediate operands.
(define_predicate "sext_operand"
  (and (match_test "GET_MODE (op) != VOIDmode")
       (match_operand 0 "general_operand")))

;; Return true if OP is representable on x86_64 as a zero-extended operand.
;; This predicate is used in zero-extending conversion operations that
;; require non-VOIDmode immediate operands.
(define_predicate "x86_64_zext_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (and (match_operand 0 "x86_64_zext_immediate_operand")
	      (match_test "GET_MODE (op) != VOIDmode")))
    (match_operand 0 "nonimmediate_operand")))

;; Return true if OP is a general operand representable on x86_64
;; as either a sign-extended or a zero-extended constant.
(define_predicate "x86_64_szext_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_immediate_operand")
	 (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return true if OP is a nonmemory operand representable on x86_64.
(define_predicate "x86_64_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))

;; Return true if OP is a nonmemory operand representable on x86_64
;; as either a sign-extended or a zero-extended constant.
(define_predicate "x86_64_szext_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (match_operand 0 "x86_64_immediate_operand")
	 (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))

;; Return true when the operand is a PIC expression that can be computed
;; by the lea operation.
(define_predicate "pic_32bit_operand"
  (match_code "const,symbol_ref,label_ref")
{
  if (!flag_pic)
    return false;

  /* Rule out relocations that translate into 64bit constants.  */
  if (TARGET_64BIT && GET_CODE (op) == CONST)
    {
      op = XEXP (op, 0);
      if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
	op = XEXP (op, 0);
      if (GET_CODE (op) == UNSPEC
	  && (XINT (op, 1) == UNSPEC_GOTOFF
	      || XINT (op, 1) == UNSPEC_GOT))
	return false;
    }

  return symbolic_operand (op, mode);
})

;; Return true if OP is a nonmemory operand acceptable by movabs patterns.
(define_predicate "x86_64_movabs_operand"
  (and (match_operand 0 "nonmemory_operand")
       (not (match_operand 0 "pic_32bit_operand"))))

;; Return true if OP is either a symbol reference or a sum of a symbol
;; reference and a constant.
(define_predicate "symbolic_operand"
  (match_code "symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return true;

    case CONST:
      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF
	  || (GET_CODE (op) == UNSPEC
	      && (XINT (op, 1) == UNSPEC_GOT
		  || XINT (op, 1) == UNSPEC_GOTOFF
		  || XINT (op, 1) == UNSPEC_PCREL
		  || XINT (op, 1) == UNSPEC_GOTPCREL)))
	return true;
      if (GET_CODE (op) != PLUS
	  || !CONST_INT_P (XEXP (op, 1)))
	return false;

      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return true;
      /* Only @GOTOFF gets offsets.  */
      if (GET_CODE (op) != UNSPEC
	  || XINT (op, 1) != UNSPEC_GOTOFF)
	return false;

      op = XVECEXP (op, 0, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return true;
      return false;

    default:
      gcc_unreachable ();
    }
})

;; Return true if OP is a symbolic operand that resolves locally.
(define_predicate "local_symbolic_operand"
  (match_code "const,label_ref,symbol_ref")
{
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) == LABEL_REF)
    return true;

  if (GET_CODE (op) != SYMBOL_REF)
    return false;

  if (SYMBOL_REF_TLS_MODEL (op))
    return false;

  /* Dll-imported symbols are always external.  */
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
    return false;
  if (SYMBOL_REF_LOCAL_P (op))
    return true;

  /* There is, however, a not insubstantial body of code in the rest of
     the compiler that assumes it can just stick the results of
     ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done.  */
  /* ??? This is a hack.  Should update the body of the compiler to
     always create a DECL and invoke targetm.encode_section_info.  */
  if (strncmp (XSTR (op, 0), internal_label_prefix,
	       internal_label_prefix_len) == 0)
    return true;

  return false;
})

(define_predicate "local_func_symbolic_operand"
  (match_operand 0 "local_symbolic_operand")
{
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) == SYMBOL_REF
      && !SYMBOL_REF_FUNCTION_P (op))
    return false;

  return true;
})

;; Test for a legitimate @GOTOFF operand.
;;
;; VxWorks does not impose a fixed gap between segments; the run-time
;; gap can be different from the object-file gap.  We therefore can't
;; use @GOTOFF unless we are absolutely sure that the symbol is in the
;; same segment as the GOT.  Unfortunately, the flexibility of linker
;; scripts means that we can't be sure of that in general, so assume
;; @GOTOFF is not valid on VxWorks, except with the large code model.
(define_predicate "gotoff_operand"
  (and (ior (not (match_test "TARGET_VXWORKS_RTP"))
	    (match_test "ix86_cmodel == CM_LARGE")
	    (match_test "ix86_cmodel == CM_LARGE_PIC"))
       (match_operand 0 "local_symbolic_operand")))

;; Test for various thread-local symbols.
(define_special_predicate "tls_symbolic_operand"
  (and (match_code "symbol_ref")
       (match_test "SYMBOL_REF_TLS_MODEL (op)")))

(define_special_predicate "tls_modbase_operand"
  (and (match_code "symbol_ref")
       (match_test "op == ix86_tls_module_base ()")))

(define_predicate "tls_address_pattern"
  (and (match_code "set,parallel,unspec,unspec_volatile")
       (match_test "ix86_tls_address_pattern_p (op)")))

;; Test for a pc-relative call operand.
(define_predicate "constant_call_address_operand"
  (match_code "symbol_ref")
{
  if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC
      || flag_force_indirect_call)
    return false;
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
    return false;
  return true;
})

;; P6 processors will jump to the address after the decrement when %esp
;; is used as a call operand, so they would execute the return address
;; as code.  See Pentium Pro erratum 70, Pentium II erratum A33 and
;; Pentium III erratum E17.

(define_predicate "call_register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (SUBREG_P (op))
    op = SUBREG_REG (op);

  if (!TARGET_64BIT && op == stack_pointer_rtx)
    return false;

  return register_no_elim_operand (op, mode);
})

;; True for any register that is neither virtual nor eliminable.  Used in
;; places where instantiation of such a register may cause the pattern to
;; not be recognized.
(define_predicate "register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (SUBREG_P (op))
    op = SUBREG_REG (op);

  /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
     because it is guaranteed to be reloaded into one.  */
  if (MEM_P (op))
    return true;

  return !(op == arg_pointer_rtx
	   || op == frame_pointer_rtx
	   || IN_RANGE (REGNO (op),
			FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
})

;; Similarly, but include the stack pointer.  This is used to prevent esp
;; from being used as an index reg.
(define_predicate "index_register_operand"
  (match_operand 0 "register_operand")
{
  if (SUBREG_P (op))
    op = SUBREG_REG (op);

  if (reload_completed)
    return REG_OK_FOR_INDEX_STRICT_P (op);
  else
    return REG_OK_FOR_INDEX_NONSTRICT_P (op);
})

;; Return false if this is any eliminable register.  Otherwise general_operand.
(define_predicate "general_no_elim_operand"
  (if_then_else (match_code "reg,subreg")
    (match_operand 0 "register_no_elim_operand")
    (match_operand 0 "general_operand")))

;; Return false if this is any eliminable register.  Otherwise
;; register_operand or a constant.
(define_predicate "nonmemory_no_elim_operand"
  (ior (match_operand 0 "register_no_elim_operand")
       (match_operand 0 "immediate_operand")))

;; Test for a valid operand for indirect branch.
(define_predicate "indirect_branch_operand"
  (ior (match_operand 0 "register_operand")
       (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
	    (not (match_test "TARGET_X32"))
	    (match_operand 0 "memory_operand"))))

;; Return true if OP is a memory operand that can be used in sibcalls.
;; Since a sibcall never returns, we can only use a call-clobbered register
;; as the GOT base.  Allow a GOT slot here only with a pseudo register as
;; the GOT base.  Properly handle sibcalls over a GOT slot with the
;; *sibcall_GOT_32 and *sibcall_value_GOT_32 patterns.
(define_predicate "sibcall_memory_operand"
  (match_operand 0 "memory_operand")
{
  op = XEXP (op, 0);
  if (CONSTANT_P (op))
    return true;
  if (GET_CODE (op) == PLUS && REG_P (XEXP (op, 0)))
    {
      int regno = REGNO (XEXP (op, 0));
      if (!HARD_REGISTER_NUM_P (regno) || call_used_or_fixed_reg_p (regno))
	{
	  op = XEXP (op, 1);
	  if (GOT32_symbol_operand (op, VOIDmode))
	    return true;
	}
    }
  return false;
})

;; Return true if OP is a GOT memory operand.
(define_predicate "GOT_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_code "const" "0")
       (match_code "unspec" "00")
       (match_test "XINT (XEXP (XEXP (op, 0), 0), 1) == UNSPEC_GOTPCREL")))
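;; For example, a GOT load of the form
;;   (mem:DI (const:DI (unspec:DI [(symbol_ref:DI ("foo"))] UNSPEC_GOTPCREL)))
;; matches this predicate.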

;; Test for a valid operand for a call instruction.
;; Allow constant call address operands in Pmode only.
(define_special_predicate "call_insn_operand"
  (ior (match_test "constant_call_address_operand
		     (op, mode == VOIDmode ? mode : Pmode)")
       (match_operand 0 "call_register_no_elim_operand")
       (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
	    (ior (and (not (match_test "TARGET_X32"))
		      (match_operand 0 "memory_operand"))
		 (and (match_test "TARGET_X32 && Pmode == DImode")
		      (match_operand 0 "GOT_memory_operand"))))))

;; Similarly, but for tail calls, in which we cannot allow memory references.
(define_special_predicate "sibcall_insn_operand"
  (ior (match_test "constant_call_address_operand
		     (op, mode == VOIDmode ? mode : Pmode)")
       (match_operand 0 "register_no_elim_operand")
       (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
	    (ior (and (not (match_test "TARGET_X32"))
		      (match_operand 0 "sibcall_memory_operand"))
		 (and (match_test "TARGET_X32 && Pmode == DImode")
		      (match_operand 0 "GOT_memory_operand"))))))

;; Return true if OP is a 32-bit GOT symbol operand.
(define_predicate "GOT32_symbol_operand"
  (and (match_code "const")
       (match_code "unspec" "0")
       (match_test "XINT (XEXP (op, 0), 1) == UNSPEC_GOT")))

;; Match exactly zero.
(define_predicate "const0_operand"
  (match_code "const_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST0_RTX (mode);
})

;; Match one or a vector with all elements equal to one.
(define_predicate "const1_operand"
  (match_code "const_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST1_RTX (mode);
})

;; Match exactly -1.
(define_predicate "constm1_operand"
  (and (match_code "const_int")
       (match_test "op == constm1_rtx")))

;; Match exactly eight.
(define_predicate "const8_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 8")))

;; Match exactly 128.
(define_predicate "const128_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 128")))

;; Match exactly 0xffffffff in anddi as a zero-extension operation.
(define_predicate "const_32bit_mask"
  (and (match_code "const_int")
       (match_test "trunc_int_for_mode (INTVAL (op), DImode)
		    == (HOST_WIDE_INT) 0xffffffff")))

;; Match 2, 4, or 8.  Used for leal multiplicands.
(define_predicate "const248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 2 || i == 4 || i == 8;
})
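;; For example, (const_int 4) matches const248_operand, corresponding to
;; the scale factor in an address such as (%eax,%ebx,4).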

;; Match 1, 2, or 3.  Used for lea shift amounts.
(define_predicate "const123_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 1 || i == 2 || i == 3;
})

;; Match 2, 3, 6, or 7.
(define_predicate "const2367_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 2 || i == 3 || i == 6 || i == 7;
})

;; Match 1, 2, 4, or 8.
(define_predicate "const1248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 1 || i == 2 || i == 4 || i == 8;
})

;; Match 3, 5, or 9.  Used for leal multiplicands.
(define_predicate "const359_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 3 || i == 5 || i == 9;
})

;; Match 4, or 8 to 11.  Used for embedded rounding.
(define_predicate "const_4_or_8_to_11_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 4 || (i >= 8 && i <= 11);
})

;; Match 4 or 8.  Used for SAE.
(define_predicate "const48_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 4 || i == 8;
})

;; Match 0 or 1.
(define_predicate "const_0_to_1_operand"
  (and (match_code "const_int")
       (ior (match_test "op == const0_rtx")
	    (match_test "op == const1_rtx"))))

;; Match 0 to 3.
(define_predicate "const_0_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 3)")))

;; Match 0 to 4.
(define_predicate "const_0_to_4_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 4)")))

;; Match 0 to 5.
(define_predicate "const_0_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 5)")))

;; Match 0 to 7.
(define_predicate "const_0_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 7)")))

;; Match 0 to 15.
(define_predicate "const_0_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 15)")))

;; Match 0 to 31.
(define_predicate "const_0_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 31)")))

;; Match 0 to 63.
(define_predicate "const_0_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 63)")))

;; Match 0 to 127.
(define_predicate "const_0_to_127_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 127)")))

;; Match 0 to 255.
(define_predicate "const_0_to_255_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 255)")))

;; Match (0 to 255) * 8.
(define_predicate "const_0_to_255_mul_8_operand"
  (match_code "const_int")
{
  unsigned HOST_WIDE_INT val = INTVAL (op);
  return val <= 255*8 && val % 8 == 0;
})

;; Match 1 to 255 except multiples of 8.
(define_predicate "const_0_to_255_not_mul_8_operand"
  (match_code "const_int")
{
  unsigned HOST_WIDE_INT val = INTVAL (op);
  return val <= 255 && val % 8 != 0;
})

;; Return true if OP is a CONST_INT >= 1 and <= 31 (a valid operand
;; for shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 31)")))

;; Return true if OP is a CONST_INT >= 1 and <= 63 (a valid operand
;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 63)")))

;; Match 2 or 3.
(define_predicate "const_2_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 2, 3)")))

;; Match 4 to 5.
(define_predicate "const_4_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 5)")))

;; Match 4 to 7.
(define_predicate "const_4_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 7)")))

;; Match 6 to 7.
(define_predicate "const_6_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 6, 7)")))

;; Match 8 to 9.
(define_predicate "const_8_to_9_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 9)")))

;; Match 8 to 11.
(define_predicate "const_8_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 11)")))

;; Match 8 to 15.
(define_predicate "const_8_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 15)")))

;; Match 10 to 11.
(define_predicate "const_10_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 10, 11)")))

;; Match 12 to 13.
(define_predicate "const_12_to_13_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 13)")))

;; Match 12 to 15.
(define_predicate "const_12_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 15)")))

;; Match 14 to 15.
(define_predicate "const_14_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 14, 15)")))

;; Match 16 to 19.
(define_predicate "const_16_to_19_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 16, 19)")))

;; Match 16 to 31.
(define_predicate "const_16_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 16, 31)")))

;; Match 20 to 23.
(define_predicate "const_20_to_23_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 20, 23)")))

;; Match 24 to 27.
(define_predicate "const_24_to_27_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 24, 27)")))

;; Match 28 to 31.
(define_predicate "const_28_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 28, 31)")))

;; True if this is a constant appropriate for an increment or decrement.
(define_predicate "incdec_operand"
  (match_code "const_int")
{
  /* On Pentium4, the inc and dec operations cause an extra dependency on
     the flag registers, since the carry flag is not set.  */
  if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
    return false;
  return op == const1_rtx || op == constm1_rtx;
})

;; True for registers, or const_int_operand.  Used by the vec_setm expanders.
(define_predicate "vec_setm_sse41_operand"
  (ior (and (match_operand 0 "register_operand")
	    (match_test "TARGET_SSE4_1"))
       (match_code "const_int")))

(define_predicate "vec_setm_avx2_operand"
  (ior (and (match_operand 0 "register_operand")
	    (match_test "TARGET_AVX2"))
       (match_code "const_int")))

(define_predicate "vec_setm_mmx_operand"
  (ior (and (match_operand 0 "register_operand")
	    (match_test "TARGET_SSE4_1")
	    (match_test "TARGET_MMX_WITH_SSE"))
       (match_code "const_int")))

;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (ior (match_test "op == const1_rtx")
		 (match_test "op == constm1_rtx")))))

;; True for registers, or (not: registers).  Used to optimize 3-operand
;; bitwise operations.
(define_predicate "regmem_or_bitnot_regmem_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (and (match_code "not")
	    (match_test "nonimmediate_operand (XEXP (op, 0), mode)"))))

;; True if OP is acceptable as an operand of the DImode shift expander.
(define_predicate "shiftdi_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

(define_predicate "ashldi_input_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "reg_or_pm1_operand")))

;; Return true if OP is a vector load from the constant pool with just
;; the first element nonzero.
(define_predicate "zero_extended_scalar_load_operand"
  (match_code "mem")
{
  unsigned n_elts;
  op = avoid_constant_pool_reference (op);

  if (GET_CODE (op) != CONST_VECTOR)
    return false;

  n_elts = CONST_VECTOR_NUNITS (op);

  for (n_elts--; n_elts > 0; n_elts--)
    {
      rtx elt = CONST_VECTOR_ELT (op, n_elts);
      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
	return false;
    }
  return true;
})

/* Return true if operand is a float vector constant that is all ones. */
(define_predicate "float_vector_all_ones_operand"
  (match_code "const_vector,mem")
{
  mode = GET_MODE (op);
  if (!FLOAT_MODE_P (mode)
      || (MEM_P (op)
	  && (!SYMBOL_REF_P (XEXP (op, 0))
	      || !CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))))
    return false;

  if (MEM_P (op))
    {
      op = get_pool_constant (XEXP (op, 0));
      if (GET_CODE (op) != CONST_VECTOR)
	return false;

      if (GET_MODE (op) != mode
	  && INTEGRAL_MODE_P (GET_MODE (op))
	  && op == CONSTM1_RTX (GET_MODE (op)))
	return true;
    }

  rtx first = XVECEXP (op, 0, 0);
  for (int i = 1; i != GET_MODE_NUNITS (GET_MODE (op)); i++)
    {
      rtx tmp = XVECEXP (op, 0, i);
      if (!rtx_equal_p (tmp, first))
	return false;
    }
  if (GET_MODE (first) == E_SFmode)
    {
      long l;
      REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (first), l);
      return (l & 0xffffffff) == 0xffffffff;
    }
  else if (GET_MODE (first) == E_DFmode)
    {
      long l[2];
      REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (first), l);
      return ((l[0] & 0xffffffff) == 0xffffffff
	      && (l[1] & 0xffffffff) == 0xffffffff);
    }
  else
    return false;
})
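;; Note: an SFmode or DFmode element whose bits are all ones is a NaN
;; encoding, so such vectors typically come from bit-mask constants rather
;; than from ordinary floating-point literals.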

/* Return true if operand is a vector constant that is all ones. */
(define_predicate "vector_all_ones_operand"
  (and (match_code "const_vector")
       (match_test "INTEGRAL_MODE_P (GET_MODE (op))")
       (match_test "op == CONSTM1_RTX (GET_MODE (op))")))

/* Return true if operand is a 128/256bit all ones vector
   that zero-extends to 256/512bit.  */
(define_predicate "vector_all_ones_zero_extend_half_operand"
  (match_code "const_vector")
{
  mode = GET_MODE (op);
  if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT
      || (GET_MODE_SIZE (mode) != 32
	  && GET_MODE_SIZE (mode) != 64))
    return false;

  int nelts = CONST_VECTOR_NUNITS (op);
  for (int i = 0; i != nelts; i++)
    {
      rtx elt = CONST_VECTOR_ELT (op, i);
      if (i < nelts / 2
	  && elt != CONSTM1_RTX (GET_MODE_INNER (mode)))
	return false;
      if (i >= nelts / 2
	  && elt != CONST0_RTX (GET_MODE_INNER (mode)))
	return false;
    }
  return true;
})

/* Return true if operand is a 128bit all ones vector
   that zero extends to 512bit.  */
(define_predicate "vector_all_ones_zero_extend_quarter_operand"
  (match_code "const_vector")
{
  mode = GET_MODE (op);
  if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT
      || GET_MODE_SIZE (mode) != 64)
    return false;

  int nelts = CONST_VECTOR_NUNITS (op);
  for (int i = 0; i != nelts; i++)
    {
      rtx elt = CONST_VECTOR_ELT (op, i);
      if (i < nelts / 4
	  && elt != CONSTM1_RTX (GET_MODE_INNER (mode)))
	return false;
      if (i >= nelts / 4
	  && elt != CONST0_RTX (GET_MODE_INNER (mode)))
	return false;
    }
  return true;
})

; Return true when OP is an operand acceptable as a vector memory operand.
; Only AVX can have misaligned memory operands.
(define_predicate "vector_memory_operand"
  (and (match_operand 0 "memory_operand")
       (ior (match_test "TARGET_AVX")
	    (match_test "MEM_ALIGN (op) >= GET_MODE_ALIGNMENT (mode)"))))

; Return true when OP is register_operand or vector_memory_operand.
(define_predicate "vector_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "vector_memory_operand")))

; Return true when OP is register_operand, vector_memory_operand
; or const_vector.
(define_predicate "vector_or_const_vector_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "vector_memory_operand")
       (match_code "const_vector")))

(define_predicate "bcst_mem_operand"
  (and (match_code "vec_duplicate")
       (and (match_test "TARGET_AVX512F")
	    (ior (match_test "TARGET_AVX512VL")
		 (match_test "GET_MODE_SIZE (GET_MODE (op)) == 64")))
       (match_test "VALID_BCST_MODE_P (GET_MODE_INNER (GET_MODE (op)))")
       (match_test "GET_MODE (XEXP (op, 0))
		    == GET_MODE_INNER (GET_MODE (op))")
       (match_test "memory_operand (XEXP (op, 0), GET_MODE (XEXP (op, 0)))")))
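;; For example, with AVX512F enabled an embedded-broadcast source such as
;;   (vec_duplicate:V16SF (mem:SF (reg:DI ...)))
;; matches bcst_mem_operand.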

; Return true when OP is bcst_mem_operand or vector_operand.
(define_predicate "bcst_vector_operand"
  (ior (match_operand 0 "vector_operand")
       (match_operand 0 "bcst_mem_operand")))

;; Return true when OP is either a nonimmediate operand, or any
;; CONST_VECTOR.
(define_predicate "nonimmediate_or_const_vector_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_code "const_vector")))

(define_predicate "nonimmediate_or_const_vec_dup_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_test "const_vec_duplicate_p (op)")))

;; Return true when OP is either a register operand, or any
;; CONST_VECTOR.
(define_predicate "reg_or_const_vector_operand"
  (ior (match_operand 0 "register_operand")
       (match_code "const_vector")))

;; Return true when OP is a CONST_VECTOR which can be converted to a
;; sign extended 32-bit integer.
(define_predicate "x86_64_const_vector_operand"
  (match_code "const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  else if (GET_MODE (op) != mode)
    return false;
  if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return false;
  HOST_WIDE_INT val = ix86_convert_const_vector_to_integer (op, mode);
  return trunc_int_for_mode (val, SImode) == val;
})
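;; For example, a V4QI constant whose four bytes combine into a value that
;; fits a sign-extended 32-bit immediate satisfies this predicate; a vector
;; wider than a word never does.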

(define_predicate "nonimmediate_or_x86_64_const_vector_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "x86_64_const_vector_operand")))

;; Return true when OP is nonimmediate or a standard SSE constant.
(define_predicate "nonimmediate_or_sse_const_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_test "standard_sse_constant_p (op, mode)")))

;; Return true if OP is a register or a zero.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const0_operand")))

; Return true when OP is a nonimmediate or zero.
(define_predicate "nonimm_or_0_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))

(define_predicate "norex_memory_operand"
  (and (match_operand 0 "memory_operand")
       (not (match_test "x86_extended_reg_mentioned_p (op)"))))

;; Return true for RTX codes that force an SImode address.
(define_predicate "SImode_address_operand"
  (match_code "subreg,zero_extend,and"))

;; Return true if op is a valid address for LEA, and does not contain
;; a segment override.  Defined as a special predicate to allow
;; mode-less const_int operands to pass to address_operand.
(define_special_predicate "address_no_seg_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  if (!CONST_INT_P (op)
      && mode != VOIDmode
      && GET_MODE (op) != mode)
    return false;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  return parts.seg == ADDR_SPACE_GENERIC;
})

;; Return true if op is a valid base register, displacement or
;; sum of base register and displacement for VSIB addressing.
(define_predicate "vsib_address_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;
  rtx disp;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  if (parts.index || parts.seg != ADDR_SPACE_GENERIC)
    return false;

  /* VSIB addressing doesn't support (%rip).  */
  if (parts.disp)
    {
      disp = parts.disp;
      if (GET_CODE (disp) == CONST)
	{
	  disp = XEXP (disp, 0);
	  if (GET_CODE (disp) == PLUS)
	    disp = XEXP (disp, 0);
	  if (GET_CODE (disp) == UNSPEC)
	    switch (XINT (disp, 1))
	      {
	      case UNSPEC_GOTPCREL:
	      case UNSPEC_PCREL:
	      case UNSPEC_GOTNTPOFF:
		return false;
	      }
	}
      if (TARGET_64BIT
	  && flag_pic
	  && (GET_CODE (disp) == SYMBOL_REF
	      || GET_CODE (disp) == LABEL_REF))
	return false;
    }

  return true;
})

(define_predicate "vsib_mem_operator"
  (match_code "mem"))

;; Return true if the rtx is known to be at least 32 bits aligned.
(define_predicate "aligned_operand"
  (match_operand 0 "general_operand")
{
  struct ix86_address parts;
  int ok;

  /* Registers and immediate operands are always "aligned".  */
  if (!MEM_P (op))
    return true;

  /* All patterns using aligned_operand on memory operands end up
     promoting the memory operand to 64bit and thus causing a memory
     mismatch.  */
  if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
    return false;

  /* Don't even try to do any aligned optimizations with volatiles.  */
  if (MEM_VOLATILE_P (op))
    return false;

  if (MEM_ALIGN (op) >= 32)
    return true;

  op = XEXP (op, 0);

  /* Pushes and pops are only valid on the stack pointer.  */
  if (GET_CODE (op) == PRE_DEC
      || GET_CODE (op) == POST_INC)
    return true;

  /* Decode the address.  */
  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  if (parts.base && SUBREG_P (parts.base))
    parts.base = SUBREG_REG (parts.base);
  if (parts.index && SUBREG_P (parts.index))
    parts.index = SUBREG_REG (parts.index);

  /* Look for some component that isn't known to be aligned.  */
  if (parts.index)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
	return false;
    }
  if (parts.base)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
	return false;
    }
  if (parts.disp)
    {
      if (!CONST_INT_P (parts.disp)
	  || (INTVAL (parts.disp) & 3))
	return false;
    }

  /* Didn't find one -- this must be an aligned address.  */
  return true;
})
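;; For example, a push through (pre_dec (reg sp)) is considered aligned,
;; while an address with a displacement of (const_int 2) is rejected, since
;; the displacement is not a multiple of 4.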

;; Return true if OP is a memory operand with a displacement.
(define_predicate "memory_displacement_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);
  return parts.disp != NULL_RTX;
})

;; Return true if OP is a memory operand with a displacement only.
(define_predicate "memory_displacement_only_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  if (TARGET_64BIT)
    return false;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);

  if (parts.base || parts.index)
    return false;

  return parts.disp != NULL_RTX;
})

;; Return true if OP is a memory operand that cannot be represented
;; by the modRM array.
(define_predicate "long_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "memory_address_length (op, false)")))

;; Return true if OP is a comparison operator that can be issued by fcmov.
(define_predicate "fcmov_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode)
    code = ix86_fp_compare_code_to_integer (code);

  /* i387 supports just a limited amount of condition codes.  */
  switch (code)
    {
    case GEU: case LTU:
      if (inmode == CCCmode || inmode == CCGZmode)
	return true;
      /* FALLTHRU */
    case GTU: case LEU:
      if (inmode == CCmode || inmode == CCFPmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
    case EQ: case NE:
      return true;
    default:
      return false;
    }
})

;; Return true if OP is a comparison that can be used in the CMPSS/CMPPS insns.
;; The first set are supported directly; the second set can't be done with
;; full IEEE support, i.e. NaNs.

(define_predicate "sse_comparison_operator"
  (ior (match_code "eq,ne,lt,le,unordered,unge,ungt,ordered")
       (and (match_test "TARGET_AVX")
	    (match_code "ge,gt,uneq,unle,unlt,ltgt"))))

(define_predicate "ix86_comparison_int_operator"
  (match_code "ne,eq,ge,gt,le,lt"))

(define_predicate "ix86_comparison_uns_operator"
  (match_code "ne,eq,geu,gtu,leu,ltu"))

(define_predicate "bt_comparison_operator"
  (match_code "ne,eq"))

(define_predicate "shr_comparison_operator"
  (match_code "gtu,leu"))

(define_predicate "add_comparison_operator"
  (match_code "geu,ltu"))

;; Return true if OP is a valid comparison operator in a valid mode.
(define_predicate "ix86_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode)
    return ix86_trivial_fp_comparison_operator (op, mode);

  switch (code)
    {
    case EQ: case NE:
      if (inmode == CCGZmode)
	return false;
      return true;
    case GE: case LT:
      if (inmode == CCmode || inmode == CCGCmode
	  || inmode == CCGOCmode || inmode == CCNOmode || inmode == CCGZmode)
	return true;
      return false;
    case GEU: case LTU:
      if (inmode == CCCmode || inmode == CCGZmode)
	return true;
      /* FALLTHRU */
    case GTU: case LEU:
      if (inmode == CCmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
      if (inmode == CCmode)
	return true;
      return false;
    case GT: case LE:
      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
	return true;
      return false;
    default:
      return false;
    }
})

19ed9d7b
UB
1575;; Return true if OP is a valid comparison operator
1576;; testing carry flag to be set.
8fe75e43 1577(define_predicate "ix86_carry_flag_operator"
d03ca8a6 1578 (match_code "ltu,unlt")
8fe75e43 1579{
ef4bddc2 1580 machine_mode inmode = GET_MODE (XEXP (op, 0));
8fe75e43
RH
1581 enum rtx_code code = GET_CODE (op);
1582
3f563e0b 1583 if (inmode == CCFPmode)
d03ca8a6
UB
1584 code = ix86_fp_compare_code_to_integer (code);
1585 else if (inmode != CCmode && inmode != CCCmode && inmode != CCGZmode)
19ed9d7b 1586 return false;
8fe75e43
RH
1587
1588 return code == LTU;
1589})
1590
c111f606
UB
1591;; Return true if OP is a valid comparison operator
 1592;; that tests whether the carry flag is unset.
1593(define_predicate "ix86_carry_flag_unset_operator"
1594 (match_code "geu,ge")
1595{
1596 machine_mode inmode = GET_MODE (XEXP (op, 0));
1597 enum rtx_code code = GET_CODE (op);
1598
1599 if (inmode == CCFPmode)
1600 code = ix86_fp_compare_code_to_integer (code);
1601 else if (inmode != CCmode && inmode != CCCmode && inmode != CCGZmode)
1602 return false;
1603
1604 return code == GEU;
1605})
1606
19ed9d7b 1607;; Return true if this comparison only requires testing one flag bit.
0948ccb2
PB
1608(define_predicate "ix86_trivial_fp_comparison_operator"
1609 (match_code "gt,ge,unlt,unle,uneq,ltgt,ordered,unordered"))
1610
19ed9d7b 1611;; Return true if we know how to do this comparison. Others require
0948ccb2
PB
1612;; testing more than one flag bit, and we let the generic middle-end
1613;; code do that.
1614(define_predicate "ix86_fp_comparison_operator"
1615 (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
1616 == IX86_FPCMP_ARITH")
1617 (match_operand 0 "comparison_operator")
1618 (match_operand 0 "ix86_trivial_fp_comparison_operator")))
1619
8fe75e43
RH
1620;; Nearly general operand, but accept any const_double, since we wish
1621;; to be able to drop them into memory rather than have them get pulled
1622;; into registers.
1623(define_predicate "cmp_fp_expander_operand"
1624 (ior (match_code "const_double")
1625 (match_operand 0 "general_operand")))
1626
1627;; Return true if this is a valid binary floating-point operation.
1628(define_predicate "binary_fp_operator"
1629 (match_code "plus,minus,mult,div"))
1630
1631;; Return true if this is a multiply operation.
1632(define_predicate "mult_operator"
1633 (match_code "mult"))
1634
1635;; Return true if this is a division operation.
1636(define_predicate "div_operator"
1637 (match_code "div"))
1638
f1b13064
JJ
1639;; Return true if this is an and, ior or xor operation.
1640(define_predicate "logic_operator"
1641 (match_code "and,ior,xor"))
1642
3f831b7d
JJ
1643;; Return true if this is a plus, minus, and, ior or xor operation.
1644(define_predicate "plusminuslogic_operator"
1645 (match_code "plus,minus,and,ior,xor"))
1646
8fe75e43
RH
1647;; Return true for ARITHMETIC_P.
1648(define_predicate "arith_or_logical_operator"
513618db
RH
1649 (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
1650 mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))
8fe75e43 1651
bab64f23
PB
1652;; Return true for COMMUTATIVE_P.
1653(define_predicate "commutative_operator"
1654 (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))
1655
19ed9d7b 1656;; Return true if OP is a binary operator that can be promoted to a wider mode.
8fe75e43 1657(define_predicate "promotable_binary_operator"
67266ebb 1658 (ior (match_code "plus,minus,and,ior,xor,ashift")
8fe75e43 1659 (and (match_code "mult")
a646aded 1660 (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))
8fe75e43 1661
25da5dc7
RH
1662(define_predicate "compare_operator"
1663 (match_code "compare"))
7cacf53e 1664
f913cc2a
UB
1665;; Return true if OP is a memory operand, aligned to
1666;; less than its natural alignment.
66e1ecfe
L
1667(define_predicate "misaligned_operand"
1668 (and (match_code "mem")
f913cc2a 1669 (match_test "MEM_ALIGN (op) < GET_MODE_BITSIZE (mode)")))
95879c72 1670
94de7e22 1671;; Return true if OP is a parallel for a mov{d,q,dqa,ps,pd} vec_select,
1672;; where one of the two operands of the vec_concat is const0_operand.
1673(define_predicate "movq_parallel"
1674 (match_code "parallel")
1675{
1676 unsigned nelt = XVECLEN (op, 0);
1677 unsigned nelt2 = nelt >> 1;
1678 unsigned i;
1679
1680 if (nelt < 2)
1681 return false;
1682
1683 /* Validate that all of the elements are constants, that the lower half
 1684 of the permutation selects the lower half of the first operand, and
 1685 that the upper half may come from any element of the second operand. */
1686 for (i = 0; i < nelt; ++i)
1687 {
1688 rtx er = XVECEXP (op, 0, i);
1689 unsigned HOST_WIDE_INT ei;
1690
1691 if (!CONST_INT_P (er))
d6345481 1692 return false;
94de7e22 1693 ei = INTVAL (er);
1694 if (i < nelt2 && ei != i)
d6345481 1695 return false;
94de7e22 1696 if (i >= nelt2 && (ei < nelt || ei >= nelt << 1))
d6345481 1697 return false;
94de7e22 1698 }
1699
d6345481 1700 return true;
94de7e22 1701})
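
;; Illustration only (editor's sketch, not part of predicates.md): the index
;; rule checked by movq_parallel above, restated as a standalone C helper.
;; The function name and array representation are invented for the sketch.

#include <stdbool.h>
#include <stddef.h>

/* SEL holds the NELT selector constants of the parallel.  The low half must
   be the identity on the first vec_concat operand; the high half may pick
   any element of the second (zero) operand.  */
bool
movq_selector_ok (const size_t *sel, size_t nelt)
{
  size_t nelt2 = nelt >> 1;

  if (nelt < 2)
    return false;

  for (size_t i = 0; i < nelt; ++i)
    {
      if (i < nelt2 && sel[i] != i)
        return false;
      if (i >= nelt2 && (sel[i] < nelt || sel[i] >= nelt << 1))
        return false;
    }
  return true;
}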
1702
19ed9d7b 1703;; Return true if OP is a vzeroall operation, known to be a PARALLEL.
95879c72
L
1704(define_predicate "vzeroall_operation"
1705 (match_code "parallel")
1706{
85b1d1bd 1707 unsigned i, nregs = TARGET_64BIT ? 16 : 8;
95879c72 1708
85b1d1bd 1709 if ((unsigned) XVECLEN (op, 0) != 1 + nregs)
19ed9d7b 1710 return false;
95879c72 1711
85b1d1bd
UB
1712 for (i = 0; i < nregs; i++)
1713 {
1714 rtx elt = XVECEXP (op, 0, i+1);
1715
1716 if (GET_CODE (elt) != SET
1717 || GET_CODE (SET_DEST (elt)) != REG
1718 || GET_MODE (SET_DEST (elt)) != V8SImode
02469d3a 1719 || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
85b1d1bd 1720 || SET_SRC (elt) != CONST0_RTX (V8SImode))
19ed9d7b 1721 return false;
85b1d1bd 1722 }
19ed9d7b 1723 return true;
85b1d1bd
UB
1724})
1725
b38ab29f
UB
1726;; Return true if OP is a vzeroall pattern.
1727(define_predicate "vzeroall_pattern"
1728 (and (match_code "parallel")
1729 (match_code "unspec_volatile" "a")
1730 (match_test "XINT (XVECEXP (op, 0, 0), 1) == UNSPECV_VZEROALL")))
1731
1732;; Return true if OP is a vzeroupper pattern.
1733(define_predicate "vzeroupper_pattern"
69811448 1734 (and (match_code "parallel")
9a90b311 1735 (match_code "unspec" "b")
1736 (match_test "XINT (XVECEXP (op, 0, 1), 1) == UNSPEC_CALLEE_ABI")
1737 (match_test "INTVAL (XVECEXP (XVECEXP (op, 0, 1), 0, 0)) == ABI_VZEROUPPER")))
ff97910d 1738
7121e32b
UB
1739;; Return true if OP is an addsub vec_merge operation.
1740(define_predicate "addsub_vm_operator"
1741 (match_code "vec_merge")
1742{
1743 rtx op0, op1;
1744 int swapped;
1745 HOST_WIDE_INT mask;
1746 int nunits, elt;
1747
1748 op0 = XEXP (op, 0);
1749 op1 = XEXP (op, 1);
1750
1751 /* Sanity check. */
1752 if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
1753 swapped = 0;
1754 else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
1755 swapped = 1;
1756 else
1757 gcc_unreachable ();
1758
1759 mask = INTVAL (XEXP (op, 2));
1760 nunits = GET_MODE_NUNITS (mode);
1761
1762 for (elt = 0; elt < nunits; elt++)
1763 {
1764 /* "bit" is the inverted mask bit: 0 takes the element from op0, 1 from op1. */
1765 int bit = !(mask & (HOST_WIDE_INT_1U << elt));
1766
1767 if (bit != ((elt & 1) ^ swapped))
1768 return false;
1769 }
1770
1771 return true;
1772})
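
;; Illustration only (editor's sketch, not part of predicates.md): the lane
;; rule addsub_vm_operator enforces.  In RTL vec_merge a set mask bit selects
;; the first operand, so the non-swapped form
;; (vec_merge (minus ...) (plus ...) mask) must subtract in the even lanes
;; and add in the odd lanes, matching addsubp[sd].  The helper is invented.

#include <stdbool.h>
#include <stdint.h>

bool
addsub_merge_mask_ok (uint64_t mask, int nunits, bool swapped)
{
  for (int elt = 0; elt < nunits; ++elt)
    {
      /* 1 when this lane is taken from the second vec_merge operand.  */
      int from_second = !((mask >> elt) & 1);

      /* Lane parity decides which arm addsub needs, modulo swapping.  */
      if (from_second != ((elt & 1) ^ (swapped ? 1 : 0)))
        return false;
    }
  return true;
}

/* e.g. a V4SF addsub uses mask 0x5: lanes 0 and 2 subtract, 1 and 3 add.  */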
1773
1774;; Return true if OP is an addsub vec_select/vec_concat operation.
1775(define_predicate "addsub_vs_operator"
1776 (and (match_code "vec_select")
1777 (match_code "vec_concat" "0"))
1778{
1779 rtx op0, op1;
1780 bool swapped;
1781 int nunits, elt;
1782
1783 op0 = XEXP (XEXP (op, 0), 0);
1784 op1 = XEXP (XEXP (op, 0), 1);
1785
1786 /* Sanity check. */
1787 if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
1788 swapped = false;
1789 else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
1790 swapped = true;
1791 else
1792 gcc_unreachable ();
1793
1794 nunits = GET_MODE_NUNITS (mode);
1795 if (XVECLEN (XEXP (op, 1), 0) != nunits)
1796 return false;
1797
1798 /* We already checked that permutation is suitable for addsub,
1799 so only look at the first element of the parallel. */
1800 elt = INTVAL (XVECEXP (XEXP (op, 1), 0, 0));
5e04b3b6 1801
7121e32b
UB
1802 return elt == (swapped ? nunits : 0);
1803})
1804
1805;; Return true if OP is a parallel for an addsub vec_select.
1806(define_predicate "addsub_vs_parallel"
1807 (and (match_code "parallel")
1808 (match_code "const_int" "a"))
1809{
1810 int nelt = XVECLEN (op, 0);
1811 int elt, i;
1812
1813 if (nelt < 2)
1814 return false;
1815
1816 /* Check that the permutation is suitable for addsub.
1817 For example, { 0 9 2 11 4 13 6 15 } or { 8 1 10 3 12 5 14 7 }. */
1818 elt = INTVAL (XVECEXP (op, 0, 0));
1819 if (elt == 0)
1820 {
1821 for (i = 1; i < nelt; ++i)
1822 if (INTVAL (XVECEXP (op, 0, i)) != (i + (i & 1) * nelt))
1823 return false;
1824 }
1825 else if (elt == nelt)
1826 {
1827 for (i = 1; i < nelt; ++i)
1828 if (INTVAL (XVECEXP (op, 0, i)) != (elt + i - (i & 1) * nelt))
1829 return false;
1830 }
1831 else
1832 return false;
1833
1834 return true;
1835})
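
;; Illustration only (editor's sketch, not part of predicates.md): the two
;; selector shapes accepted by addsub_vs_parallel, rechecked in standalone C.
;; The helper name and int-array encoding are invented for the sketch.

#include <stdbool.h>

bool
addsub_selector_ok (const int *sel, int nelt)
{
  if (nelt < 2)
    return false;

  if (sel[0] == 0)
    {
      /* Even lanes from the first operand, odd lanes from the second,
         e.g. { 0 9 2 11 4 13 6 15 } for nelt == 8.  */
      for (int i = 1; i < nelt; ++i)
        if (sel[i] != i + (i & 1) * nelt)
          return false;
    }
  else if (sel[0] == nelt)
    {
      /* The swapped form, e.g. { 8 1 10 3 12 5 14 7 }.  */
      for (int i = 1; i < nelt; ++i)
        if (sel[i] != nelt + i - (i & 1) * nelt)
          return false;
    }
  else
    return false;

  return true;
}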
1836
faf2b6bc 1837;; Return true if OP is a constant pool for perm{w,d,b} which contains
 1838;; indexes matching pmov{dw,wb,qd}.
1839(define_predicate "permvar_truncate_operand"
1840 (match_code "mem")
1841{
1842 int nelt = GET_MODE_NUNITS (mode);
1843 int perm[128];
1844 int id;
1845
1846 if (!INTEGRAL_MODE_P (mode) || !VECTOR_MODE_P (mode))
1847 return false;
1848
1849 if (nelt < 2)
1850 return false;
1851
1852 if (!ix86_extract_perm_from_pool_constant (&perm[0], op))
1853 return false;
1854
1855 id = exact_log2 (nelt);
1856
1857 /* Check that the permutation is suitable for pmov{dw,wb,qd}.
1858 For example V16HImode to V8HImode
1859 { 0 2 4 6 8 10 12 14 * * * * * * * * }. */
1860 for (int i = 0; i != nelt / 2; i++)
1861 if ((perm[i] & ((1 << id) - 1)) != i * 2)
1862 return false;
1863
1864 return true;
1865})
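
;; Illustration only (editor's sketch, not part of predicates.md): the index
;; pattern permvar_truncate_operand extracts from the constant pool,
;; rechecked on a plain array.  NELT is assumed to be a power of two, as it
;; is for the vector modes involved.

#include <stdbool.h>

bool
permvar_truncate_sel_ok (const int *perm, int nelt)
{
  if (nelt < 2)
    return false;

  /* Reduce each index modulo the vector length, then require the low half
     of the result to gather every even element,
     e.g. { 0 2 4 6 8 10 12 14 * ... * } for the V16HImode example above.  */
  for (int i = 0; i < nelt / 2; ++i)
    if ((perm[i] & (nelt - 1)) != i * 2)
      return false;

  return true;
}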
1866
1867;; Return true if OP is a constant pool for pshufb which contains
 1868;; indexes matching pmovdw.
1869(define_predicate "pshufb_truncv4siv4hi_operand"
1870 (match_code "mem")
1871{
1872 int perm[128];
1873
1874 if (mode != E_V16QImode)
1875 return false;
1876
1877 if (!ix86_extract_perm_from_pool_constant (&perm[0], op))
1878 return false;
1879
1880 /* Check that the permutation is suitable for pmovdw.
1881 For example V4SImode to V4HImode
1882 { 0 1 4 5 8 9 12 13 * * * * * * * * }.
1883 index = i % 2 + (i / 2) * 4. */
1884 for (int i = 0; i != 8; i++)
1885 {
1886 /* if (SRC2[(i * 8)+7] = 1) then DEST[(i*8)+7..(i*8)+0] := 0; */
1887 if (perm[i] & 128)
1888 return false;
1889
1890 if ((perm[i] & 15) != ((i & 1) + (i & 0xFE) * 2))
1891 return false;
1892 }
1893
1894 return true;
1895})
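
;; Illustration only (editor's sketch, not part of predicates.md): the pshufb
;; control bytes accepted above, rechecked standalone.  PERM holds the 16
;; control bytes; only the low 8 matter for the pmovdw pattern.

#include <stdbool.h>

bool
pshufb_truncdw_sel_ok (const unsigned char *perm)
{
  for (int i = 0; i < 8; ++i)
    {
      /* A set bit 7 zeroes the destination byte instead of selecting.  */
      if (perm[i] & 0x80)
        return false;

      /* Gather byte pairs { 0 1 4 5 8 9 12 13 }: index = i % 2 + i / 2 * 4.  */
      if ((perm[i] & 0x0f) != i % 2 + i / 2 * 4)
        return false;
    }
  return true;
}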
1896
1897;; Return true if OP is a constant pool for pshufb which contains
 1898;; indexes matching pmovwb.
1899(define_predicate "pshufb_truncv8hiv8qi_operand"
1900 (match_code "mem")
1901{
1902 int perm[128];
1903
1904 if (mode != E_V16QImode)
1905 return false;
1906
1907 if (!ix86_extract_perm_from_pool_constant (&perm[0], op))
1908 return false;
1909
1910 /* Check that the permutation is suitable for pmovwb.
1911 For example V8HImode to V8QImode
 1912 { 0 2 4 6 8 10 12 14 * * * * * * * * }.
 1913 index = i * 2. */
1914 for (int i = 0; i != 8; i++)
1915 {
1916 /* if (SRC2[(i * 8)+7] = 1) then DEST[(i*8)+7..(i*8)+0] := 0; */
1917 if (perm[i] & 128)
1918 return false;
1919
1920 if ((perm[i] & 15) != i * 2)
1921 return false;
1922 }
1923
1924 return true;
1925})
1926
b668a06e
JJ
1927;; Return true if OP is a parallel for a pmovz{bw,wd,dq} vec_select,
1928;; where one of the two operands of the vec_concat is const0_operand.
1929(define_predicate "pmovzx_parallel"
1930 (and (match_code "parallel")
1931 (match_code "const_int" "a"))
1932{
1933 int nelt = XVECLEN (op, 0);
1934 int elt, i;
1935
1936 if (nelt < 2)
1937 return false;
1938
1939 /* Check that the permutation is suitable for pmovz{bw,wd,dq}.
1940 For example { 0 16 1 17 2 18 3 19 4 20 5 21 6 22 7 23 }. */
1941 elt = INTVAL (XVECEXP (op, 0, 0));
1942 if (elt == 0)
1943 {
1944 for (i = 1; i < nelt; ++i)
1945 if ((i & 1) != 0)
1946 {
1947 if (INTVAL (XVECEXP (op, 0, i)) < nelt)
1948 return false;
1949 }
1950 else if (INTVAL (XVECEXP (op, 0, i)) != i / 2)
1951 return false;
1952 }
1953 else
1954 return false;
1955
1956 return true;
1957})
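
;; Illustration only (editor's sketch, not part of predicates.md): the
;; interleaving selector accepted by pmovzx_parallel, rechecked on a plain
;; array.  Odd positions may name any element of the second (zero)
;; vec_concat operand, hence only a lower bound is checked there.

#include <stdbool.h>

bool
pmovzx_selector_ok (const int *sel, int nelt)
{
  if (nelt < 2 || sel[0] != 0)
    return false;

  /* e.g. { 0 16 1 17 2 18 3 19 ... } for nelt == 16.  */
  for (int i = 1; i < nelt; ++i)
    {
      if (i & 1)
        {
          if (sel[i] < nelt)
            return false;
        }
      else if (sel[i] != i / 2)
        return false;
    }
  return true;
}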
1958
8f9fea41
HJ
1959;; Return true if OP is a const vector with a duplicated value.
1960(define_predicate "const_vector_duplicate_operand"
1961 (match_code "const_vector")
1962{
1963 rtx elt = XVECEXP (op, 0, 0);
1964 int i, nelt = XVECLEN (op, 0);
1965
1966 for (i = 1; i < nelt; ++i)
1967 if (!rtx_equal_p (elt, XVECEXP (op, 0, i)))
1968 return false;
1969 return true;
1970})
1971
7121e32b 1972;; Return true if OP is a parallel for a vbroadcast permute.
5e04b3b6
RH
1973(define_predicate "avx_vbroadcast_operand"
1974 (and (match_code "parallel")
1975 (match_code "const_int" "a"))
1976{
1977 rtx elt = XVECEXP (op, 0, 0);
1978 int i, nelt = XVECLEN (op, 0);
1979
1980 /* Don't bother checking there are the right number of operands,
1981 merely that they're all identical. */
1982 for (i = 1; i < nelt; ++i)
1983 if (XVECEXP (op, 0, i) != elt)
1984 return false;
1985 return true;
1986})
96d86115 1987
edbb0749
ES
1988;; Return true if OP is a parallel for a palignr permute.
1989(define_predicate "palignr_operand"
1990 (and (match_code "parallel")
1991 (match_code "const_int" "a"))
1992{
1993 int elt = INTVAL (XVECEXP (op, 0, 0));
1994 int i, nelt = XVECLEN (op, 0);
1995
1996 /* Check that the order of the permutation is suitable for palignr.
1997 For example, {5 6 7 0 1 2 3 4} is "palignr 5, xmm, xmm". */
1998 for (i = 1; i < nelt; ++i)
1999 if (INTVAL (XVECEXP (op, 0, i)) != ((elt + i) % nelt))
2000 return false;
2001 return true;
2002})
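
;; Illustration only (editor's sketch, not part of predicates.md): the
;; rotation test performed by palignr_operand, as a standalone helper.

#include <stdbool.h>

bool
palignr_selector_ok (const int *sel, int nelt)
{
  /* The selector must rotate the elements by sel[0],
     e.g. { 5 6 7 0 1 2 3 4 } corresponds to "palignr 5".  */
  for (int i = 1; i < nelt; ++i)
    if (sel[i] != (sel[0] + i) % nelt)
      return false;
  return true;
}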
2003
96d86115
RH
2004;; Return true if OP is a proper third operand to vpblendw256.
2005(define_predicate "avx2_pblendw_operand"
2006 (match_code "const_int")
2007{
2008 HOST_WIDE_INT val = INTVAL (op);
2009 HOST_WIDE_INT low = val & 0xff;
524857ec 2010 return val == ((low << 8) | low);
96d86115 2011})
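
;; Illustration only (editor's sketch, not part of predicates.md): the
;; immediate check above in standalone form.  vpblendw has only an 8-bit
;; immediate, so a 256-bit blend is representable only when the same byte
;; controls both 128-bit lanes.

#include <stdbool.h>
#include <stdint.h>

bool
pblendw256_imm_ok (int64_t val)
{
  int64_t low = val & 0xff;
  return val == ((low << 8) | low);
}

/* e.g. 0xa5a5 is accepted, 0xa55a is rejected.  */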
baee1763 2012
acf93f1e 2013;; Return true if OP is vector_operand or CONST_VECTOR.
baee1763 2014(define_predicate "general_vector_operand"
acf93f1e 2015 (ior (match_operand 0 "vector_operand")
baee1763 2016 (match_code "const_vector")))
0fe65b75
AI
2017
2018;; Return true if OP is either the constant -1 or stored in a register.
2019(define_predicate "register_or_constm1_operand"
2020 (ior (match_operand 0 "register_operand")
2021 (and (match_code "const_int")
2022 (match_test "op == constm1_rtx"))))
d6d4d770
DS
2023
2024;; Return true if the vector ends with between 12 and 18 register saves using
2025;; RAX as the base address.
2026(define_predicate "save_multiple"
2027 (match_code "parallel")
2028{
2029 const unsigned len = XVECLEN (op, 0);
2030 unsigned i;
2031
2032 /* Starting from end of vector, count register saves. */
2033 for (i = 0; i < len; ++i)
2034 {
2035 rtx src, dest, addr;
2036 rtx e = XVECEXP (op, 0, len - 1 - i);
2037
2038 if (GET_CODE (e) != SET)
2039 break;
2040
2041 src = SET_SRC (e);
2042 dest = SET_DEST (e);
2043
2044 if (!REG_P (src) || !MEM_P (dest))
2045 break;
2046
2047 addr = XEXP (dest, 0);
2048
2049 /* Good if dest address is in RAX. */
2050 if (REG_P (addr) && REGNO (addr) == AX_REG)
2051 continue;
2052
2053 /* Good if dest address is offset of RAX. */
2054 if (GET_CODE (addr) == PLUS
2055 && REG_P (XEXP (addr, 0))
2056 && REGNO (XEXP (addr, 0)) == AX_REG)
2057 continue;
2058
2059 break;
2060 }
2061 return (i >= 12 && i <= 18);
2062})
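
;; Illustration only (editor's sketch, not part of predicates.md): a
;; simplified model of the trailing-count logic shared by save_multiple and
;; restore_multiple below.  is_match[] stands in for "this element is a
;; register save (or load) whose address is based on the expected register";
;; the real predicates derive that from the RTL.

#include <stdbool.h>

bool
save_restore_count_ok (const bool *is_match, unsigned len)
{
  unsigned i;

  /* Starting from the end of the vector, count matching elements.  */
  for (i = 0; i < len; ++i)
    if (!is_match[len - 1 - i])
      break;

  /* The predicates accept between 12 and 18 such elements.  */
  return i >= 12 && i <= 18;
}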
2063
2064
2065;; Return true if the vector ends with between 12 and 18 register loads using
2066;; RSI as the base address.
2067(define_predicate "restore_multiple"
2068 (match_code "parallel")
2069{
2070 const unsigned len = XVECLEN (op, 0);
2071 unsigned i;
2072
2073 /* Starting from end of vector, count register restores. */
2074 for (i = 0; i < len; ++i)
2075 {
2076 rtx src, dest, addr;
2077 rtx e = XVECEXP (op, 0, len - 1 - i);
2078
2079 if (GET_CODE (e) != SET)
2080 break;
2081
2082 src = SET_SRC (e);
2083 dest = SET_DEST (e);
2084
2085 if (!MEM_P (src) || !REG_P (dest))
2086 break;
2087
2088 addr = XEXP (src, 0);
2089
2090 /* Good if src address is in RSI. */
2091 if (REG_P (addr) && REGNO (addr) == SI_REG)
2092 continue;
2093
2094 /* Good if src address is offset of RSI. */
2095 if (GET_CODE (addr) == PLUS
2096 && REG_P (XEXP (addr, 0))
2097 && REGNO (XEXP (addr, 0)) == SI_REG)
2098 continue;
2099
2100 break;
2101 }
2102 return (i >= 12 && i <= 18);
2103})
632a2f50 2104
2105;; Keylocker-specific predicates
2106(define_predicate "encodekey128_operation"
2107 (match_code "parallel")
2108{
2109 unsigned i;
2110 rtx elt;
2111
2112 if (XVECLEN (op, 0) != 8)
2113 return false;
2114
2115 for (i = 0; i < 3; i++)
2116 {
2117 elt = XVECEXP (op, 0, i + 1);
2118 if (GET_CODE (elt) != SET
2119 || GET_CODE (SET_DEST (elt)) != REG
2120 || GET_MODE (SET_DEST (elt)) != V2DImode
2121 || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
2122 || GET_CODE (SET_SRC (elt)) != UNSPEC_VOLATILE
2123 || GET_MODE (SET_SRC (elt)) != V2DImode
2124 || XVECLEN (SET_SRC (elt), 0) != 1
 2125 || XVECEXP (SET_SRC (elt), 0, 0) != const0_rtx)
2126 return false;
2127 }
2128
2129 for (i = 4; i < 7; i++)
2130 {
2131 elt = XVECEXP (op, 0, i);
db288230
L
2132 if (GET_CODE (elt) != CLOBBER
2133 || GET_MODE (elt) != VOIDmode
2134 || GET_CODE (XEXP (elt, 0)) != REG
2135 || GET_MODE (XEXP (elt, 0)) != V2DImode
2136 || REGNO (XEXP (elt, 0)) != GET_SSE_REGNO (i))
632a2f50 2137 return false;
2138 }
2139
2140 elt = XVECEXP (op, 0, 7);
2141 if (GET_CODE (elt) != CLOBBER
2142 || GET_MODE (elt) != VOIDmode
2143 || GET_CODE (XEXP (elt, 0)) != REG
2144 || GET_MODE (XEXP (elt, 0)) != CCmode
2145 || REGNO (XEXP (elt, 0)) != FLAGS_REG)
2146 return false;
2147 return true;
2148})
2149
2150(define_predicate "encodekey256_operation"
2151 (match_code "parallel")
2152{
2153 unsigned i;
2154 rtx elt;
2155
2156 if (XVECLEN (op, 0) != 9)
2157 return false;
2158
2159 elt = SET_SRC (XVECEXP (op, 0, 0));
2160 elt = XVECEXP (elt, 0, 2);
2161 if (!REG_P (elt)
2162 || REGNO (elt) != GET_SSE_REGNO (1))
2163 return false;
2164
2165 for (i = 0; i < 4; i++)
2166 {
2167 elt = XVECEXP (op, 0, i + 1);
2168 if (GET_CODE (elt) != SET
2169 || GET_CODE (SET_DEST (elt)) != REG
2170 || GET_MODE (SET_DEST (elt)) != V2DImode
2171 || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
2172 || GET_CODE (SET_SRC (elt)) != UNSPEC_VOLATILE
2173 || GET_MODE (SET_SRC (elt)) != V2DImode
2174 || XVECLEN (SET_SRC (elt), 0) != 1
 2175 || XVECEXP (SET_SRC (elt), 0, 0) != const0_rtx)
2176 return false;
2177 }
2178
2179 for (i = 4; i < 7; i++)
2180 {
2181 elt = XVECEXP (op, 0, i + 1);
db288230
L
2182 if (GET_CODE (elt) != CLOBBER
2183 || GET_MODE (elt) != VOIDmode
2184 || GET_CODE (XEXP (elt, 0)) != REG
2185 || GET_MODE (XEXP (elt, 0)) != V2DImode
2186 || REGNO (XEXP (elt, 0)) != GET_SSE_REGNO (i))
632a2f50 2187 return false;
2188 }
2189
2190 elt = XVECEXP (op, 0, 8);
2191 if (GET_CODE (elt) != CLOBBER
2192 || GET_MODE (elt) != VOIDmode
2193 || GET_CODE (XEXP (elt, 0)) != REG
2194 || GET_MODE (XEXP (elt, 0)) != CCmode
2195 || REGNO (XEXP (elt, 0)) != FLAGS_REG)
2196 return false;
2197 return true;
2198})
2199
2200
2201(define_predicate "aeswidekl_operation"
2202 (match_code "parallel")
2203{
2204 unsigned i;
2205 rtx elt;
2206
2207 for (i = 0; i < 8; i++)
2208 {
2209 elt = XVECEXP (op, 0, i + 1);
2210 if (GET_CODE (elt) != SET
2211 || GET_CODE (SET_DEST (elt)) != REG
2212 || GET_MODE (SET_DEST (elt)) != V2DImode
2213 || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
2214 || GET_CODE (SET_SRC (elt)) != UNSPEC_VOLATILE
2215 || GET_MODE (SET_SRC (elt)) != V2DImode
2216 || XVECLEN (SET_SRC (elt), 0) != 1
2217 || REGNO (XVECEXP (SET_SRC (elt), 0, 0)) != GET_SSE_REGNO (i))
2218 return false;
2219 }
2220 return true;
2221})