]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/i386/predicates.md
Update copyright years.
[thirdparty/gcc.git] / gcc / config / i386 / predicates.md
CommitLineData
8fe75e43 1;; Predicate definitions for IA-32 and x86-64.
a5544970 2;; Copyright (C) 2004-2019 Free Software Foundation, Inc.
8fe75e43
RH
3;;
4;; This file is part of GCC.
5;;
6;; GCC is free software; you can redistribute it and/or modify
7;; it under the terms of the GNU General Public License as published by
2f83c7d6 8;; the Free Software Foundation; either version 3, or (at your option)
8fe75e43
RH
9;; any later version.
10;;
11;; GCC is distributed in the hope that it will be useful,
12;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14;; GNU General Public License for more details.
15;;
16;; You should have received a copy of the GNU General Public License
2f83c7d6
NC
17;; along with GCC; see the file COPYING3. If not see
18;; <http://www.gnu.org/licenses/>.
8fe75e43 19
19ed9d7b 20;; Return true if OP is either a i387 or SSE fp register.
8fe75e43
RH
21(define_predicate "any_fp_register_operand"
22 (and (match_code "reg")
23 (match_test "ANY_FP_REGNO_P (REGNO (op))")))
24
19ed9d7b 25;; Return true if OP is an i387 fp register.
8fe75e43
RH
26(define_predicate "fp_register_operand"
27 (and (match_code "reg")
66aaf16f 28 (match_test "STACK_REGNO_P (REGNO (op))")))
8fe75e43 29
85a77221
AI
30;; True if the operand is a GENERAL class register.
31(define_predicate "general_reg_operand"
32 (and (match_code "reg")
00cbba89 33 (match_test "GENERAL_REGNO_P (REGNO (op))")))
85a77221 34
cb1fd5b4
UB
35;; True if the operand is a nonimmediate operand with GENERAL class register.
36(define_predicate "nonimmediate_gr_operand"
37 (if_then_else (match_code "reg")
38 (match_test "GENERAL_REGNO_P (REGNO (op))")
39 (match_operand 0 "nonimmediate_operand")))
40
cd3fe55a
UB
41;; True if the operand is a general operand with GENERAL class register.
42(define_predicate "general_gr_operand"
43 (if_then_else (match_code "reg")
44 (match_test "GENERAL_REGNO_P (REGNO (op))")
45 (match_operand 0 "general_operand")))
46
8fe75e43
RH
47;; True if the operand is an MMX register.
48(define_predicate "mmx_reg_operand"
49 (and (match_code "reg")
50 (match_test "MMX_REGNO_P (REGNO (op))")))
770b37b9
UB
51
52;; True if the operand is an SSE register.
53(define_predicate "sse_reg_operand"
54 (and (match_code "reg")
55 (match_test "SSE_REGNO_P (REGNO (op))")))
8fe75e43 56
e500c62a
KY
57;; True if the operand is an AVX-512 new register.
58(define_predicate "ext_sse_reg_operand"
59 (and (match_code "reg")
60 (match_test "EXT_REX_SSE_REGNO_P (REGNO (op))")))
61
55d2ee57
UB
62;; Return true if op is a QImode register.
63(define_predicate "any_QIreg_operand"
64 (and (match_code "reg")
65 (match_test "ANY_QI_REGNO_P (REGNO (op))")))
8fe75e43 66
55d2ee57
UB
67;; Return true if op is one of QImode registers: %[abcd][hl].
68(define_predicate "QIreg_operand"
69 (and (match_code "reg")
70 (match_test "QI_REGNO_P (REGNO (op))")))
71
72;; Return true if op is a QImode register operand other than %[abcd][hl].
73(define_predicate "ext_QIreg_operand"
74 (and (match_test "TARGET_64BIT")
75 (match_code "reg")
76 (not (match_test "QI_REGNO_P (REGNO (op))"))))
77
78;; Return true if op is the AX register.
79(define_predicate "ax_reg_operand"
80 (and (match_code "reg")
81 (match_test "REGNO (op) == AX_REG")))
82
83;; Return true if op is the flags register.
84(define_predicate "flags_reg_operand"
85 (and (match_code "reg")
86 (match_test "REGNO (op) == FLAGS_REG")))
cb105922 87
edf5d079 88;; Match a DI, SI or HImode register for a zero_extract.
8fe75e43 89(define_special_predicate "ext_register_operand"
edf5d079
UB
90 (and (match_operand 0 "register_operand")
91 (ior (and (match_test "TARGET_64BIT")
92 (match_test "GET_MODE (op) == DImode"))
93 (match_test "GET_MODE (op) == SImode")
94 (match_test "GET_MODE (op) == HImode"))))
8fe75e43 95
c8802daf
UB
96;; Match register operands, but include memory operands for TARGET_SSE_MATH.
97(define_predicate "register_ssemem_operand"
98 (if_then_else
99 (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
100 (match_operand 0 "nonimmediate_operand")
101 (match_operand 0 "register_operand")))
102
ebae28e9
UB
103;; Match nonimmediate operands, but exclude memory operands
104;; for TARGET_SSE_MATH if TARGET_MIX_SSE_I387 is not enabled.
105(define_predicate "nonimm_ssenomem_operand"
8b38916a
UB
106 (if_then_else
107 (and (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
108 (not (match_test "TARGET_MIX_SSE_I387")))
109 (match_operand 0 "register_operand")
110 (match_operand 0 "nonimmediate_operand")))
111
112;; The above predicate, suitable for x87 arithmetic operators.
113(define_predicate "x87nonimm_ssenomem_operand"
ebae28e9
UB
114 (if_then_else
115 (and (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
116 (not (match_test "TARGET_MIX_SSE_I387 && X87_ENABLE_ARITH (mode)")))
117 (match_operand 0 "register_operand")
118 (match_operand 0 "nonimmediate_operand")))
8c292b10 119
846e2ad8
UB
120;; Match register operands, include memory operand for TARGET_SSE4_1.
121(define_predicate "register_sse4nonimm_operand"
122 (if_then_else (match_test "TARGET_SSE4_1")
123 (match_operand 0 "nonimmediate_operand")
124 (match_operand 0 "register_operand")))
125
d5e254e1
IE
126;; Return true if VALUE is symbol reference
127(define_predicate "symbol_operand"
128 (match_code "symbol_ref"))
129
19ed9d7b 130;; Return true if VALUE can be stored in a sign extended immediate field.
8fe75e43
RH
131(define_predicate "x86_64_immediate_operand"
132 (match_code "const_int,symbol_ref,label_ref,const")
133{
134 if (!TARGET_64BIT)
135 return immediate_operand (op, mode);
136
137 switch (GET_CODE (op))
138 {
139 case CONST_INT:
44d0de8d 140 {
1a58b548 141 HOST_WIDE_INT val = INTVAL (op);
44d0de8d
UB
142 return trunc_int_for_mode (val, SImode) == val;
143 }
8fe75e43 144 case SYMBOL_REF:
e3d62871
UB
145 /* TLS symbols are not constant. */
146 if (SYMBOL_REF_TLS_MODEL (op))
147 return false;
148
149 /* Load the external function address via the GOT slot. */
150 if (ix86_force_load_from_GOT_p (op))
151 return false;
152
8fe75e43
RH
153 /* For certain code models, the symbolic references are known to fit.
154 in CM_SMALL_PIC model we know it fits if it is local to the shared
155 library. Don't count TLS SYMBOL_REFs here, since they should fit
156 only if inside of UNSPEC handled below. */
7dcbf659
JH
157 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
158 || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));
8fe75e43
RH
159
160 case LABEL_REF:
161 /* For certain code models, the code is near as well. */
162 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
163 || ix86_cmodel == CM_KERNEL);
164
165 case CONST:
166 /* We also may accept the offsetted memory references in certain
167 special cases. */
168 if (GET_CODE (XEXP (op, 0)) == UNSPEC)
169 switch (XINT (XEXP (op, 0), 1))
170 {
171 case UNSPEC_GOTPCREL:
172 case UNSPEC_DTPOFF:
173 case UNSPEC_GOTNTPOFF:
174 case UNSPEC_NTPOFF:
19ed9d7b 175 return true;
8fe75e43
RH
176 default:
177 break;
178 }
179
180 if (GET_CODE (XEXP (op, 0)) == PLUS)
181 {
182 rtx op1 = XEXP (XEXP (op, 0), 0);
183 rtx op2 = XEXP (XEXP (op, 0), 1);
8fe75e43
RH
184
185 if (ix86_cmodel == CM_LARGE)
19ed9d7b 186 return false;
7656aee4 187 if (!CONST_INT_P (op2))
19ed9d7b 188 return false;
4368a420 189
1a58b548 190 HOST_WIDE_INT offset = INTVAL (op2);
4368a420
UB
191 if (trunc_int_for_mode (offset, SImode) != offset)
192 return false;
193
8fe75e43
RH
194 switch (GET_CODE (op1))
195 {
196 case SYMBOL_REF:
fd4aca96
RH
197 /* TLS symbols are not constant. */
198 if (SYMBOL_REF_TLS_MODEL (op1))
19ed9d7b 199 return false;
e3d62871
UB
200
201 /* Load the external function address via the GOT slot. */
202 if (ix86_force_load_from_GOT_p (op1))
203 return false;
204
8fe75e43
RH
205 /* For CM_SMALL assume that latest object is 16MB before
206 end of 31bits boundary. We may also accept pretty
207 large negative constants knowing that all objects are
208 in the positive half of address space. */
7dcbf659
JH
209 if ((ix86_cmodel == CM_SMALL
210 || (ix86_cmodel == CM_MEDIUM
211 && !SYMBOL_REF_FAR_ADDR_P (op1)))
4368a420 212 && offset < 16*1024*1024)
19ed9d7b 213 return true;
8fe75e43
RH
214 /* For CM_KERNEL we know that all object resist in the
215 negative half of 32bits address space. We may not
216 accept negative offsets, since they may be just off
217 and we may accept pretty large positive ones. */
218 if (ix86_cmodel == CM_KERNEL
4368a420 219 && offset > 0)
19ed9d7b 220 return true;
8fe75e43
RH
221 break;
222
223 case LABEL_REF:
224 /* These conditions are similar to SYMBOL_REF ones, just the
225 constraints for code models differ. */
226 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
4368a420 227 && offset < 16*1024*1024)
19ed9d7b 228 return true;
8fe75e43 229 if (ix86_cmodel == CM_KERNEL
4368a420 230 && offset > 0)
19ed9d7b 231 return true;
8fe75e43
RH
232 break;
233
234 case UNSPEC:
235 switch (XINT (op1, 1))
236 {
237 case UNSPEC_DTPOFF:
238 case UNSPEC_NTPOFF:
4368a420 239 return true;
8fe75e43
RH
240 }
241 break;
242
243 default:
244 break;
245 }
246 }
247 break;
248
249 default:
7637e42c 250 gcc_unreachable ();
8fe75e43
RH
251 }
252
19ed9d7b 253 return false;
8fe75e43
RH
254})
255
19ed9d7b 256;; Return true if VALUE can be stored in the zero extended immediate field.
8fe75e43 257(define_predicate "x86_64_zext_immediate_operand"
44d0de8d 258 (match_code "const_int,symbol_ref,label_ref,const")
8fe75e43
RH
259{
260 switch (GET_CODE (op))
261 {
8fe75e43 262 case CONST_INT:
44d0de8d 263 return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);
8fe75e43
RH
264
265 case SYMBOL_REF:
8fe75e43 266 /* TLS symbols are not constant. */
fd4aca96 267 if (SYMBOL_REF_TLS_MODEL (op))
8fe75e43 268 return false;
e3d62871
UB
269
270 /* Load the external function address via the GOT slot. */
271 if (ix86_force_load_from_GOT_p (op))
272 return false;
273
274 /* For certain code models, the symbolic references are known to fit. */
7dcbf659
JH
275 return (ix86_cmodel == CM_SMALL
276 || (ix86_cmodel == CM_MEDIUM
277 && !SYMBOL_REF_FAR_ADDR_P (op)));
8fe75e43
RH
278
279 case LABEL_REF:
280 /* For certain code models, the code is near as well. */
281 return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;
282
283 case CONST:
284 /* We also may accept the offsetted memory references in certain
285 special cases. */
286 if (GET_CODE (XEXP (op, 0)) == PLUS)
287 {
288 rtx op1 = XEXP (XEXP (op, 0), 0);
289 rtx op2 = XEXP (XEXP (op, 0), 1);
290
291 if (ix86_cmodel == CM_LARGE)
19ed9d7b 292 return false;
4368a420
UB
293 if (!CONST_INT_P (op2))
294 return false;
295
1a58b548 296 HOST_WIDE_INT offset = INTVAL (op2);
4368a420
UB
297 if (trunc_int_for_mode (offset, SImode) != offset)
298 return false;
299
8fe75e43
RH
300 switch (GET_CODE (op1))
301 {
302 case SYMBOL_REF:
fd4aca96
RH
303 /* TLS symbols are not constant. */
304 if (SYMBOL_REF_TLS_MODEL (op1))
19ed9d7b 305 return false;
e3d62871
UB
306
307 /* Load the external function address via the GOT slot. */
308 if (ix86_force_load_from_GOT_p (op1))
309 return false;
310
8fe75e43
RH
311 /* For small code model we may accept pretty large positive
312 offsets, since one bit is available for free. Negative
313 offsets are limited by the size of NULL pointer area
314 specified by the ABI. */
7dcbf659
JH
315 if ((ix86_cmodel == CM_SMALL
316 || (ix86_cmodel == CM_MEDIUM
317 && !SYMBOL_REF_FAR_ADDR_P (op1)))
4368a420 318 && offset > -0x10000)
19ed9d7b 319 return true;
8fe75e43
RH
320 /* ??? For the kernel, we may accept adjustment of
321 -0x10000000, since we know that it will just convert
322 negative address space to positive, but perhaps this
323 is not worthwhile. */
324 break;
325
326 case LABEL_REF:
327 /* These conditions are similar to SYMBOL_REF ones, just the
328 constraints for code models differ. */
329 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
4368a420 330 && offset > -0x10000)
19ed9d7b 331 return true;
8fe75e43
RH
332 break;
333
334 default:
19ed9d7b 335 return false;
8fe75e43
RH
336 }
337 }
338 break;
339
340 default:
7637e42c 341 gcc_unreachable ();
8fe75e43 342 }
19ed9d7b 343 return false;
8fe75e43
RH
344})
345
31ed1665
JJ
346;; Return true if VALUE is a constant integer whose low and high words satisfy
347;; x86_64_immediate_operand.
348(define_predicate "x86_64_hilo_int_operand"
349 (match_code "const_int,const_wide_int")
350{
351 switch (GET_CODE (op))
352 {
353 case CONST_INT:
354 return x86_64_immediate_operand (op, mode);
355
356 case CONST_WIDE_INT:
357 gcc_assert (CONST_WIDE_INT_NUNITS (op) == 2);
358 return (x86_64_immediate_operand (GEN_INT (CONST_WIDE_INT_ELT (op, 0)),
359 DImode)
360 && x86_64_immediate_operand (GEN_INT (CONST_WIDE_INT_ELT (op,
361 1)),
362 DImode));
363
364 default:
365 gcc_unreachable ();
366 }
367})
368
47a6cc4e
JJ
369;; Return true if VALUE is a constant integer whose value is
370;; x86_64_immediate_operand value zero extended from word mode to mode.
371(define_predicate "x86_64_dwzext_immediate_operand"
372 (match_code "const_int,const_wide_int")
373{
374 switch (GET_CODE (op))
375 {
376 case CONST_INT:
377 if (!TARGET_64BIT)
378 return UINTVAL (op) <= HOST_WIDE_INT_UC (0xffffffff);
379 return UINTVAL (op) <= HOST_WIDE_INT_UC (0x7fffffff);
380
381 case CONST_WIDE_INT:
382 if (!TARGET_64BIT)
383 return false;
384 return (CONST_WIDE_INT_NUNITS (op) == 2
385 && CONST_WIDE_INT_ELT (op, 1) == 0
386 && (trunc_int_for_mode (CONST_WIDE_INT_ELT (op, 0), SImode)
387 == (HOST_WIDE_INT) CONST_WIDE_INT_ELT (op, 0)));
388
389 default:
390 gcc_unreachable ();
391 }
392})
393
d5e254e1
IE
394;; Return true if size of VALUE can be stored in a sign
395;; extended immediate field.
396(define_predicate "x86_64_immediate_size_operand"
397 (and (match_code "symbol_ref")
398 (ior (not (match_test "TARGET_64BIT"))
399 (match_test "ix86_cmodel == CM_SMALL")
400 (match_test "ix86_cmodel == CM_KERNEL"))))
401
19ed9d7b 402;; Return true if OP is general operand representable on x86_64.
8fe75e43
RH
403(define_predicate "x86_64_general_operand"
404 (if_then_else (match_test "TARGET_64BIT")
405 (ior (match_operand 0 "nonimmediate_operand")
406 (match_operand 0 "x86_64_immediate_operand"))
407 (match_operand 0 "general_operand")))
408
31ed1665
JJ
409;; Return true if OP's both words are general operands representable
410;; on x86_64.
411(define_predicate "x86_64_hilo_general_operand"
412 (if_then_else (match_test "TARGET_64BIT")
413 (ior (match_operand 0 "nonimmediate_operand")
414 (match_operand 0 "x86_64_hilo_int_operand"))
415 (match_operand 0 "general_operand")))
416
d1873c57
JJ
417;; Return true if OP is non-VOIDmode general operand representable
418;; on x86_64. This predicate is used in sign-extending conversion
419;; operations that require non-VOIDmode immediate operands.
420(define_predicate "x86_64_sext_operand"
421 (and (match_test "GET_MODE (op) != VOIDmode")
422 (match_operand 0 "x86_64_general_operand")))
423
424;; Return true if OP is non-VOIDmode general operand. This predicate
425;; is used in sign-extending conversion operations that require
426;; non-VOIDmode immediate operands.
427(define_predicate "sext_operand"
428 (and (match_test "GET_MODE (op) != VOIDmode")
429 (match_operand 0 "general_operand")))
430
7482c470
UB
431;; Return true if OP is representable on x86_64 as zero-extended operand.
432;; This predicate is used in zero-extending conversion operations that
433;; require non-VOIDmode immediate operands.
434(define_predicate "x86_64_zext_operand"
ca538e97
UB
435 (if_then_else (match_test "TARGET_64BIT")
436 (ior (match_operand 0 "nonimmediate_operand")
3cb2b15b
UB
437 (and (match_operand 0 "x86_64_zext_immediate_operand")
438 (match_test "GET_MODE (op) != VOIDmode")))
7482c470 439 (match_operand 0 "nonimmediate_operand")))
ca538e97 440
19ed9d7b 441;; Return true if OP is general operand representable on x86_64
8fe75e43
RH
442;; as either sign extended or zero extended constant.
443(define_predicate "x86_64_szext_general_operand"
444 (if_then_else (match_test "TARGET_64BIT")
445 (ior (match_operand 0 "nonimmediate_operand")
aaf5d6c0
UB
446 (match_operand 0 "x86_64_immediate_operand")
447 (match_operand 0 "x86_64_zext_immediate_operand"))
8fe75e43
RH
448 (match_operand 0 "general_operand")))
449
19ed9d7b 450;; Return true if OP is nonmemory operand representable on x86_64.
8fe75e43
RH
451(define_predicate "x86_64_nonmemory_operand"
452 (if_then_else (match_test "TARGET_64BIT")
453 (ior (match_operand 0 "register_operand")
454 (match_operand 0 "x86_64_immediate_operand"))
455 (match_operand 0 "nonmemory_operand")))
456
19ed9d7b 457;; Return true if OP is nonmemory operand representable on x86_64.
8fe75e43
RH
458(define_predicate "x86_64_szext_nonmemory_operand"
459 (if_then_else (match_test "TARGET_64BIT")
460 (ior (match_operand 0 "register_operand")
aaf5d6c0
UB
461 (match_operand 0 "x86_64_immediate_operand")
462 (match_operand 0 "x86_64_zext_immediate_operand"))
8fe75e43
RH
463 (match_operand 0 "nonmemory_operand")))
464
47efdea4
JH
465;; Return true when operand is PIC expression that can be computed by lea
466;; operation.
a2e49bb2 467(define_predicate "pic_32bit_operand"
47efdea4
JH
468 (match_code "const,symbol_ref,label_ref")
469{
470 if (!flag_pic)
19ed9d7b 471 return false;
a2e49bb2 472
47efdea4
JH
473 /* Rule out relocations that translate into 64bit constants. */
474 if (TARGET_64BIT && GET_CODE (op) == CONST)
475 {
476 op = XEXP (op, 0);
7656aee4 477 if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
47efdea4
JH
478 op = XEXP (op, 0);
479 if (GET_CODE (op) == UNSPEC
480 && (XINT (op, 1) == UNSPEC_GOTOFF
481 || XINT (op, 1) == UNSPEC_GOT))
19ed9d7b 482 return false;
47efdea4 483 }
a2e49bb2 484
47efdea4
JH
485 return symbolic_operand (op, mode);
486})
487
19ed9d7b 488;; Return true if OP is nonmemory operand acceptable by movabs patterns.
8fe75e43 489(define_predicate "x86_64_movabs_operand"
7aecd4e8
UB
490 (and (match_operand 0 "nonmemory_operand")
491 (not (match_operand 0 "pic_32bit_operand"))))
8fe75e43 492
19ed9d7b 493;; Return true if OP is either a symbol reference or a sum of a symbol
8fe75e43
RH
494;; reference and a constant.
495(define_predicate "symbolic_operand"
496 (match_code "symbol_ref,label_ref,const")
497{
498 switch (GET_CODE (op))
499 {
500 case SYMBOL_REF:
501 case LABEL_REF:
19ed9d7b 502 return true;
8fe75e43
RH
503
504 case CONST:
505 op = XEXP (op, 0);
506 if (GET_CODE (op) == SYMBOL_REF
507 || GET_CODE (op) == LABEL_REF
508 || (GET_CODE (op) == UNSPEC
509 && (XINT (op, 1) == UNSPEC_GOT
510 || XINT (op, 1) == UNSPEC_GOTOFF
986ce92f 511 || XINT (op, 1) == UNSPEC_PCREL
8fe75e43 512 || XINT (op, 1) == UNSPEC_GOTPCREL)))
19ed9d7b 513 return true;
8fe75e43 514 if (GET_CODE (op) != PLUS
7656aee4 515 || !CONST_INT_P (XEXP (op, 1)))
19ed9d7b 516 return false;
8fe75e43
RH
517
518 op = XEXP (op, 0);
519 if (GET_CODE (op) == SYMBOL_REF
520 || GET_CODE (op) == LABEL_REF)
19ed9d7b 521 return true;
8fe75e43
RH
522 /* Only @GOTOFF gets offsets. */
523 if (GET_CODE (op) != UNSPEC
524 || XINT (op, 1) != UNSPEC_GOTOFF)
19ed9d7b 525 return false;
8fe75e43
RH
526
527 op = XVECEXP (op, 0, 0);
528 if (GET_CODE (op) == SYMBOL_REF
529 || GET_CODE (op) == LABEL_REF)
19ed9d7b
UB
530 return true;
531 return false;
8fe75e43
RH
532
533 default:
7637e42c 534 gcc_unreachable ();
8fe75e43
RH
535 }
536})
537
8fe75e43
RH
538;; Return true if OP is a symbolic operand that resolves locally.
539(define_predicate "local_symbolic_operand"
540 (match_code "const,label_ref,symbol_ref")
541{
542 if (GET_CODE (op) == CONST
543 && GET_CODE (XEXP (op, 0)) == PLUS
7656aee4 544 && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
8fe75e43
RH
545 op = XEXP (XEXP (op, 0), 0);
546
547 if (GET_CODE (op) == LABEL_REF)
19ed9d7b 548 return true;
8fe75e43
RH
549
550 if (GET_CODE (op) != SYMBOL_REF)
19ed9d7b 551 return false;
8fe75e43 552
19ed9d7b
UB
553 if (SYMBOL_REF_TLS_MODEL (op))
554 return false;
c1a46941 555
9216baf1
KT
556 /* Dll-imported symbols are always external. */
557 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
558 return false;
8fe75e43 559 if (SYMBOL_REF_LOCAL_P (op))
19ed9d7b 560 return true;
8fe75e43
RH
561
562 /* There is, however, a not insubstantial body of code in the rest of
563 the compiler that assumes it can just stick the results of
564 ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done. */
565 /* ??? This is a hack. Should update the body of the compiler to
566 always create a DECL an invoke targetm.encode_section_info. */
567 if (strncmp (XSTR (op, 0), internal_label_prefix,
568 internal_label_prefix_len) == 0)
19ed9d7b 569 return true;
8fe75e43 570
19ed9d7b 571 return false;
8fe75e43
RH
572})
573
170bdaba
RS
574;; Test for a legitimate @GOTOFF operand.
575;;
576;; VxWorks does not impose a fixed gap between segments; the run-time
577;; gap can be different from the object-file gap. We therefore can't
578;; use @GOTOFF unless we are absolutely sure that the symbol is in the
579;; same segment as the GOT. Unfortunately, the flexibility of linker
580;; scripts means that we can't be sure of that in general, so assume
581;; that @GOTOFF is never valid on VxWorks.
582(define_predicate "gotoff_operand"
77fa1d54 583 (and (not (match_test "TARGET_VXWORKS_RTP"))
170bdaba
RS
584 (match_operand 0 "local_symbolic_operand")))
585
8fe75e43 586;; Test for various thread-local symbols.
62a1c041 587(define_special_predicate "tls_symbolic_operand"
8fe75e43 588 (and (match_code "symbol_ref")
19ed9d7b 589 (match_test "SYMBOL_REF_TLS_MODEL (op)")))
8fe75e43 590
62a1c041 591(define_special_predicate "tls_modbase_operand"
5bf5a10b
AO
592 (and (match_code "symbol_ref")
593 (match_test "op == ix86_tls_module_base ()")))
f3648f7d
UB
594
595(define_predicate "tls_address_pattern"
596 (and (match_code "set,parallel,unspec,unspec_volatile")
597 (match_test "ix86_tls_address_pattern_p (op)")))
5bf5a10b 598
8fe75e43 599;; Test for a pc-relative call operand
a1d3d84b 600(define_predicate "constant_call_address_operand"
da489f73
RH
601 (match_code "symbol_ref")
602{
e7c77c4f
AK
603 if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC
604 || flag_force_indirect_call)
da489f73
RH
605 return false;
606 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
607 return false;
608 return true;
609})
8fe75e43 610
618cc62e
UB
611;; P6 processors will jump to the address after the decrement when %esp
612;; is used as a call operand, so they will execute return address as a code.
613;; See Pentium Pro errata 70, Pentium 2 errata A33 and Pentium 3 errata E17.
614
615(define_predicate "call_register_no_elim_operand"
616 (match_operand 0 "register_operand")
617{
3a6d28d6 618 if (SUBREG_P (op))
618cc62e
UB
619 op = SUBREG_REG (op);
620
621 if (!TARGET_64BIT && op == stack_pointer_rtx)
19ed9d7b 622 return false;
618cc62e
UB
623
624 return register_no_elim_operand (op, mode);
625})
626
4150f926
UB
627;; True for any non-virtual or eliminable register. Used in places where
628;; instantiation of such a register may cause the pattern to not be recognized.
629(define_predicate "register_no_elim_operand"
630 (match_operand 0 "register_operand")
631{
3a6d28d6 632 if (SUBREG_P (op))
4150f926
UB
633 op = SUBREG_REG (op);
634 return !(op == arg_pointer_rtx
635 || op == frame_pointer_rtx
636 || IN_RANGE (REGNO (op),
637 FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
638})
639
8fe75e43
RH
640;; Similarly, but include the stack pointer. This is used to prevent esp
641;; from being used as an index reg.
642(define_predicate "index_register_operand"
643 (match_operand 0 "register_operand")
644{
3a6d28d6 645 if (SUBREG_P (op))
8fe75e43 646 op = SUBREG_REG (op);
1e7e62b1 647 if (reload_completed)
9a9286af
RH
648 return REG_OK_FOR_INDEX_STRICT_P (op);
649 else
650 return REG_OK_FOR_INDEX_NONSTRICT_P (op);
8fe75e43
RH
651})
652
653;; Return false if this is any eliminable register. Otherwise general_operand.
654(define_predicate "general_no_elim_operand"
655 (if_then_else (match_code "reg,subreg")
656 (match_operand 0 "register_no_elim_operand")
657 (match_operand 0 "general_operand")))
658
659;; Return false if this is any eliminable register. Otherwise
660;; register_operand or a constant.
661(define_predicate "nonmemory_no_elim_operand"
662 (ior (match_operand 0 "register_no_elim_operand")
663 (match_operand 0 "immediate_operand")))
664
6025b127
L
665;; Test for a valid operand for indirect branch.
666(define_predicate "indirect_branch_operand"
b9719055 667 (ior (match_operand 0 "register_operand")
c2c601b2 668 (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
894c144c 669 (not (match_test "TARGET_X32"))
b9719055 670 (match_operand 0 "memory_operand"))))
6025b127 671
55d2ee57 672;; Return true if OP is a memory operands that can be used in sibcalls.
af9345d7
L
673;; Since sibcall never returns, we can only use call-clobbered register
674;; as GOT base. Allow GOT slot here only with pseudo register as GOT
675;; base. Properly handle sibcall over GOT slot with *sibcall_GOT_32
676;; and *sibcall_value_GOT_32 patterns.
55d2ee57 677(define_predicate "sibcall_memory_operand"
af9345d7
L
678 (match_operand 0 "memory_operand")
679{
680 op = XEXP (op, 0);
681 if (CONSTANT_P (op))
682 return true;
683 if (GET_CODE (op) == PLUS && REG_P (XEXP (op, 0)))
684 {
685 int regno = REGNO (XEXP (op, 0));
686 if (!HARD_REGISTER_NUM_P (regno) || call_used_regs[regno])
687 {
688 op = XEXP (op, 1);
689 if (GOT32_symbol_operand (op, VOIDmode))
690 return true;
691 }
692 }
693 return false;
694})
55d2ee57 695
fa87d16d
L
696;; Return true if OP is a GOT memory operand.
697(define_predicate "GOT_memory_operand"
4a5a0497 698 (match_operand 0 "memory_operand")
fa87d16d
L
699{
700 op = XEXP (op, 0);
701 return (GET_CODE (op) == CONST
702 && GET_CODE (XEXP (op, 0)) == UNSPEC
703 && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL);
704})
705
8fe75e43 706;; Test for a valid operand for a call instruction.
1ce8d925
UB
707;; Allow constant call address operands in Pmode only.
708(define_special_predicate "call_insn_operand"
a1d3d84b
UB
709 (ior (match_test "constant_call_address_operand
710 (op, mode == VOIDmode ? mode : Pmode)")
aaf5d6c0 711 (match_operand 0 "call_register_no_elim_operand")
c2c601b2 712 (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
5ca876c3
L
713 (ior (and (not (match_test "TARGET_X32"))
714 (match_operand 0 "memory_operand"))
715 (and (match_test "TARGET_X32 && Pmode == DImode")
716 (match_operand 0 "GOT_memory_operand"))))))
8fe75e43 717
35fd3193 718;; Similarly, but for tail calls, in which we cannot allow memory references.
1ce8d925 719(define_special_predicate "sibcall_insn_operand"
a1d3d84b
UB
720 (ior (match_test "constant_call_address_operand
721 (op, mode == VOIDmode ? mode : Pmode)")
cb105922 722 (match_operand 0 "register_no_elim_operand")
c2c601b2 723 (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
5ca876c3
L
724 (ior (and (not (match_test "TARGET_X32"))
725 (match_operand 0 "sibcall_memory_operand"))
726 (and (match_test "TARGET_X32 && Pmode == DImode")
727 (match_operand 0 "GOT_memory_operand"))))))
f70d27e0 728
af9345d7
L
729;; Return true if OP is a 32-bit GOT symbol operand.
730(define_predicate "GOT32_symbol_operand"
4a5a0497 731 (match_test "GET_CODE (op) == CONST
af9345d7
L
732 && GET_CODE (XEXP (op, 0)) == UNSPEC
733 && XINT (XEXP (op, 0), 1) == UNSPEC_GOT"))
734
8fe75e43
RH
735;; Match exactly zero.
736(define_predicate "const0_operand"
eebe7d1c 737 (match_code "const_int,const_double,const_vector")
b4e82619
RH
738{
739 if (mode == VOIDmode)
740 mode = GET_MODE (op);
741 return op == CONST0_RTX (mode);
742})
8fe75e43 743
55284a77 744;; Match one or a vector with all elements equal to one.
8fe75e43 745(define_predicate "const1_operand"
eebe7d1c 746 (match_code "const_int,const_double,const_vector")
880ab4be
AT
747{
748 if (mode == VOIDmode)
749 mode = GET_MODE (op);
750 return op == CONST1_RTX (mode);
751})
8fe75e43 752
a7d56fb8
UB
753;; Match exactly -1.
754(define_predicate "constm1_operand"
755 (and (match_code "const_int")
2ff0cbe5 756 (match_test "op == constm1_rtx")))
a7d56fb8 757
f7acbf4c
RS
758;; Match exactly eight.
759(define_predicate "const8_operand"
760 (and (match_code "const_int")
761 (match_test "INTVAL (op) == 8")))
762
a952487c
JJ
763;; Match exactly 128.
764(define_predicate "const128_operand"
765 (and (match_code "const_int")
766 (match_test "INTVAL (op) == 128")))
767
88b590c5
UB
768;; Match exactly 0x0FFFFFFFF in anddi as a zero-extension operation
769(define_predicate "const_32bit_mask"
770 (and (match_code "const_int")
771 (match_test "trunc_int_for_mode (INTVAL (op), DImode)
772 == (HOST_WIDE_INT) 0xffffffff")))
773
8fe75e43
RH
774;; Match 2, 4, or 8. Used for leal multiplicands.
775(define_predicate "const248_operand"
776 (match_code "const_int")
777{
778 HOST_WIDE_INT i = INTVAL (op);
779 return i == 2 || i == 4 || i == 8;
780})
781
d697acca
BS
782;; Match 1, 2, or 3. Used for lea shift amounts.
783(define_predicate "const123_operand"
784 (match_code "const_int")
785{
786 HOST_WIDE_INT i = INTVAL (op);
787 return i == 1 || i == 2 || i == 3;
788})
789
66b03f81
UB
790;; Match 2, 3, 6, or 7
791(define_predicate "const2367_operand"
cf73ee60
KY
792 (match_code "const_int")
793{
794 HOST_WIDE_INT i = INTVAL (op);
66b03f81 795 return i == 2 || i == 3 || i == 6 || i == 7;
cf73ee60
KY
796})
797
977e83a3
KY
798;; Match 1, 2, 4, or 8
799(define_predicate "const1248_operand"
800 (match_code "const_int")
801{
802 HOST_WIDE_INT i = INTVAL (op);
803 return i == 1 || i == 2 || i == 4 || i == 8;
804})
805
3b4c46d7
L
806;; Match 3, 5, or 9. Used for leal multiplicands.
807(define_predicate "const359_operand"
808 (match_code "const_int")
809{
810 HOST_WIDE_INT i = INTVAL (op);
811 return i == 3 || i == 5 || i == 9;
812})
813
de72ea02
IT
814;; Match 4 or 8 to 11. Used for embeded rounding.
815(define_predicate "const_4_or_8_to_11_operand"
816 (match_code "const_int")
817{
818 HOST_WIDE_INT i = INTVAL (op);
819 return i == 4 || (i >= 8 && i <= 11);
820})
821
822;; Match 4 or 8. Used for SAE.
823(define_predicate "const48_operand"
824 (match_code "const_int")
825{
826 HOST_WIDE_INT i = INTVAL (op);
827 return i == 4 || i == 8;
828})
829
ef719a44
RH
830;; Match 0 or 1.
831(define_predicate "const_0_to_1_operand"
832 (and (match_code "const_int")
77fa1d54
UB
833 (ior (match_test "op == const0_rtx")
834 (match_test "op == const1_rtx"))))
ef719a44 835
8fe75e43
RH
836;; Match 0 to 3.
837(define_predicate "const_0_to_3_operand"
838 (and (match_code "const_int")
8dde5924 839 (match_test "IN_RANGE (INTVAL (op), 0, 3)")))
8fe75e43 840
47490470
AI
841;; Match 0 to 4.
842(define_predicate "const_0_to_4_operand"
843 (and (match_code "const_int")
844 (match_test "IN_RANGE (INTVAL (op), 0, 4)")))
845
846;; Match 0 to 5.
847(define_predicate "const_0_to_5_operand"
848 (and (match_code "const_int")
849 (match_test "IN_RANGE (INTVAL (op), 0, 5)")))
850
8fe75e43
RH
851;; Match 0 to 7.
852(define_predicate "const_0_to_7_operand"
853 (and (match_code "const_int")
8dde5924 854 (match_test "IN_RANGE (INTVAL (op), 0, 7)")))
8fe75e43
RH
855
856;; Match 0 to 15.
857(define_predicate "const_0_to_15_operand"
858 (and (match_code "const_int")
8dde5924 859 (match_test "IN_RANGE (INTVAL (op), 0, 15)")))
8fe75e43 860
04e1d06b
MM
861;; Match 0 to 31.
862(define_predicate "const_0_to_31_operand"
863 (and (match_code "const_int")
864 (match_test "IN_RANGE (INTVAL (op), 0, 31)")))
865
7cacf53e
RH
866;; Match 0 to 63.
867(define_predicate "const_0_to_63_operand"
868 (and (match_code "const_int")
8dde5924 869 (match_test "IN_RANGE (INTVAL (op), 0, 63)")))
7cacf53e 870
8fe75e43
RH
871;; Match 0 to 255.
872(define_predicate "const_0_to_255_operand"
873 (and (match_code "const_int")
8dde5924 874 (match_test "IN_RANGE (INTVAL (op), 0, 255)")))
8fe75e43 875
ef719a44
RH
;; Match any multiple of 8 in the range (0 to 255) * 8.
(define_predicate "const_0_to_255_mul_8_operand"
  (match_code "const_int")
{
  unsigned HOST_WIDE_INT val = INTVAL (op);
  return (val & 7) == 0 && val <= 255 * 8;
})
883
;; Return true if OP is CONST_INT >= 1 and <= 31 (a valid operand
;; for shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 31)")))
ef719a44 889
19ed9d7b 890;; Return true if OP is CONST_INT >= 1 and <= 63 (a valid operand
934f2a96
UB
891;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
892(define_predicate "const_1_to_63_operand"
893 (and (match_code "const_int")
894 (match_test "IN_RANGE (INTVAL (op), 1, 63)")))
895
ef719a44
RH
896;; Match 2 or 3.
897(define_predicate "const_2_to_3_operand"
898 (and (match_code "const_int")
8dde5924 899 (match_test "IN_RANGE (INTVAL (op), 2, 3)")))
ef719a44 900
95879c72
L
901;; Match 4 to 5.
902(define_predicate "const_4_to_5_operand"
903 (and (match_code "const_int")
904 (match_test "IN_RANGE (INTVAL (op), 4, 5)")))
905
ef719a44
RH
906;; Match 4 to 7.
907(define_predicate "const_4_to_7_operand"
908 (and (match_code "const_int")
8dde5924 909 (match_test "IN_RANGE (INTVAL (op), 4, 7)")))
ef719a44 910
95879c72
L
911;; Match 6 to 7.
912(define_predicate "const_6_to_7_operand"
913 (and (match_code "const_int")
914 (match_test "IN_RANGE (INTVAL (op), 6, 7)")))
915
c003c6d6
AI
916;; Match 8 to 9.
917(define_predicate "const_8_to_9_operand"
918 (and (match_code "const_int")
919 (match_test "IN_RANGE (INTVAL (op), 8, 9)")))
920
95879c72
L
921;; Match 8 to 11.
922(define_predicate "const_8_to_11_operand"
923 (and (match_code "const_int")
924 (match_test "IN_RANGE (INTVAL (op), 8, 11)")))
925
2e2206fa
AI
926;; Match 8 to 15.
927(define_predicate "const_8_to_15_operand"
928 (and (match_code "const_int")
929 (match_test "IN_RANGE (INTVAL (op), 8, 15)")))
930
c003c6d6
AI
931;; Match 10 to 11.
932(define_predicate "const_10_to_11_operand"
933 (and (match_code "const_int")
934 (match_test "IN_RANGE (INTVAL (op), 10, 11)")))
935
936;; Match 12 to 13.
937(define_predicate "const_12_to_13_operand"
938 (and (match_code "const_int")
939 (match_test "IN_RANGE (INTVAL (op), 12, 13)")))
940
95879c72
L
941;; Match 12 to 15.
942(define_predicate "const_12_to_15_operand"
943 (and (match_code "const_int")
944 (match_test "IN_RANGE (INTVAL (op), 12, 15)")))
945
c003c6d6
AI
946;; Match 14 to 15.
947(define_predicate "const_14_to_15_operand"
948 (and (match_code "const_int")
949 (match_test "IN_RANGE (INTVAL (op), 14, 15)")))
950
951;; Match 16 to 19.
952(define_predicate "const_16_to_19_operand"
953 (and (match_code "const_int")
954 (match_test "IN_RANGE (INTVAL (op), 16, 19)")))
955
2e2206fa
AI
956;; Match 16 to 31.
957(define_predicate "const_16_to_31_operand"
958 (and (match_code "const_int")
959 (match_test "IN_RANGE (INTVAL (op), 16, 31)")))
960
c003c6d6
AI
961;; Match 20 to 23.
962(define_predicate "const_20_to_23_operand"
963 (and (match_code "const_int")
964 (match_test "IN_RANGE (INTVAL (op), 20, 23)")))
965
966;; Match 24 to 27.
967(define_predicate "const_24_to_27_operand"
968 (and (match_code "const_int")
969 (match_test "IN_RANGE (INTVAL (op), 24, 27)")))
970
971;; Match 28 to 31.
972(define_predicate "const_28_to_31_operand"
973 (and (match_code "const_int")
974 (match_test "IN_RANGE (INTVAL (op), 28, 31)")))
975
8fe75e43
RH
;; True if this is a constant appropriate for an increment or decrement.
(define_predicate "incdec_operand"
  (match_code "const_int")
{
  /* On Pentium4, the inc and dec operations cause extra dependency on flag
     registers, since carry flag is not set.  */
  if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
    return false;
  return op == const1_rtx || op == constm1_rtx;
})
986
93330ea1
RH
;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (ior (match_test "op == const1_rtx")
		 (match_test "op == constm1_rtx")))))
93330ea1 993
8fe75e43
RH
994;; True if OP is acceptable as operand of DImode shift expander.
995(define_predicate "shiftdi_operand"
996 (if_then_else (match_test "TARGET_64BIT")
997 (match_operand 0 "nonimmediate_operand")
998 (match_operand 0 "register_operand")))
999
93330ea1
RH
1000(define_predicate "ashldi_input_operand"
1001 (if_then_else (match_test "TARGET_64BIT")
1002 (match_operand 0 "nonimmediate_operand")
1003 (match_operand 0 "reg_or_pm1_operand")))
1004
;; Return true if OP is a vector load from the constant pool with just
;; the first element nonzero.
(define_predicate "zero_extended_scalar_load_operand"
  (match_code "mem")
{
  unsigned n_elts;
  op = avoid_constant_pool_reference (op);

  if (GET_CODE (op) != CONST_VECTOR)
    return false;

  n_elts = CONST_VECTOR_NUNITS (op);

  /* Walk elements from the last down to (but not including) element 0;
     all of them must be the shared zero rtx for the inner mode.  */
  for (n_elts--; n_elts > 0; n_elts--)
    {
      rtx elt = CONST_VECTOR_ELT (op, n_elts);
      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
	return false;
    }
  return true;
})
1026
5656a184
EC
1027/* Return true if operand is a vector constant that is all ones. */
1028(define_predicate "vector_all_ones_operand"
30aa6349
RS
1029 (and (match_code "const_vector")
1030 (match_test "INTEGRAL_MODE_P (GET_MODE (op))")
1031 (match_test "op == CONSTM1_RTX (GET_MODE (op))")))
5656a184 1032
3f50525d
L
;; Return true when OP is operand acceptable for vector memory operand.
;; Only AVX can have misaligned memory operand.
(define_predicate "vector_memory_operand"
  (and (match_operand 0 "memory_operand")
       (ior (match_test "TARGET_AVX")
	    (match_test "MEM_ALIGN (op) >= GET_MODE_ALIGNMENT (mode)"))))
1039
acf93f1e
L
;; Return true when OP is register_operand or vector_memory_operand.
(define_predicate "vector_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "vector_memory_operand")))
1044
42bace41
JJ
1045;; Return true when OP is either nonimmediate operand, or any
1046;; CONST_VECTOR.
1047(define_predicate "nonimmediate_or_const_vector_operand"
1048 (ior (match_operand 0 "nonimmediate_operand")
1049 (match_code "const_vector")))
1050
19ed9d7b 1051;; Return true when OP is nonimmediate or standard SSE constant.
5656a184 1052(define_predicate "nonimmediate_or_sse_const_operand"
55284a77
UB
1053 (ior (match_operand 0 "nonimmediate_operand")
1054 (match_test "standard_sse_constant_p (op, mode)")))
5656a184 1055
eb701deb
RH
1056;; Return true if OP is a register or a zero.
1057(define_predicate "reg_or_0_operand"
1058 (ior (match_operand 0 "register_operand")
1059 (match_operand 0 "const0_operand")))
1060
808d8de5
UB
;; Return true when OP is a nonimmediate or zero.
(define_predicate "nonimm_or_0_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))
1065
5c8617dc
UB
1066(define_predicate "norex_memory_operand"
1067 (and (match_operand 0 "memory_operand")
1068 (not (match_test "x86_extended_reg_mentioned_p (op)"))))
1069
65e95828
UB
1070;; Return true for RTX codes that force SImode address.
1071(define_predicate "SImode_address_operand"
1072 (match_code "subreg,zero_extend,and"))
1073
;; Return true if op is a valid address for LEA, and does not contain
;; a segment override.  Defined as a special predicate to allow
;; mode-less const_int operands pass to address_operand.
(define_special_predicate "address_no_seg_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  /* Being a special predicate, mode is not checked automatically;
     enforce it here except for mode-less CONST_INTs.  */
  if (!CONST_INT_P (op)
      && mode != VOIDmode
      && GET_MODE (op) != mode)
    return false;

  /* Decomposition cannot fail: op already passed address_operand.  */
  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  return parts.seg == ADDR_SPACE_GENERIC;
})
1092
e43451aa
JJ
;; Return true if op is a valid base register, displacement or
;; sum of base register and displacement for VSIB addressing.
(define_predicate "vsib_address_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;
  rtx disp;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  /* VSIB uses its own index register, so no index or segment allowed.  */
  if (parts.index || parts.seg != ADDR_SPACE_GENERIC)
    return false;

  /* VSIB addressing doesn't support (%rip).  */
  if (parts.disp)
    {
      disp = parts.disp;
      if (GET_CODE (disp) == CONST)
	{
	  disp = XEXP (disp, 0);
	  if (GET_CODE (disp) == PLUS)
	    disp = XEXP (disp, 0);
	  /* These unspecs are emitted as RIP-relative references.  */
	  if (GET_CODE (disp) == UNSPEC)
	    switch (XINT (disp, 1))
	      {
	      case UNSPEC_GOTPCREL:
	      case UNSPEC_PCREL:
	      case UNSPEC_GOTNTPOFF:
		return false;
	      }
	}
      /* Bare symbols under 64-bit PIC also become RIP-relative.  */
      if (TARGET_64BIT
	  && flag_pic
	  && (GET_CODE (disp) == SYMBOL_REF
	      || GET_CODE (disp) == LABEL_REF))
	return false;
    }

  return true;
})
1134
1135(define_predicate "vsib_mem_operator"
1136 (match_code "mem"))
1137
;; Return true if the rtx is known to be at least 32 bits aligned.
(define_predicate "aligned_operand"
  (match_operand 0 "general_operand")
{
  struct ix86_address parts;
  int ok;

  /* Registers and immediate operands are always "aligned".  */
  if (!MEM_P (op))
    return true;

  /* All patterns using aligned_operand on memory operands end up
     in promoting memory operand to 64bit and thus causing memory mismatch.  */
  if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
    return false;

  /* Don't even try to do any aligned optimizations with volatiles.  */
  if (MEM_VOLATILE_P (op))
    return false;

  if (MEM_ALIGN (op) >= 32)
    return true;

  op = XEXP (op, 0);

  /* Pushes and pops are only valid on the stack pointer.  */
  if (GET_CODE (op) == PRE_DEC
      || GET_CODE (op) == POST_INC)
    return true;

  /* Decode the address.  */
  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  /* Strip subregs so REGNO_POINTER_ALIGN sees the hard/pseudo reg.  */
  if (parts.base && SUBREG_P (parts.base))
    parts.base = SUBREG_REG (parts.base);
  if (parts.index && SUBREG_P (parts.index))
    parts.index = SUBREG_REG (parts.index);

  /* Look for some component that isn't known to be aligned.  */
  if (parts.index)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
	return false;
    }
  if (parts.base)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
	return false;
    }
  if (parts.disp)
    {
      if (!CONST_INT_P (parts.disp)
	  || (INTVAL (parts.disp) & 3))
	return false;
    }

  /* Didn't find one -- this must be an aligned address.  */
  return true;
})
1198
;; Return true if OP is memory operand with a displacement.
(define_predicate "memory_displacement_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  /* memory_operand guarantees the address decomposes successfully.  */
  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);
  return parts.disp != NULL_RTX;
})
1210
;; Return true if OP is memory operand with a displacement only
;; (no base or index register).  Never true for 64-bit targets.
(define_predicate "memory_displacement_only_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  if (TARGET_64BIT)
    return false;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);

  if (parts.base || parts.index)
    return false;

  return parts.disp != NULL_RTX;
})
1229
;; Return true if OP is memory operand that cannot be represented
;; by the modRM array.
(define_predicate "long_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "memory_address_length (op, false)")))
8fe75e43 1235
;; Return true if OP is a comparison operator that can be issued by fcmov.
(define_predicate "fcmov_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  /* FP comparisons are first reduced to the equivalent integer
     condition code after the fcompare.  */
  if (inmode == CCFPmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return false;
      code = ix86_fp_compare_code_to_integer (code);
    }
  /* i387 supports just limited amount of conditional codes.  */
  switch (code)
    {
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCFPmode || inmode == CCCmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
    case EQ: case NE:
      return true;
    default:
      return false;
    }
})
1263
;; Return true if OP is a comparison that can be used in the CMPSS/CMPPS insns.
;; The first set are supported directly; the second set can't be done with
;; full IEEE support, i.e. NaNs.
(define_predicate "sse_comparison_operator"
  (ior (match_code "eq,ne,lt,le,unordered,unge,ungt,ordered")
       (and (match_test "TARGET_AVX")
	    (match_code "ge,gt,uneq,unle,unlt,ltgt"))))
95879c72 1272
04e1d06b
MM
1273(define_predicate "ix86_comparison_int_operator"
1274 (match_code "ne,eq,ge,gt,le,lt"))
1275
1276(define_predicate "ix86_comparison_uns_operator"
1277 (match_code "ne,eq,geu,gtu,leu,ltu"))
1278
33ee5810
UB
1279(define_predicate "bt_comparison_operator"
1280 (match_code "ne,eq"))
1281
;; Return true if OP is a valid comparison operator in valid mode.
(define_predicate "ix86_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  /* FP comparisons only allow the trivial single-flag-bit subset.  */
  if (inmode == CCFPmode)
    return ix86_trivial_fp_comparison_operator (op, mode);

  /* Each condition code is only available from certain CC modes.  */
  switch (code)
    {
    case EQ: case NE:
      if (inmode == CCGZmode)
	return false;
      return true;
    case GE: case LT:
      if (inmode == CCmode || inmode == CCGCmode
	  || inmode == CCGOCmode || inmode == CCNOmode || inmode == CCGZmode)
	return true;
      return false;
    case GEU: case LTU:
      if (inmode == CCGZmode)
	return true;
      /* FALLTHRU */
    case GTU: case LEU:
      if (inmode == CCmode || inmode == CCCmode || inmode == CCGZmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
      if (inmode == CCmode)
	return true;
      return false;
    case GT: case LE:
      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
	return true;
      return false;
    default:
      return false;
    }
})
1323
19ed9d7b
UB
;; Return true if OP is a valid comparison operator
;; testing carry flag to be set.
(define_predicate "ix86_carry_flag_operator"
  (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  /* Reduce FP comparisons to the equivalent integer condition code.  */
  if (inmode == CCFPmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return false;
      code = ix86_fp_compare_code_to_integer (code);
    }
  else if (inmode == CCCmode)
    return code == LTU || code == GTU;
  else if (inmode != CCmode)
    return false;

  return code == LTU;
})
1345
;; Return true if this comparison only requires testing one flag bit.
(define_predicate "ix86_trivial_fp_comparison_operator"
  (match_code "gt,ge,unlt,unle,uneq,ltgt,ordered,unordered"))
1349
;; Return true if we know how to do this comparison.  Others require
;; testing more than one flag bit, and we let the generic middle-end
;; code do that.
(define_predicate "ix86_fp_comparison_operator"
  (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
                             == IX86_FPCMP_ARITH")
               (match_operand 0 "comparison_operator")
               (match_operand 0 "ix86_trivial_fp_comparison_operator")))
1358
8fe75e43
RH
1359;; Nearly general operand, but accept any const_double, since we wish
1360;; to be able to drop them into memory rather than have them get pulled
1361;; into registers.
1362(define_predicate "cmp_fp_expander_operand"
1363 (ior (match_code "const_double")
1364 (match_operand 0 "general_operand")))
1365
1366;; Return true if this is a valid binary floating-point operation.
1367(define_predicate "binary_fp_operator"
1368 (match_code "plus,minus,mult,div"))
1369
1370;; Return true if this is a multiply operation.
1371(define_predicate "mult_operator"
1372 (match_code "mult"))
1373
1374;; Return true if this is a division operation.
1375(define_predicate "div_operator"
1376 (match_code "div"))
1377
3f831b7d
JJ
1378;; Return true if this is a plus, minus, and, ior or xor operation.
1379(define_predicate "plusminuslogic_operator"
1380 (match_code "plus,minus,and,ior,xor"))
1381
8fe75e43
RH
1382;; Return true for ARITHMETIC_P.
1383(define_predicate "arith_or_logical_operator"
513618db
RH
1384 (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
1385 mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))
8fe75e43 1386
bab64f23
PB
1387;; Return true for COMMUTATIVE_P.
1388(define_predicate "commutative_operator"
1389 (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))
1390
19ed9d7b 1391;; Return true if OP is a binary operator that can be promoted to wider mode.
8fe75e43 1392(define_predicate "promotable_binary_operator"
67266ebb 1393 (ior (match_code "plus,minus,and,ior,xor,ashift")
8fe75e43 1394 (and (match_code "mult")
a646aded 1395 (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))
8fe75e43 1396
25da5dc7
RH
1397(define_predicate "compare_operator"
1398 (match_code "compare"))
7cacf53e
RH
1399
1400(define_predicate "absneg_operator"
1401 (match_code "abs,neg"))
66e1ecfe 1402
f913cc2a
UB
;; Return true if OP is a memory operand, aligned to
;; less than its natural alignment.
(define_predicate "misaligned_operand"
  (and (match_code "mem")
       (match_test "MEM_ALIGN (op) < GET_MODE_BITSIZE (mode)")))
95879c72 1408
;; Return true if OP is a vzeroall operation, known to be a PARALLEL.
(define_predicate "vzeroall_operation"
  (match_code "parallel")
{
  unsigned i, nregs = TARGET_64BIT ? 16 : 8;

  /* One leading element plus one SET per SSE register.  */
  if ((unsigned) XVECLEN (op, 0) != 1 + nregs)
    return false;

  /* Every remaining element must set SSE register I to zero.  */
  for (i = 0; i < nregs; i++)
    {
      rtx elt = XVECEXP (op, 0, i+1);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != V8SImode
	  || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
	  || SET_SRC (elt) != CONST0_RTX (V8SImode))
	return false;
    }
  return true;
})
1431
b38ab29f
UB
;; Return true if OP is a vzeroall pattern.
(define_predicate "vzeroall_pattern"
  (and (match_code "parallel")
       (match_code "unspec_volatile" "a")
       (match_test "XINT (XVECEXP (op, 0, 0), 1) == UNSPECV_VZEROALL")))
1437
;; Return true if OP is a vzeroupper pattern.
(define_predicate "vzeroupper_pattern"
  (and (match_code "unspec_volatile")
       (match_test "XINT (op, 1) == UNSPECV_VZEROUPPER")))
ff97910d 1442
7121e32b
UB
;; Return true if OP is an addsub vec_merge operation.
(define_predicate "addsub_vm_operator"
  (match_code "vec_merge")
{
  rtx op0, op1;
  int swapped;
  HOST_WIDE_INT mask;
  int nunits, elt;

  op0 = XEXP (op, 0);
  op1 = XEXP (op, 1);

  /* Sanity check.  */
  if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
    swapped = 0;
  else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
    swapped = 1;
  else
    gcc_unreachable ();

  mask = INTVAL (XEXP (op, 2));
  nunits = GET_MODE_NUNITS (mode);

  /* Even elements must come from the MINUS arm and odd elements
     from the PLUS arm (or vice versa when swapped).  */
  for (elt = 0; elt < nunits; elt++)
    {
      /* bit clear: take from op0, set: take from op1  */
      int bit = !(mask & (HOST_WIDE_INT_1U << elt));

      if (bit != ((elt & 1) ^ swapped))
	return false;
    }

  return true;
})
1477
;; Return true if OP is an addsub vec_select/vec_concat operation.
(define_predicate "addsub_vs_operator"
  (and (match_code "vec_select")
       (match_code "vec_concat" "0"))
{
  rtx op0, op1;
  bool swapped;
  int nunits, elt;

  op0 = XEXP (XEXP (op, 0), 0);
  op1 = XEXP (XEXP (op, 0), 1);

  /* Sanity check.  */
  if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
    swapped = false;
  else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
    swapped = true;
  else
    gcc_unreachable ();

  nunits = GET_MODE_NUNITS (mode);
  if (XVECLEN (XEXP (op, 1), 0) != nunits)
    return false;

  /* We already checked that permutation is suitable for addsub,
     so only look at the first element of the parallel.  */
  elt = INTVAL (XVECEXP (XEXP (op, 1), 0, 0));

  return elt == (swapped ? nunits : 0);
})
1508
;; Return true if OP is a parallel for an addsub vec_select.
(define_predicate "addsub_vs_parallel"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  int nelt = XVECLEN (op, 0);
  int elt, i;

  if (nelt < 2)
    return false;

  /* Check that the permutation is suitable for addsub.
     For example, { 0 9 2 11 4 13 6 15 } or { 8 1 10 3 12 5 14 7 }.  */
  elt = INTVAL (XVECEXP (op, 0, 0));
  if (elt == 0)
    {
      /* Even indices from the first half, odd from the second.  */
      for (i = 1; i < nelt; ++i)
	if (INTVAL (XVECEXP (op, 0, i)) != (i + (i & 1) * nelt))
	  return false;
    }
  else if (elt == nelt)
    {
      /* Even indices from the second half, odd from the first.  */
      for (i = 1; i < nelt; ++i)
	if (INTVAL (XVECEXP (op, 0, i)) != (elt + i - (i & 1) * nelt))
	  return false;
    }
  else
    return false;

  return true;
})
1540
;; Return true if OP is a parallel for a vbroadcast permute.
(define_predicate "avx_vbroadcast_operand"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  rtx elt = XVECEXP (op, 0, 0);
  int i, nelt = XVECLEN (op, 0);

  /* Don't bother checking there are the right number of operands,
     merely that they're all identical.  */
  for (i = 1; i < nelt; ++i)
    if (XVECEXP (op, 0, i) != elt)
      return false;
  return true;
})
96d86115 1556
edbb0749
ES
;; Return true if OP is a parallel for a palignr permute.
(define_predicate "palignr_operand"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  int elt = INTVAL (XVECEXP (op, 0, 0));
  int i, nelt = XVECLEN (op, 0);

  /* Check that an order in the permutation is suitable for palignr.
     For example, {5 6 7 0 1 2 3 4} is "palignr 5, xmm, xmm".  */
  for (i = 1; i < nelt; ++i)
    if (INTVAL (XVECEXP (op, 0, i)) != ((elt + i) % nelt))
      return false;
  return true;
})
1572
96d86115
RH
;; Return true if OP is a proper third operand to vpblendw256,
;; i.e. the low byte of the mask is replicated into the high byte.
(define_predicate "avx2_pblendw_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT val = INTVAL (op);
  HOST_WIDE_INT low = val & 0xff;
  return val == ((low << 8) | low);
})
baee1763 1581
;; Return true if OP is vector_operand or CONST_VECTOR.
(define_predicate "general_vector_operand"
  (ior (match_operand 0 "vector_operand")
       (match_code "const_vector")))
0fe65b75
AI
1586
;; Return true if OP is either -1 constant or stored in register.
(define_predicate "register_or_constm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (match_test "op == constm1_rtx"))))
d6d4d770
DS
1592
;; Return true if the vector ends with between 12 and 18 register saves using
;; RAX as the base address.
(define_predicate "save_multiple"
  (match_code "parallel")
{
  const unsigned len = XVECLEN (op, 0);
  unsigned i;

  /* Starting from end of vector, count register saves.  */
  for (i = 0; i < len; ++i)
    {
      rtx src, dest, addr;
      rtx e = XVECEXP (op, 0, len - 1 - i);

      if (GET_CODE (e) != SET)
	break;

      src = SET_SRC (e);
      dest = SET_DEST (e);

      /* A save stores a register into memory.  */
      if (!REG_P (src) || !MEM_P (dest))
	break;

      addr = XEXP (dest, 0);

      /* Good if dest address is in RAX.  */
      if (REG_P (addr) && REGNO (addr) == AX_REG)
	continue;

      /* Good if dest address is offset of RAX.  */
      if (GET_CODE (addr) == PLUS
	  && REG_P (XEXP (addr, 0))
	  && REGNO (XEXP (addr, 0)) == AX_REG)
	continue;

      break;
    }
  return (i >= 12 && i <= 18);
})
1632
1633
;; Return true if the vector ends with between 12 and 18 register loads using
;; RSI as the base address.
(define_predicate "restore_multiple"
  (match_code "parallel")
{
  const unsigned len = XVECLEN (op, 0);
  unsigned i;

  /* Starting from end of vector, count register restores.  */
  for (i = 0; i < len; ++i)
    {
      rtx src, dest, addr;
      rtx e = XVECEXP (op, 0, len - 1 - i);

      if (GET_CODE (e) != SET)
	break;

      src = SET_SRC (e);
      dest = SET_DEST (e);

      /* A restore loads a register from memory.  */
      if (!MEM_P (src) || !REG_P (dest))
	break;

      addr = XEXP (src, 0);

      /* Good if src address is in RSI.  */
      if (REG_P (addr) && REGNO (addr) == SI_REG)
	continue;

      /* Good if src address is offset of RSI.  */
      if (GET_CODE (addr) == PLUS
	  && REG_P (XEXP (addr, 0))
	  && REGNO (XEXP (addr, 0)) == SI_REG)
	continue;

      break;
    }
  return (i >= 12 && i <= 18);
})