;; Predicate definitions for IA-32 and x86-64.
;; Copyright (C) 2004-2016 Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.
;; Return true if OP is either a i387 or SSE fp register.
(define_predicate "any_fp_register_operand"
  (and (match_code "reg")
       (match_test "ANY_FP_REGNO_P (REGNO (op))")))

;; Return true if OP is an i387 fp register.
(define_predicate "fp_register_operand"
  (and (match_code "reg")
       (match_test "STACK_REGNO_P (REGNO (op))")))

;; Return true if OP is a non-fp register_operand.
(define_predicate "register_and_not_any_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))

;; True if the operand is a GENERAL class register.
(define_predicate "general_reg_operand"
  (and (match_code "reg")
       (match_test "GENERAL_REGNO_P (REGNO (op))")))

;; True if the operand is a nonimmediate operand with GENERAL class register.
(define_predicate "nonimmediate_gr_operand"
  (if_then_else (match_code "reg")
    (match_test "GENERAL_REGNO_P (REGNO (op))")
    (match_operand 0 "nonimmediate_operand")))
45 | ||
;; Return true if OP is a register operand other than an i387 fp register.
(define_predicate "register_and_not_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "STACK_REGNO_P (REGNO (op))"))))

;; True if the operand is an MMX register.
(define_predicate "mmx_reg_operand"
  (and (match_code "reg")
       (match_test "MMX_REGNO_P (REGNO (op))")))

;; True if the operand is an SSE register.
(define_predicate "sse_reg_operand"
  (and (match_code "reg")
       (match_test "SSE_REGNO_P (REGNO (op))")))

;; True if the operand is an AVX-512 new register.
(define_predicate "ext_sse_reg_operand"
  (and (match_code "reg")
       (match_test "EXT_REX_SSE_REGNO_P (REGNO (op))")))

;; True if the operand is an AVX-512 mask register.
(define_predicate "mask_reg_operand"
  (and (match_code "reg")
       (match_test "MASK_REGNO_P (REGNO (op))")))

;; Return true if op is a QImode register.
(define_predicate "any_QIreg_operand"
  (and (match_code "reg")
       (match_test "ANY_QI_REGNO_P (REGNO (op))")))
c629a39a | 75 | |
;; Return true if op is one of QImode registers: %[abcd][hl].
(define_predicate "QIreg_operand"
  (and (match_code "reg")
       (match_test "QI_REGNO_P (REGNO (op))")))

;; Return true if op is a QImode register operand other than %[abcd][hl].
(define_predicate "ext_QIreg_operand"
  (and (match_test "TARGET_64BIT")
       (match_code "reg")
       (not (match_test "QI_REGNO_P (REGNO (op))"))))

;; Return true if op is the AX register.
(define_predicate "ax_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == AX_REG")))

;; Return true if op is the flags register.
(define_predicate "flags_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == FLAGS_REG")))
3d1eef54 | 96 | |
;; Match an SI or HImode register for a zero_extract.
(define_special_predicate "ext_register_operand"
  (match_operand 0 "register_operand")
{
  if ((!TARGET_64BIT || GET_MODE (op) != DImode)
      && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
    return false;
  if (SUBREG_P (op))
    op = SUBREG_REG (op);

  /* Be careful to accept only registers having upper parts.  */
  return (REG_P (op)
	  && (REGNO (op) > LAST_VIRTUAL_REGISTER || QI_REGNO_P (REGNO (op))));
})
111 | ||
;; Match nonimmediate operands, but exclude memory operands on 64bit targets.
(define_predicate "nonimmediate_x64nomem_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "register_operand")
    (match_operand 0 "nonimmediate_operand")))

;; Match general operands, but exclude memory operands on 64bit targets.
(define_predicate "general_x64nomem_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonmemory_operand")
    (match_operand 0 "general_operand")))

;; Match register operands, include memory operand for TARGET_MIX_SSE_I387.
(define_predicate "register_mixssei387nonimm_operand"
  (if_then_else (match_test "TARGET_MIX_SSE_I387")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

;; Match register operands, include memory operand for TARGET_SSE4_1.
(define_predicate "register_sse4nonimm_operand"
  (if_then_else (match_test "TARGET_SSE4_1")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

;; Return true if VALUE is symbol reference
(define_predicate "symbol_operand"
  (match_code "symbol_ref"))
139 | ||
;; Return true if VALUE can be stored in a sign extended immediate field.
(define_predicate "x86_64_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  if (!TARGET_64BIT)
    return immediate_operand (op, mode);

  switch (GET_CODE (op))
    {
    case CONST_INT:
      {
        HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
        return trunc_int_for_mode (val, SImode) == val;
      }
    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit.
	 in CM_SMALL_PIC model we know it fits if it is local to the shared
	 library.  Don't count TLS SYMBOL_REFs here, since they should fit
	 only if inside of UNSPEC handled below.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
	return false;
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
	      || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
	      || ix86_cmodel == CM_KERNEL);

    case CONST:
      /* We also may accept the offsetted memory references in certain
	 special cases.  */
      if (GET_CODE (XEXP (op, 0)) == UNSPEC)
	switch (XINT (XEXP (op, 0), 1))
	  {
	  case UNSPEC_GOTPCREL:
	  case UNSPEC_DTPOFF:
	  case UNSPEC_GOTNTPOFF:
	  case UNSPEC_NTPOFF:
	    return true;
	  default:
	    break;
	  }

      if (GET_CODE (XEXP (op, 0)) == PLUS)
	{
	  rtx op1 = XEXP (XEXP (op, 0), 0);
	  rtx op2 = XEXP (XEXP (op, 0), 1);
	  HOST_WIDE_INT offset;

	  if (ix86_cmodel == CM_LARGE)
	    return false;
	  if (!CONST_INT_P (op2))
	    return false;
	  offset = trunc_int_for_mode (INTVAL (op2), DImode);
	  switch (GET_CODE (op1))
	    {
	    case SYMBOL_REF:
	      /* TLS symbols are not constant.  */
	      if (SYMBOL_REF_TLS_MODEL (op1))
		return false;
	      /* For CM_SMALL assume that latest object is 16MB before
		 end of 31bits boundary.  We may also accept pretty
		 large negative constants knowing that all objects are
		 in the positive half of address space.  */
	      if ((ix86_cmodel == CM_SMALL
		   || (ix86_cmodel == CM_MEDIUM
		       && !SYMBOL_REF_FAR_ADDR_P (op1)))
		  && offset < 16*1024*1024
		  && trunc_int_for_mode (offset, SImode) == offset)
		return true;
	      /* For CM_KERNEL we know that all object resist in the
		 negative half of 32bits address space.  We may not
		 accept negative offsets, since they may be just off
		 and we may accept pretty large positive ones.  */
	      if (ix86_cmodel == CM_KERNEL
		  && offset > 0
		  && trunc_int_for_mode (offset, SImode) == offset)
		return true;
	      break;

	    case LABEL_REF:
	      /* These conditions are similar to SYMBOL_REF ones, just the
		 constraints for code models differ.  */
	      if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
		  && offset < 16*1024*1024
		  && trunc_int_for_mode (offset, SImode) == offset)
		return true;
	      if (ix86_cmodel == CM_KERNEL
		  && offset > 0
		  && trunc_int_for_mode (offset, SImode) == offset)
		return true;
	      break;

	    case UNSPEC:
	      switch (XINT (op1, 1))
		{
		case UNSPEC_DTPOFF:
		case UNSPEC_NTPOFF:
		  if (trunc_int_for_mode (offset, SImode) == offset)
		    return true;
		}
	      break;

	    default:
	      break;
	    }
	}
      break;

    default:
      gcc_unreachable ();
    }

  return false;
})
257 | ||
;; Return true if VALUE can be stored in the zero extended immediate field.
(define_predicate "x86_64_zext_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case CONST_INT:
      return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);

    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
	return false;
      return (ix86_cmodel == CM_SMALL
	      || (ix86_cmodel == CM_MEDIUM
		  && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;

    case CONST:
      /* We also may accept the offsetted memory references in certain
	 special cases.  */
      if (GET_CODE (XEXP (op, 0)) == PLUS)
	{
	  rtx op1 = XEXP (XEXP (op, 0), 0);
	  rtx op2 = XEXP (XEXP (op, 0), 1);

	  if (ix86_cmodel == CM_LARGE)
	    return false;
	  switch (GET_CODE (op1))
	    {
	    case SYMBOL_REF:
	      /* TLS symbols are not constant.  */
	      if (SYMBOL_REF_TLS_MODEL (op1))
		return false;
	      /* For small code model we may accept pretty large positive
		 offsets, since one bit is available for free.  Negative
		 offsets are limited by the size of NULL pointer area
		 specified by the ABI.  */
	      if ((ix86_cmodel == CM_SMALL
		   || (ix86_cmodel == CM_MEDIUM
		       && !SYMBOL_REF_FAR_ADDR_P (op1)))
		  && CONST_INT_P (op2)
		  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
		  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
		return true;
	      /* ??? For the kernel, we may accept adjustment of
		 -0x10000000, since we know that it will just convert
		 negative address space to positive, but perhaps this
		 is not worthwhile.  */
	      break;

	    case LABEL_REF:
	      /* These conditions are similar to SYMBOL_REF ones, just the
		 constraints for code models differ.  */
	      if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
		  && CONST_INT_P (op2)
		  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
		  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
		return true;
	      break;

	    default:
	      return false;
	    }
	}
      break;

    default:
      gcc_unreachable ();
    }
  return false;
})
334 | ||
;; Return true if size of VALUE can be stored in a sign
;; extended immediate field.
(define_predicate "x86_64_immediate_size_operand"
  (and (match_code "symbol_ref")
       (ior (not (match_test "TARGET_64BIT"))
	    (match_test "ix86_cmodel == CM_SMALL")
	    (match_test "ix86_cmodel == CM_KERNEL"))))

;; Return true if OP is general operand representable on x86_64.
(define_predicate "x86_64_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return true if OP is non-VOIDmode general operand representable
;; on x86_64.  This predicate is used in sign-extending conversion
;; operations that require non-VOIDmode immediate operands.
(define_predicate "x86_64_sext_operand"
  (and (match_test "GET_MODE (op) != VOIDmode")
       (match_operand 0 "x86_64_general_operand")))

;; Return true if OP is non-VOIDmode general operand.  This predicate
;; is used in sign-extending conversion operations that require
;; non-VOIDmode immediate operands.
(define_predicate "sext_operand"
  (and (match_test "GET_MODE (op) != VOIDmode")
       (match_operand 0 "general_operand")))
363 | ||
;; Return true if OP is representable on x86_64 as zero-extended operand.
;; This predicate is used in zero-extending conversion operations that
;; require non-VOIDmode immediate operands.
(define_predicate "x86_64_zext_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (and (match_operand 0 "x86_64_zext_immediate_operand")
	      (match_test "GET_MODE (op) != VOIDmode")))
    (match_operand 0 "nonimmediate_operand")))

;; Return true if OP is general operand representable on x86_64
;; as either sign extended or zero extended constant.
(define_predicate "x86_64_szext_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_immediate_operand")
	 (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return true if OP is nonmemory operand representable on x86_64.
(define_predicate "x86_64_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))

;; Return true if OP is nonmemory operand representable on x86_64.
(define_predicate "x86_64_szext_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (match_operand 0 "x86_64_immediate_operand")
	 (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))
397 | ||
;; Return true when operand is PIC expression that can be computed by lea
;; operation.
(define_predicate "pic_32bit_operand"
  (match_code "const,symbol_ref,label_ref")
{
  if (!flag_pic)
    return false;

  /* Rule out relocations that translate into 64bit constants.  */
  if (TARGET_64BIT && GET_CODE (op) == CONST)
    {
      op = XEXP (op, 0);
      if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
	op = XEXP (op, 0);
      if (GET_CODE (op) == UNSPEC
	  && (XINT (op, 1) == UNSPEC_GOTOFF
	      || XINT (op, 1) == UNSPEC_GOT))
	return false;
    }

  return symbolic_operand (op, mode);
})

;; Return true if OP is nonmemory operand acceptable by movabs patterns.
(define_predicate "x86_64_movabs_operand"
  (and (match_operand 0 "nonmemory_operand")
       (not (match_operand 0 "pic_32bit_operand"))))
c629a39a | 425 | |
;; Return true if OP is either a symbol reference or a sum of a symbol
;; reference and a constant.
(define_predicate "symbolic_operand"
  (match_code "symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return true;

    case CONST:
      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF
	  || (GET_CODE (op) == UNSPEC
	      && (XINT (op, 1) == UNSPEC_GOT
		  || XINT (op, 1) == UNSPEC_GOTOFF
		  || XINT (op, 1) == UNSPEC_PCREL
		  || XINT (op, 1) == UNSPEC_GOTPCREL)))
	return true;
      if (GET_CODE (op) != PLUS
	  || !CONST_INT_P (XEXP (op, 1)))
	return false;

      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return true;
      /* Only @GOTOFF gets offsets.  */
      if (GET_CODE (op) != UNSPEC
	  || XINT (op, 1) != UNSPEC_GOTOFF)
	return false;

      op = XVECEXP (op, 0, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return true;
      return false;

    default:
      gcc_unreachable ();
    }
})
470 | ||
;; Return true if OP is a symbolic operand that resolves locally.
(define_predicate "local_symbolic_operand"
  (match_code "const,label_ref,symbol_ref")
{
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) == LABEL_REF)
    return true;

  if (GET_CODE (op) != SYMBOL_REF)
    return false;

  if (SYMBOL_REF_TLS_MODEL (op))
    return false;

  /* Dll-imported symbols are always external.  */
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
    return false;
  if (SYMBOL_REF_LOCAL_P (op))
    return true;

  /* There is, however, a not insubstantial body of code in the rest of
     the compiler that assumes it can just stick the results of
     ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done.  */
  /* ??? This is a hack.  Should update the body of the compiler to
     always create a DECL an invoke targetm.encode_section_info.  */
  if (strncmp (XSTR (op, 0), internal_label_prefix,
	       internal_label_prefix_len) == 0)
    return true;

  return false;
})
506 | ||
;; Test for a legitimate @GOTOFF operand.
;;
;; VxWorks does not impose a fixed gap between segments; the run-time
;; gap can be different from the object-file gap.  We therefore can't
;; use @GOTOFF unless we are absolutely sure that the symbol is in the
;; same segment as the GOT.  Unfortunately, the flexibility of linker
;; scripts means that we can't be sure of that in general, so assume
;; that @GOTOFF is never valid on VxWorks.
(define_predicate "gotoff_operand"
  (and (not (match_test "TARGET_VXWORKS_RTP"))
       (match_operand 0 "local_symbolic_operand")))

;; Test for various thread-local symbols.
(define_special_predicate "tls_symbolic_operand"
  (and (match_code "symbol_ref")
       (match_test "SYMBOL_REF_TLS_MODEL (op)")))

(define_special_predicate "tls_modbase_operand"
  (and (match_code "symbol_ref")
       (match_test "op == ix86_tls_module_base ()")))
527 | ||
;; Test for a pc-relative call operand
(define_predicate "constant_call_address_operand"
  (match_code "symbol_ref")
{
  if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
    return false;
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
    return false;
  return true;
})

;; P6 processors will jump to the address after the decrement when %esp
;; is used as a call operand, so they will execute return address as a code.
;; See Pentium Pro errata 70, Pentium 2 errata A33 and Pentium 3 errata E17.

(define_predicate "call_register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (SUBREG_P (op))
    op = SUBREG_REG (op);

  if (!TARGET_64BIT && op == stack_pointer_rtx)
    return false;

  return register_no_elim_operand (op, mode);
})
554 | ||
;; True for any non-virtual or eliminable register.  Used in places where
;; instantiation of such a register may cause the pattern to not be recognized.
(define_predicate "register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (SUBREG_P (op))
    op = SUBREG_REG (op);
  return !(op == arg_pointer_rtx
	   || op == frame_pointer_rtx
	   || IN_RANGE (REGNO (op),
			FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
})

;; Similarly, but include the stack pointer.  This is used to prevent esp
;; from being used as an index reg.
(define_predicate "index_register_operand"
  (match_operand 0 "register_operand")
{
  if (SUBREG_P (op))
    op = SUBREG_REG (op);
  if (reload_completed)
    return REG_OK_FOR_INDEX_STRICT_P (op);
  else
    return REG_OK_FOR_INDEX_NONSTRICT_P (op);
})

;; Return false if this is any eliminable register.  Otherwise general_operand.
(define_predicate "general_no_elim_operand"
  (if_then_else (match_code "reg,subreg")
    (match_operand 0 "register_no_elim_operand")
    (match_operand 0 "general_operand")))

;; Return false if this is any eliminable register.  Otherwise
;; register_operand or a constant.
(define_predicate "nonmemory_no_elim_operand"
  (ior (match_operand 0 "register_no_elim_operand")
       (match_operand 0 "immediate_operand")))

;; Test for a valid operand for indirect branch.
(define_predicate "indirect_branch_operand"
  (ior (match_operand 0 "register_operand")
       (and (not (match_test "TARGET_X32"))
	    (match_operand 0 "memory_operand"))))
9636738d | 598 | |
;; Return true if OP is a memory operands that can be used in sibcalls.
;; Since sibcall never returns, we can only use call-clobbered register
;; as GOT base.  Allow GOT slot here only with pseudo register as GOT
;; base.  Properly handle sibcall over GOT slot with *sibcall_GOT_32
;; and *sibcall_value_GOT_32 patterns.
(define_predicate "sibcall_memory_operand"
  (match_operand 0 "memory_operand")
{
  op = XEXP (op, 0);
  if (CONSTANT_P (op))
    return true;
  if (GET_CODE (op) == PLUS && REG_P (XEXP (op, 0)))
    {
      int regno = REGNO (XEXP (op, 0));
      if (!HARD_REGISTER_NUM_P (regno) || call_used_regs[regno])
	{
	  op = XEXP (op, 1);
	  if (GOT32_symbol_operand (op, VOIDmode))
	    return true;
	}
    }
  return false;
})

;; Return true if OP is a GOT memory operand.
(define_predicate "GOT_memory_operand"
  (match_operand 0 "memory_operand")
{
  op = XEXP (op, 0);
  return (GET_CODE (op) == CONST
	  && GET_CODE (XEXP (op, 0)) == UNSPEC
	  && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL);
})
632 | ||
;; Test for a valid operand for a call instruction.
;; Allow constant call address operands in Pmode only.
(define_special_predicate "call_insn_operand"
  (ior (match_test "constant_call_address_operand
		     (op, mode == VOIDmode ? mode : Pmode)")
       (match_operand 0 "call_register_no_elim_operand")
       (ior (and (not (match_test "TARGET_X32"))
		 (match_operand 0 "sibcall_memory_operand"))
	    (and (match_test "TARGET_X32 && Pmode == DImode")
		 (match_operand 0 "GOT_memory_operand")))))

;; Similarly, but for tail calls, in which we cannot allow memory references.
(define_special_predicate "sibcall_insn_operand"
  (ior (match_test "constant_call_address_operand
		     (op, mode == VOIDmode ? mode : Pmode)")
       (match_operand 0 "register_no_elim_operand")
       (ior (and (not (match_test "TARGET_X32"))
		 (match_operand 0 "sibcall_memory_operand"))
	    (and (match_test "TARGET_X32 && Pmode == DImode")
		 (match_operand 0 "GOT_memory_operand")))))

;; Return true if OP is a 32-bit GOT symbol operand.
(define_predicate "GOT32_symbol_operand"
  (match_test "GET_CODE (op) == CONST
	       && GET_CODE (XEXP (op, 0)) == UNSPEC
	       && XINT (XEXP (op, 0), 1) == UNSPEC_GOT"))
659 | ||
;; Match exactly zero.
(define_predicate "const0_operand"
  (match_code "const_int,const_wide_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST0_RTX (mode);
})

;; Match -1.
(define_predicate "constm1_operand"
  (match_code "const_int,const_wide_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONSTM1_RTX (mode);
})

;; Match one or vector filled with ones.
(define_predicate "const1_operand"
  (match_code "const_int,const_wide_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST1_RTX (mode);
})

;; Match exactly eight.
(define_predicate "const8_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 8")))

;; Match exactly 128.
(define_predicate "const128_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 128")))

;; Match exactly 0x0FFFFFFFF in anddi as a zero-extension operation
(define_predicate "const_32bit_mask"
  (and (match_code "const_int")
       (match_test "trunc_int_for_mode (INTVAL (op), DImode)
		    == (HOST_WIDE_INT) 0xffffffff")))
702 | ||
;; Match 2, 4, or 8.  Used for leal multiplicands.
(define_predicate "const248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 2 || i == 4 || i == 8;
})

;; Match 2, 3, 6, or 7
(define_predicate "const2367_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 2 || i == 3 || i == 6 || i == 7;
})

;; Match 1, 2, 4, or 8
(define_predicate "const1248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 1 || i == 2 || i == 4 || i == 8;
})

;; Match 3, 5, or 9.  Used for leal multiplicands.
(define_predicate "const359_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 3 || i == 5 || i == 9;
})

;; Match 4 or 8 to 11.  Used for embeded rounding.
(define_predicate "const_4_or_8_to_11_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 4 || (i >= 8 && i <= 11);
})

;; Match 4 or 8.  Used for SAE.
(define_predicate "const48_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 4 || i == 8;
})
750 | ||
5802c0cb | 751 | ;; Match 0 or 1. |
752 | (define_predicate "const_0_to_1_operand" | |
753 | (and (match_code "const_int") | |
3c0bc3f2 | 754 | (ior (match_test "op == const0_rtx") |
755 | (match_test "op == const1_rtx")))) | |
5802c0cb | 756 | |
c629a39a | 757 | ;; Match 0 to 3. |
758 | (define_predicate "const_0_to_3_operand" | |
759 | (and (match_code "const_int") | |
4e9abdcc | 760 | (match_test "IN_RANGE (INTVAL (op), 0, 3)"))) |
c629a39a | 761 | |
5220cab6 | 762 | ;; Match 0 to 4. |
763 | (define_predicate "const_0_to_4_operand" | |
764 | (and (match_code "const_int") | |
765 | (match_test "IN_RANGE (INTVAL (op), 0, 4)"))) | |
766 | ||
767 | ;; Match 0 to 5. | |
768 | (define_predicate "const_0_to_5_operand" | |
769 | (and (match_code "const_int") | |
770 | (match_test "IN_RANGE (INTVAL (op), 0, 5)"))) | |
771 | ||
c629a39a | 772 | ;; Match 0 to 7. |
773 | (define_predicate "const_0_to_7_operand" | |
774 | (and (match_code "const_int") | |
4e9abdcc | 775 | (match_test "IN_RANGE (INTVAL (op), 0, 7)"))) |
c629a39a | 776 | |
777 | ;; Match 0 to 15. | |
778 | (define_predicate "const_0_to_15_operand" | |
779 | (and (match_code "const_int") | |
4e9abdcc | 780 | (match_test "IN_RANGE (INTVAL (op), 0, 15)"))) |
c629a39a | 781 | |
448e99f5 | 782 | ;; Match 0 to 31. |
783 | (define_predicate "const_0_to_31_operand" | |
784 | (and (match_code "const_int") | |
785 | (match_test "IN_RANGE (INTVAL (op), 0, 31)"))) | |
786 | ||
12977431 | 787 | ;; Match 0 to 63. |
788 | (define_predicate "const_0_to_63_operand" | |
789 | (and (match_code "const_int") | |
4e9abdcc | 790 | (match_test "IN_RANGE (INTVAL (op), 0, 63)"))) |
12977431 | 791 | |
c629a39a | 792 | ;; Match 0 to 255. |
793 | (define_predicate "const_0_to_255_operand" | |
794 | (and (match_code "const_int") | |
4e9abdcc | 795 | (match_test "IN_RANGE (INTVAL (op), 0, 255)"))) |
c629a39a | 796 | |
5802c0cb | 797 | ;; Match (0 to 255) * 8 |
798 | (define_predicate "const_0_to_255_mul_8_operand" | |
799 | (match_code "const_int") | |
800 | { | |
801 | unsigned HOST_WIDE_INT val = INTVAL (op); | |
802 | return val <= 255*8 && val % 8 == 0; | |
803 | }) | |
804 | ||
9ad50766 | 805 | ;; Return true if OP is CONST_INT >= 1 and <= 31 (a valid operand |
5802c0cb | 806 | ;; for shift & compare patterns, as shifting by 0 does not change flags). |
807 | (define_predicate "const_1_to_31_operand" | |
808 | (and (match_code "const_int") | |
4e9abdcc | 809 | (match_test "IN_RANGE (INTVAL (op), 1, 31)"))) |
5802c0cb | 810 | |
9ad50766 | 811 | ;; Return true if OP is CONST_INT >= 1 and <= 63 (a valid operand |
34c1bdea | 812 | ;; for 64bit shift & compare patterns, as shifting by 0 does not change flags). |
813 | (define_predicate "const_1_to_63_operand" | |
814 | (and (match_code "const_int") | |
815 | (match_test "IN_RANGE (INTVAL (op), 1, 63)"))) | |
816 | ||
;; Match 2 or 3.
(define_predicate "const_2_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 2, 3)")))

;; Match 4 to 5.
(define_predicate "const_4_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 5)")))

;; Match 4 to 7.
(define_predicate "const_4_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 7)")))

;; Match 6 to 7.
(define_predicate "const_6_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 6, 7)")))

;; Match 8 to 9.
(define_predicate "const_8_to_9_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 9)")))

;; Match 8 to 11.
(define_predicate "const_8_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 11)")))

;; Match 8 to 15.
(define_predicate "const_8_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 15)")))

;; Match 10 to 11.
(define_predicate "const_10_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 10, 11)")))

;; Match 12 to 13.
(define_predicate "const_12_to_13_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 13)")))

;; Match 12 to 15.
(define_predicate "const_12_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 15)")))

;; Match 14 to 15.
(define_predicate "const_14_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 14, 15)")))

;; Match 16 to 19.
(define_predicate "const_16_to_19_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 16, 19)")))

;; Match 16 to 31.
(define_predicate "const_16_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 16, 31)")))

;; Match 20 to 23.
(define_predicate "const_20_to_23_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 20, 23)")))

;; Match 24 to 27.
(define_predicate "const_24_to_27_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 24, 27)")))

;; Match 28 to 31.
(define_predicate "const_28_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 28, 31)")))
;; True if this is a constant appropriate for an increment or decrement.
(define_predicate "incdec_operand"
  (match_code "const_int")
{
  /* On Pentium4, the inc and dec operations cause extra dependency on flag
     registers, since carry flag is not set.  */
  if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
    return false;
  return op == const1_rtx || op == constm1_rtx;
})

;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (ior (match_test "op == const1_rtx")
		 (match_test "op == constm1_rtx")))))

;; True if OP is acceptable as operand of DImode shift expander.
(define_predicate "shiftdi_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

;; True if OP is acceptable as input operand of the DImode ashift expander.
(define_predicate "ashldi_input_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "reg_or_pm1_operand")))

;; Return true if OP is a vector load from the constant pool with just
;; the first element nonzero.
(define_predicate "zero_extended_scalar_load_operand"
  (match_code "mem")
{
  unsigned n_elts;
  op = maybe_get_pool_constant (op);

  if (!(op && GET_CODE (op) == CONST_VECTOR))
    return false;

  n_elts = CONST_VECTOR_NUNITS (op);

  /* Elements 1 .. n_elts-1 must all be zero; element 0 is unconstrained.  */
  for (n_elts--; n_elts > 0; n_elts--)
    {
      rtx elt = CONST_VECTOR_ELT (op, n_elts);
      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
	return false;
    }
  return true;
})
/* Return true if operand is a vector constant that is all ones.  */
(define_predicate "vector_all_ones_operand"
  (and (match_code "const_vector")
       (match_test "INTEGRAL_MODE_P (GET_MODE (op))")
       (match_test "op == CONSTM1_RTX (GET_MODE (op))")))

;; Return true when OP is operand acceptable for standard SSE move.
(define_predicate "vector_move_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))

;; Return true when OP is either nonimmediate operand, or any
;; CONST_VECTOR.
(define_predicate "nonimmediate_or_const_vector_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_code "const_vector")))

;; Return true when OP is nonimmediate or standard SSE constant.
(define_predicate "nonimmediate_or_sse_const_operand"
  (match_operand 0 "general_operand")
{
  if (nonimmediate_operand (op, mode))
    return true;
  if (standard_sse_constant_p (op) > 0)
    return true;
  return false;
})

;; Return true if OP is a register or a zero.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const0_operand")))
;; Return true for RTX codes that force SImode address.
(define_predicate "SImode_address_operand"
  (match_code "subreg,zero_extend,and"))

;; Return true if op is a valid address for LEA, and does not contain
;; a segment override.  Defined as a special predicate to allow
;; mode-less const_int operands pass to address_operand.
(define_special_predicate "address_no_seg_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  if (!CONST_INT_P (op)
      && mode != VOIDmode
      && GET_MODE (op) != mode)
    return false;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  return parts.seg == ADDR_SPACE_GENERIC;
})

;; Return true if op is a valid base register, displacement or
;; sum of base register and displacement for VSIB addressing.
(define_predicate "vsib_address_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;
  rtx disp;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  if (parts.index || parts.seg != ADDR_SPACE_GENERIC)
    return false;

  /* VSIB addressing doesn't support (%rip).  */
  if (parts.disp)
    {
      disp = parts.disp;
      if (GET_CODE (disp) == CONST)
	{
	  disp = XEXP (disp, 0);
	  if (GET_CODE (disp) == PLUS)
	    disp = XEXP (disp, 0);
	  if (GET_CODE (disp) == UNSPEC)
	    switch (XINT (disp, 1))
	      {
	      case UNSPEC_GOTPCREL:
	      case UNSPEC_PCREL:
	      case UNSPEC_GOTNTPOFF:
		return false;
	      }
	}
      if (TARGET_64BIT
	  && flag_pic
	  && (GET_CODE (disp) == SYMBOL_REF
	      || GET_CODE (disp) == LABEL_REF))
	return false;
    }

  return true;
})
;; Return true if op is valid MPX address operand without base.
(define_predicate "address_mpx_no_base_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  if (parts.index && parts.base)
    return false;

  if (parts.seg != ADDR_SPACE_GENERIC)
    return false;

  /* Do not support (%rip).  */
  if (parts.disp && flag_pic && TARGET_64BIT
      && SYMBOLIC_CONST (parts.disp))
    {
      if (GET_CODE (parts.disp) != CONST
	  || GET_CODE (XEXP (parts.disp, 0)) != PLUS
	  || GET_CODE (XEXP (XEXP (parts.disp, 0), 0)) != UNSPEC
	  || !CONST_INT_P (XEXP (XEXP (parts.disp, 0), 1))
	  || (XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_DTPOFF
	      && XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_NTPOFF))
	return false;
    }

  return true;
})

;; Return true if op is valid MPX address operand without index.
(define_predicate "address_mpx_no_index_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  if (parts.index)
    return false;

  if (parts.seg != ADDR_SPACE_GENERIC)
    return false;

  /* Do not support (%rip).  */
  if (parts.disp && flag_pic && TARGET_64BIT
      && SYMBOLIC_CONST (parts.disp)
      && (GET_CODE (parts.disp) != CONST
	  || GET_CODE (XEXP (parts.disp, 0)) != PLUS
	  || GET_CODE (XEXP (XEXP (parts.disp, 0), 0)) != UNSPEC
	  || !CONST_INT_P (XEXP (XEXP (parts.disp, 0), 1))
	  || (XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_DTPOFF
	      && XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_NTPOFF)))
    return false;

  return true;
})

;; Match any MEM; used in VSIB insn patterns, where the address form
;; is constrained separately by vsib_address_operand.
(define_predicate "vsib_mem_operator"
  (match_code "mem"))

;; Match any MEM; used in MPX bound-register insn patterns.
(define_predicate "bnd_mem_operator"
  (match_code "mem"))
;; Return true if the rtx is known to be at least 32 bits aligned.
(define_predicate "aligned_operand"
  (match_operand 0 "general_operand")
{
  struct ix86_address parts;
  int ok;

  /* Registers and immediate operands are always "aligned".  */
  if (!MEM_P (op))
    return true;

  /* All patterns using aligned_operand on memory operands ends up
     in promoting memory operand to 64bit and thus causing memory mismatch.  */
  if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
    return false;

  /* Don't even try to do any aligned optimizations with volatiles.  */
  if (MEM_VOLATILE_P (op))
    return false;

  if (MEM_ALIGN (op) >= 32)
    return true;

  op = XEXP (op, 0);

  /* Pushes and pops are only valid on the stack pointer.  */
  if (GET_CODE (op) == PRE_DEC
      || GET_CODE (op) == POST_INC)
    return true;

  /* Decode the address.  */
  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  if (parts.base && SUBREG_P (parts.base))
    parts.base = SUBREG_REG (parts.base);
  if (parts.index && SUBREG_P (parts.index))
    parts.index = SUBREG_REG (parts.index);

  /* Look for some component that isn't known to be aligned.  */
  if (parts.index)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
	return false;
    }
  if (parts.base)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
	return false;
    }
  if (parts.disp)
    {
      if (!CONST_INT_P (parts.disp)
	  || (INTVAL (parts.disp) & 3))
	return false;
    }

  /* Didn't find one -- this must be an aligned address.  */
  return true;
})
;; Return true if OP is memory operand with a displacement.
(define_predicate "memory_displacement_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);
  return parts.disp != NULL_RTX;
})

;; Return true if OP is memory operand with a displacement only.
(define_predicate "memory_displacement_only_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  if (TARGET_64BIT)
    return false;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);

  if (parts.base || parts.index)
    return false;

  return parts.disp != NULL_RTX;
})

;; Return true if OP is memory operand that cannot be represented
;; by the modRM array.
(define_predicate "long_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "memory_address_length (op, false)")))

;; Return true if OP is a comparison operator that can be issued by fcmov.
(define_predicate "fcmov_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return false;
      code = ix86_fp_compare_code_to_integer (code);
    }
  /* i387 supports just limited amount of conditional codes.  */
  switch (code)
    {
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode
	  || inmode == CCCmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
    case EQ: case NE:
      return true;
    default:
      return false;
    }
})

;; Return true if OP is a comparison that can be used in the CMPSS/CMPPS insns.
;; The first set are supported directly; the second set can't be done with
;; full IEEE support, i.e. NaNs.
(define_predicate "sse_comparison_operator"
  (ior (match_code "eq,ne,lt,le,unordered,unge,ungt,ordered")
       (and (match_test "TARGET_AVX")
	    (match_code "ge,gt,uneq,unle,unlt,ltgt"))))

;; Match a signed integer comparison code.
(define_predicate "ix86_comparison_int_operator"
  (match_code "ne,eq,ge,gt,le,lt"))

;; Match an unsigned integer comparison code.
(define_predicate "ix86_comparison_uns_operator"
  (match_code "ne,eq,geu,gtu,leu,ltu"))

;; Match an equality comparison code, as used by bit-test patterns.
(define_predicate "bt_comparison_operator"
  (match_code "ne,eq"))
;; Return true if OP is a valid comparison operator in valid mode.
(define_predicate "ix86_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    return ix86_trivial_fp_comparison_operator (op, mode);

  switch (code)
    {
    case EQ: case NE:
      return true;
    case LT: case GE:
      if (inmode == CCmode || inmode == CCGCmode
	  || inmode == CCGOCmode || inmode == CCNOmode)
	return true;
      return false;
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCCmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
      if (inmode == CCmode)
	return true;
      return false;
    case GT: case LE:
      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
	return true;
      return false;
    default:
      return false;
    }
})

;; Return true if OP is a valid comparison operator
;; testing carry flag to be set.
(define_predicate "ix86_carry_flag_operator"
  (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return false;
      code = ix86_fp_compare_code_to_integer (code);
    }
  else if (inmode == CCCmode)
    return code == LTU || code == GTU;
  else if (inmode != CCmode)
    return false;

  return code == LTU;
})

;; Return true if this comparison only requires testing one flag bit.
(define_predicate "ix86_trivial_fp_comparison_operator"
  (match_code "gt,ge,unlt,unle,uneq,ltgt,ordered,unordered"))

;; Return true if we know how to do this comparison.  Others require
;; testing more than one flag bit, and we let the generic middle-end
;; code do that.
(define_predicate "ix86_fp_comparison_operator"
  (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
                             == IX86_FPCMP_ARITH")
               (match_operand 0 "comparison_operator")
               (match_operand 0 "ix86_trivial_fp_comparison_operator")))

;; Same as above, but for swapped comparison used in *jcc<fp>_<int>_i387.
(define_predicate "ix86_swapped_fp_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum rtx_code code = GET_CODE (op);
  bool ret;

  PUT_CODE (op, swap_condition (code));
  ret = ix86_fp_comparison_operator (op, mode);
  PUT_CODE (op, code);
  return ret;
})
;; Nearly general operand, but accept any const_double, since we wish
;; to be able to drop them into memory rather than have them get pulled
;; into registers.
(define_predicate "cmp_fp_expander_operand"
  (ior (match_code "const_double")
       (match_operand 0 "general_operand")))

;; Return true if this is a valid binary floating-point operation.
(define_predicate "binary_fp_operator"
  (match_code "plus,minus,mult,div"))

;; Return true if this is a multiply operation.
(define_predicate "mult_operator"
  (match_code "mult"))

;; Return true if this is a division operation.
(define_predicate "div_operator"
  (match_code "div"))

;; Return true if this is a plus, minus, and, ior or xor operation.
(define_predicate "plusminuslogic_operator"
  (match_code "plus,minus,and,ior,xor"))

;; Return true if this is a float extend operation.
(define_predicate "float_operator"
  (match_code "float"))

;; Return true for ARITHMETIC_P.
(define_predicate "arith_or_logical_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
	       mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))

;; Return true for COMMUTATIVE_P.
(define_predicate "commutative_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))

;; Return true if OP is a binary operator that can be promoted to wider mode.
(define_predicate "promotable_binary_operator"
  (ior (match_code "plus,minus,and,ior,xor,ashift")
       (and (match_code "mult")
	    (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))

;; Match a COMPARE rtx.
(define_predicate "compare_operator"
  (match_code "compare"))

;; Match an absolute value or negation operation.
(define_predicate "absneg_operator"
  (match_code "abs,neg"))

;; Return true if OP is a memory operand, aligned to
;; less than its natural alignment.
(define_predicate "misaligned_operand"
  (and (match_code "mem")
       (match_test "MEM_ALIGN (op) < GET_MODE_BITSIZE (mode)")))
;; Return true if OP is an emms operation, known to be a PARALLEL.
(define_predicate "emms_operation"
  (match_code "parallel")
{
  unsigned i;

  if (XVECLEN (op, 0) != 17)
    return false;

  for (i = 0; i < 8; i++)
    {
      rtx elt = XVECEXP (op, 0, i+1);

      if (GET_CODE (elt) != CLOBBER
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != XFmode
	  || REGNO (SET_DEST (elt)) != FIRST_STACK_REG + i)
	return false;

      elt = XVECEXP (op, 0, i+9);

      if (GET_CODE (elt) != CLOBBER
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != DImode
	  || REGNO (SET_DEST (elt)) != FIRST_MMX_REG + i)
	return false;
    }
  return true;
})

;; Return true if OP is a vzeroall operation, known to be a PARALLEL.
(define_predicate "vzeroall_operation"
  (match_code "parallel")
{
  unsigned i, nregs = TARGET_64BIT ? 16 : 8;

  if ((unsigned) XVECLEN (op, 0) != 1 + nregs)
    return false;

  for (i = 0; i < nregs; i++)
    {
      rtx elt = XVECEXP (op, 0, i+1);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != V8SImode
	  || REGNO (SET_DEST (elt)) != SSE_REGNO (i)
	  || SET_SRC (elt) != CONST0_RTX (V8SImode))
	return false;
    }
  return true;
})

;; Return true if OP is a vzeroupper operation.
(define_predicate "vzeroupper_operation"
  (and (match_code "unspec_volatile")
       (match_test "XINT (op, 1) == UNSPECV_VZEROUPPER")))
;; Return true if OP is an addsub vec_merge operation.
(define_predicate "addsub_vm_operator"
  (match_code "vec_merge")
{
  rtx op0, op1;
  int swapped;
  HOST_WIDE_INT mask;
  int nunits, elt;

  op0 = XEXP (op, 0);
  op1 = XEXP (op, 1);

  /* Sanity check.  */
  if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
    swapped = 0;
  else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
    swapped = 1;
  else
    gcc_unreachable ();

  mask = INTVAL (XEXP (op, 2));
  nunits = GET_MODE_NUNITS (mode);

  for (elt = 0; elt < nunits; elt++)
    {
      /* bit clear: take from op0, set: take from op1  */
      int bit = !(mask & (HOST_WIDE_INT_1U << elt));

      if (bit != ((elt & 1) ^ swapped))
	return false;
    }

  return true;
})

;; Return true if OP is an addsub vec_select/vec_concat operation.
(define_predicate "addsub_vs_operator"
  (and (match_code "vec_select")
       (match_code "vec_concat" "0"))
{
  rtx op0, op1;
  bool swapped;
  int nunits, elt;

  op0 = XEXP (XEXP (op, 0), 0);
  op1 = XEXP (XEXP (op, 0), 1);

  /* Sanity check.  */
  if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
    swapped = false;
  else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
    swapped = true;
  else
    gcc_unreachable ();

  nunits = GET_MODE_NUNITS (mode);
  if (XVECLEN (XEXP (op, 1), 0) != nunits)
    return false;

  /* We already checked that permutation is suitable for addsub,
     so only look at the first element of the parallel.  */
  elt = INTVAL (XVECEXP (XEXP (op, 1), 0, 0));

  return elt == (swapped ? nunits : 0);
})

;; Return true if OP is a parallel for an addsub vec_select.
(define_predicate "addsub_vs_parallel"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  int nelt = XVECLEN (op, 0);
  int elt, i;

  if (nelt < 2)
    return false;

  /* Check that the permutation is suitable for addsub.
     For example, { 0 9 2 11 4 13 6 15 } or { 8 1 10 3 12 5 14 7 }.  */
  elt = INTVAL (XVECEXP (op, 0, 0));
  if (elt == 0)
    {
      for (i = 1; i < nelt; ++i)
	if (INTVAL (XVECEXP (op, 0, i)) != (i + (i & 1) * nelt))
	  return false;
    }
  else if (elt == nelt)
    {
      for (i = 1; i < nelt; ++i)
	if (INTVAL (XVECEXP (op, 0, i)) != (elt + i - (i & 1) * nelt))
	  return false;
    }
  else
    return false;

  return true;
})
;; Return true if OP is a parallel for a vbroadcast permute:
;; every selector element is the same constant.
(define_predicate "avx_vbroadcast_operand"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  rtx elt = XVECEXP (op, 0, 0);
  int i, nelt = XVECLEN (op, 0);

  /* Don't bother checking there are the right number of operands,
     merely that they're all identical.  Pointer comparison suffices
     because equal CONST_INT rtxes are shared.  */
  for (i = 1; i < nelt; ++i)
    if (XVECEXP (op, 0, i) != elt)
      return false;
  return true;
})
738630ee | 1568 | |
;; Return true if OP is a parallel for a palignr permute: the indices
;; form a rotation of 0..nelt-1.
(define_predicate "palignr_operand"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  int elt = INTVAL (XVECEXP (op, 0, 0));
  int i, nelt = XVECLEN (op, 0);

  /* Check that an order in the permutation is suitable for palignr.
     For example, {5 6 7 0 1 2 3 4} is "palignr 5, xmm, xmm": each
     element must follow its predecessor by one, wrapping modulo the
     vector length.  */
  for (i = 1; i < nelt; ++i)
    if (INTVAL (XVECEXP (op, 0, i)) != ((elt + i) % nelt))
      return false;
  return true;
})
1584 | ||
;; Return true if OP is a proper third operand to vpblendw256.
;; The 256-bit form applies the same 8-bit lane mask to both 128-bit
;; halves, so the 16-bit immediate must consist of two identical
;; bytes and nothing above bit 15.
(define_predicate "avx2_pblendw_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT mask = INTVAL (op);
  HOST_WIDE_INT lo_byte = mask & 0xff;
  return mask == (lo_byte | (lo_byte << 8));
})
ebdfd365 | 1593 | |
;; Return true if OP is nonimmediate_operand or CONST_VECTOR,
;; i.e. a register, a memory reference, or a constant vector.
(define_predicate "general_vector_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_code "const_vector")))
d2ff59d6 | 1598 | |
;; Return true if OP is either the constant -1 or stored in a
;; register.  Only the canonical shared constm1_rtx is accepted,
;; not an arbitrary all-ones CONST_INT of another mode.
(define_predicate "register_or_constm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (match_test "op == constm1_rtx"))))