1 ;; Predicate definitions for POWER and PowerPC.
2 ;; Copyright (C) 2005-2015 Free Software Foundation, Inc.
3 ;;
4 ;; This file is part of GCC.
5 ;;
6 ;; GCC is free software; you can redistribute it and/or modify
7 ;; it under the terms of the GNU General Public License as published by
8 ;; the Free Software Foundation; either version 3, or (at your option)
9 ;; any later version.
10 ;;
11 ;; GCC is distributed in the hope that it will be useful,
12 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 ;; GNU General Public License for more details.
15 ;;
16 ;; You should have received a copy of the GNU General Public License
17 ;; along with GCC; see the file COPYING3. If not see
18 ;; <http://www.gnu.org/licenses/>.
19
20 ;; Return 1 for anything except PARALLEL.
21 (define_predicate "any_operand"
22 (match_code "const_int,const_double,const_wide_int,const,symbol_ref,label_ref,subreg,reg,mem"))
23
24 ;; Return 1 for any PARALLEL.
25 (define_predicate "any_parallel_operand"
26 (match_code "parallel"))
27
28 ;; Return 1 if op is COUNT register.
29 (define_predicate "count_register_operand"
30 (and (match_code "reg")
31 (match_test "REGNO (op) == CTR_REGNO
32 || REGNO (op) > LAST_VIRTUAL_REGISTER")))
33
34 ;; Return 1 if op is an Altivec register.
35 (define_predicate "altivec_register_operand"
36 (match_operand 0 "register_operand")
37 {
38 if (GET_CODE (op) == SUBREG)
39 op = SUBREG_REG (op);
40
41 if (!REG_P (op))
42 return 0;
43
44 if (REGNO (op) > LAST_VIRTUAL_REGISTER)
45 return 1;
46
47 return ALTIVEC_REGNO_P (REGNO (op));
48 })
49
50 ;; Return 1 if op is a VSX register.
51 (define_predicate "vsx_register_operand"
52 (match_operand 0 "register_operand")
53 {
54 if (GET_CODE (op) == SUBREG)
55 op = SUBREG_REG (op);
56
57 if (!REG_P (op))
58 return 0;
59
60 if (REGNO (op) > LAST_VIRTUAL_REGISTER)
61 return 1;
62
63 return VSX_REGNO_P (REGNO (op));
64 })
65
66 ;; Return 1 if op is a vector register that operates on floating point vectors
67 ;; (either altivec or VSX).
68 (define_predicate "vfloat_operand"
69 (match_operand 0 "register_operand")
70 {
71 if (GET_CODE (op) == SUBREG)
72 op = SUBREG_REG (op);
73
74 if (!REG_P (op))
75 return 0;
76
77 if (REGNO (op) > LAST_VIRTUAL_REGISTER)
78 return 1;
79
80 return VFLOAT_REGNO_P (REGNO (op));
81 })
82
83 ;; Return 1 if op is a vector register that operates on integer vectors
 84 ;; (only altivec; VSX doesn't support integer vectors)
85 (define_predicate "vint_operand"
86 (match_operand 0 "register_operand")
87 {
88 if (GET_CODE (op) == SUBREG)
89 op = SUBREG_REG (op);
90
91 if (!REG_P (op))
92 return 0;
93
94 if (REGNO (op) > LAST_VIRTUAL_REGISTER)
95 return 1;
96
97 return VINT_REGNO_P (REGNO (op));
98 })
99
100 ;; Return 1 if op is a vector register to do logical operations on (and, or,
101 ;; xor, etc.)
102 (define_predicate "vlogical_operand"
103 (match_operand 0 "register_operand")
104 {
105 if (GET_CODE (op) == SUBREG)
106 op = SUBREG_REG (op);
107
108 if (!REG_P (op))
109 return 0;
110
111 if (REGNO (op) > LAST_VIRTUAL_REGISTER)
112 return 1;
113
114 return VLOGICAL_REGNO_P (REGNO (op));
115 })
116
117 ;; Return 1 if op is the carry register.
118 (define_predicate "ca_operand"
119 (match_operand 0 "register_operand")
120 {
121 if (GET_CODE (op) == SUBREG)
122 op = SUBREG_REG (op);
123
124 if (!REG_P (op))
125 return 0;
126
127 return CA_REGNO_P (REGNO (op));
128 })
129
130 ;; Return 1 if op is a signed 5-bit constant integer.
131 (define_predicate "s5bit_cint_operand"
132 (and (match_code "const_int")
133 (match_test "INTVAL (op) >= -16 && INTVAL (op) <= 15")))
134
 135 ;; Return 1 if op is an unsigned 3-bit constant integer.
136 (define_predicate "u3bit_cint_operand"
137 (and (match_code "const_int")
138 (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 7")))
139
 140 ;; Return 1 if op is an unsigned 5-bit constant integer.
141 (define_predicate "u5bit_cint_operand"
142 (and (match_code "const_int")
143 (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 31")))
144
145 ;; Return 1 if op is a signed 8-bit constant integer.
 146 ;; Integer multiplication completes more quickly with such constants.
147 (define_predicate "s8bit_cint_operand"
148 (and (match_code "const_int")
149 (match_test "INTVAL (op) >= -128 && INTVAL (op) <= 127")))
150
 151 ;; Return 1 if op is an unsigned 10-bit constant integer.
152 (define_predicate "u10bit_cint_operand"
153 (and (match_code "const_int")
154 (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 1023")))
155
156 ;; Return 1 if op is a constant integer that can fit in a D field.
157 (define_predicate "short_cint_operand"
158 (and (match_code "const_int")
159 (match_test "satisfies_constraint_I (op)")))
160
161 ;; Return 1 if op is a constant integer that can fit in an unsigned D field.
162 (define_predicate "u_short_cint_operand"
163 (and (match_code "const_int")
164 (match_test "satisfies_constraint_K (op)")))
165
166 ;; Return 1 if op is a constant integer that cannot fit in a signed D field.
167 (define_predicate "non_short_cint_operand"
168 (and (match_code "const_int")
169 (match_test "(unsigned HOST_WIDE_INT)
170 (INTVAL (op) + 0x8000) >= 0x10000")))
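;; Added note (not in the original source): adding 0x8000 maps the signed
;; 16-bit range [-0x8000, 0x7fff] onto [0, 0xffff], so any sum of 0x10000 or
;; more lies outside the D field.  For example, 0x7fff is rejected by this
;; predicate (it fits in a D field), while 0x8000 is accepted (it does not).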
171
172 ;; Return 1 if op is a positive constant integer that is an exact power of 2.
173 (define_predicate "exact_log2_cint_operand"
174 (and (match_code "const_int")
175 (match_test "INTVAL (op) > 0 && exact_log2 (INTVAL (op)) >= 0")))
176
177 ;; Match op = 0 or op = 1.
178 (define_predicate "const_0_to_1_operand"
179 (and (match_code "const_int")
180 (match_test "IN_RANGE (INTVAL (op), 0, 1)")))
181
182 ;; Match op = 0..3.
183 (define_predicate "const_0_to_3_operand"
184 (and (match_code "const_int")
185 (match_test "IN_RANGE (INTVAL (op), 0, 3)")))
186
187 ;; Match op = 2 or op = 3.
188 (define_predicate "const_2_to_3_operand"
189 (and (match_code "const_int")
190 (match_test "IN_RANGE (INTVAL (op), 2, 3)")))
191
192 ;; Match op = 0..15
193 (define_predicate "const_0_to_15_operand"
194 (and (match_code "const_int")
195 (match_test "IN_RANGE (INTVAL (op), 0, 15)")))
196
197 ;; Return 1 if op is a register that is not special.
198 (define_predicate "gpc_reg_operand"
199 (match_operand 0 "register_operand")
200 {
201 if ((TARGET_E500_DOUBLE || TARGET_SPE) && invalid_e500_subreg (op, mode))
202 return 0;
203
204 if (GET_CODE (op) == SUBREG)
205 op = SUBREG_REG (op);
206
207 if (!REG_P (op))
208 return 0;
209
210 if (REGNO (op) >= ARG_POINTER_REGNUM && !CA_REGNO_P (REGNO (op)))
211 return 1;
212
213 if (TARGET_VSX && VSX_REGNO_P (REGNO (op)))
214 return 1;
215
216 return INT_REGNO_P (REGNO (op)) || FP_REGNO_P (REGNO (op));
217 })
218
219 ;; Return 1 if op is a general purpose register. Unlike gpc_reg_operand, don't
220 ;; allow floating point or vector registers.
221 (define_predicate "int_reg_operand"
222 (match_operand 0 "register_operand")
223 {
224 if ((TARGET_E500_DOUBLE || TARGET_SPE) && invalid_e500_subreg (op, mode))
225 return 0;
226
227 if (GET_CODE (op) == SUBREG)
228 op = SUBREG_REG (op);
229
230 if (!REG_P (op))
231 return 0;
232
233 if (REGNO (op) >= FIRST_PSEUDO_REGISTER)
234 return 1;
235
236 return INT_REGNO_P (REGNO (op));
237 })
238
239 ;; Like int_reg_operand, but only return true for base registers
240 (define_predicate "base_reg_operand"
241 (match_operand 0 "int_reg_operand")
242 {
243 if (GET_CODE (op) == SUBREG)
244 op = SUBREG_REG (op);
245
246 if (!REG_P (op))
247 return 0;
248
249 return (REGNO (op) != FIRST_GPR_REGNO);
250 })
251
252 ;; Return 1 if op is a HTM specific SPR register.
253 (define_predicate "htm_spr_reg_operand"
254 (match_operand 0 "register_operand")
255 {
256 if (!TARGET_HTM)
257 return 0;
258
259 if (GET_CODE (op) == SUBREG)
260 op = SUBREG_REG (op);
261
262 if (!REG_P (op))
263 return 0;
264
265 switch (REGNO (op))
266 {
267 case TFHAR_REGNO:
268 case TFIAR_REGNO:
269 case TEXASR_REGNO:
270 return 1;
271 default:
272 break;
273 }
274
275 /* Unknown SPR. */
276 return 0;
277 })
278
279 ;; Return 1 if op is a general purpose register that is an even register
 280 ;; which is suitable for a load/store quad operation.
281 (define_predicate "quad_int_reg_operand"
282 (match_operand 0 "register_operand")
283 {
284 HOST_WIDE_INT r;
285
286 if (!TARGET_QUAD_MEMORY && !TARGET_QUAD_MEMORY_ATOMIC)
287 return 0;
288
289 if (GET_CODE (op) == SUBREG)
290 op = SUBREG_REG (op);
291
292 if (!REG_P (op))
293 return 0;
294
295 r = REGNO (op);
296 if (r >= FIRST_PSEUDO_REGISTER)
297 return 1;
298
299 return (INT_REGNO_P (r) && ((r & 1) == 0));
300 })
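;; Added example: lq/stq operate on an even/odd GPR pair, so a hard register
;; such as r4 (covering the pair r4:r5) satisfies this predicate, while r5
;; does not; pseudo registers are accepted here and constrained later.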
301
302 ;; Return 1 if op is a register that is a condition register field.
303 (define_predicate "cc_reg_operand"
304 (match_operand 0 "register_operand")
305 {
306 if (GET_CODE (op) == SUBREG)
307 op = SUBREG_REG (op);
308
309 if (!REG_P (op))
310 return 0;
311
312 if (REGNO (op) > LAST_VIRTUAL_REGISTER)
313 return 1;
314
315 return CR_REGNO_P (REGNO (op));
316 })
317
318 ;; Return 1 if op is a register that is a condition register field not cr0.
319 (define_predicate "cc_reg_not_cr0_operand"
320 (match_operand 0 "register_operand")
321 {
322 if (GET_CODE (op) == SUBREG)
323 op = SUBREG_REG (op);
324
325 if (!REG_P (op))
326 return 0;
327
328 if (REGNO (op) > LAST_VIRTUAL_REGISTER)
329 return 1;
330
331 return CR_REGNO_NOT_CR0_P (REGNO (op));
332 })
333
 334 ;; Return 1 if op is a register that is a condition register field and, if generating microcode, not cr0.
335 (define_predicate "cc_reg_not_micro_cr0_operand"
336 (match_operand 0 "register_operand")
337 {
338 if (GET_CODE (op) == SUBREG)
339 op = SUBREG_REG (op);
340
341 if (!REG_P (op))
342 return 0;
343
344 if (REGNO (op) > LAST_VIRTUAL_REGISTER)
345 return 1;
346
347 if (rs6000_gen_cell_microcode)
348 return CR_REGNO_NOT_CR0_P (REGNO (op));
349 else
350 return CR_REGNO_P (REGNO (op));
351 })
352
353 ;; Return 1 if op is a constant integer valid for D field
 354 ;; or a non-special register.
355 (define_predicate "reg_or_short_operand"
356 (if_then_else (match_code "const_int")
357 (match_operand 0 "short_cint_operand")
358 (match_operand 0 "gpc_reg_operand")))
359
 360 ;; Return 1 if op is a constant integer whose negation is valid for the
 361 ;; D field, or a non-special register.
362 ;; Do not allow a constant zero because all patterns that call this
363 ;; predicate use "addic r1,r2,-const" to set carry when r2 is greater than
364 ;; or equal to const, which does not work for zero.
365 (define_predicate "reg_or_neg_short_operand"
366 (if_then_else (match_code "const_int")
367 (match_test "satisfies_constraint_P (op)
368 && INTVAL (op) != 0")
369 (match_operand 0 "gpc_reg_operand")))
370
371 ;; Return 1 if op is a constant integer valid for DS field
372 ;; or non-special register.
373 (define_predicate "reg_or_aligned_short_operand"
374 (if_then_else (match_code "const_int")
375 (and (match_operand 0 "short_cint_operand")
376 (match_test "!(INTVAL (op) & 3)"))
377 (match_operand 0 "gpc_reg_operand")))
378
379 ;; Return 1 if op is a constant integer whose high-order 16 bits are zero
380 ;; or non-special register.
381 (define_predicate "reg_or_u_short_operand"
382 (if_then_else (match_code "const_int")
383 (match_operand 0 "u_short_cint_operand")
384 (match_operand 0 "gpc_reg_operand")))
385
386 ;; Return 1 if op is any constant integer
387 ;; or non-special register.
388 (define_predicate "reg_or_cint_operand"
389 (ior (match_code "const_int")
390 (match_operand 0 "gpc_reg_operand")))
391
392 ;; Return 1 if op is a constant integer valid for addition with addis, addi.
393 (define_predicate "add_cint_operand"
394 (and (match_code "const_int")
395 (match_test "((unsigned HOST_WIDE_INT) INTVAL (op)
396 + (mode == SImode ? 0x80000000 : 0x80008000))
397 < (unsigned HOST_WIDE_INT) 0x100000000ll")))
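;; Added note: for 64-bit (non-SImode) values the test above accepts constants
;; in [-0x80008000, 0x7fff7fff], i.e. anything reachable with one addis
;; (signed 16-bit immediate shifted left 16) followed by one addi (signed
;; 16-bit immediate).  For example, 0x1234f000 = (0x1235 << 16) + (-0x1000).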
398
399 ;; Return 1 if op is a constant integer valid for addition
400 ;; or non-special register.
401 (define_predicate "reg_or_add_cint_operand"
402 (if_then_else (match_code "const_int")
403 (match_operand 0 "add_cint_operand")
404 (match_operand 0 "gpc_reg_operand")))
405
406 ;; Return 1 if op is a constant integer valid for subtraction
407 ;; or non-special register.
408 (define_predicate "reg_or_sub_cint_operand"
409 (if_then_else (match_code "const_int")
410 (match_test "(unsigned HOST_WIDE_INT)
411 (- UINTVAL (op) + (mode == SImode ? 0x80000000 : 0x80008000))
412 < (unsigned HOST_WIDE_INT) 0x100000000ll")
413 (match_operand 0 "gpc_reg_operand")))
414
415 ;; Return 1 if op is any 32-bit unsigned constant integer
416 ;; or non-special register.
417 (define_predicate "reg_or_logical_cint_operand"
418 (if_then_else (match_code "const_int")
419 (match_test "(GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
420 && INTVAL (op) >= 0)
421 || ((INTVAL (op) & GET_MODE_MASK (mode)
422 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0)")
423 (match_operand 0 "gpc_reg_operand")))
424
425 ;; Like reg_or_logical_cint_operand, but allow vsx registers
426 (define_predicate "vsx_reg_or_cint_operand"
427 (ior (match_operand 0 "vsx_register_operand")
428 (match_operand 0 "reg_or_logical_cint_operand")))
429
430 ;; Return 1 if operand is a CONST_DOUBLE that can be set in a register
431 ;; with no more than one instruction per word.
432 (define_predicate "easy_fp_constant"
433 (match_code "const_double")
434 {
435 long k[4];
436 REAL_VALUE_TYPE rv;
437
438 if (GET_MODE (op) != mode
439 || (!SCALAR_FLOAT_MODE_P (mode) && mode != DImode))
440 return 0;
441
442 /* Consider all constants with -msoft-float to be easy. */
443 if ((TARGET_SOFT_FLOAT || TARGET_E500_SINGLE
444 || (TARGET_HARD_FLOAT && (TARGET_SINGLE_FLOAT && ! TARGET_DOUBLE_FLOAT)))
445 && mode != DImode)
446 return 1;
447
448 /* The constant 0.0 is easy under VSX. */
449 if ((mode == SFmode || mode == DFmode || mode == SDmode || mode == DDmode)
450 && VECTOR_UNIT_VSX_P (DFmode) && op == CONST0_RTX (mode))
451 return 1;
452
453 if (DECIMAL_FLOAT_MODE_P (mode))
454 return 0;
455
456 /* If we are using V.4 style PIC, consider all constants to be hard. */
457 if (flag_pic && DEFAULT_ABI == ABI_V4)
458 return 0;
459
460 #ifdef TARGET_RELOCATABLE
461 /* Similarly if we are using -mrelocatable, consider all constants
462 to be hard. */
463 if (TARGET_RELOCATABLE)
464 return 0;
465 #endif
466
467 switch (mode)
468 {
469 case TFmode:
470 if (TARGET_E500_DOUBLE)
471 return 0;
472
473 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
474 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
475
476 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
477 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
478 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
479 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
480
481 case DFmode:
482 /* Force constants to memory before reload to utilize
483 compress_float_constant.
484 Avoid this when flag_unsafe_math_optimizations is enabled
485 because RDIV division to reciprocal optimization is not able
486 to regenerate the division. */
487 if (TARGET_E500_DOUBLE
488 || (!reload_in_progress && !reload_completed
489 && !flag_unsafe_math_optimizations))
490 return 0;
491
492 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
493 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
494
495 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
496 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
497
498 case SFmode:
499 /* Force constants to memory before reload to utilize
500 compress_float_constant.
501 Avoid this when flag_unsafe_math_optimizations is enabled
502 because RDIV division to reciprocal optimization is not able
503 to regenerate the division. */
504 if (!reload_in_progress && !reload_completed
505 && !flag_unsafe_math_optimizations)
506 return 0;
507
508 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
509 REAL_VALUE_TO_TARGET_SINGLE (rv, k[0]);
510
511 return num_insns_constant_wide (k[0]) == 1;
512
513 case DImode:
514 return (num_insns_constant (op, DImode) <= 2);
515
516 case SImode:
517 return 1;
518
519 default:
520 gcc_unreachable ();
521 }
522 })
523
524 ;; Return 1 if the operand must be loaded from memory. This is used by a
 525 ;; define_split to ensure constants get pushed to the constant pool before
526 ;; reload. If -ffast-math is used, easy_fp_constant will allow move insns to
 527 ;; have constants in order not to interfere with reciprocal estimation. However,
528 ;; with -mupper-regs support, these constants must be moved to the constant
529 ;; pool before register allocation.
530
531 (define_predicate "memory_fp_constant"
532 (match_code "const_double")
533 {
534 if (TARGET_VSX && op == CONST0_RTX (mode))
535 return 0;
536
537 if (!TARGET_HARD_FLOAT || !TARGET_FPRS
538 || (mode == SFmode && !TARGET_SINGLE_FLOAT)
539 || (mode == DFmode && !TARGET_DOUBLE_FLOAT))
540 return 0;
541
542 return 1;
543 })
544
545 ;; Return 1 if the operand is a CONST_VECTOR and can be loaded into a
546 ;; vector register without using memory.
547 (define_predicate "easy_vector_constant"
548 (match_code "const_vector")
549 {
550 /* As the paired vectors are actually FPRs it seems that there is
551 no easy way to load a CONST_VECTOR without using memory. */
552 if (TARGET_PAIRED_FLOAT)
553 return false;
554
555 if (VECTOR_MEM_ALTIVEC_OR_VSX_P (mode))
556 {
557 if (zero_constant (op, mode))
558 return true;
559
560 return easy_altivec_constant (op, mode);
561 }
562
563 if (SPE_VECTOR_MODE (mode))
564 {
565 int cst, cst2;
566 if (zero_constant (op, mode))
567 return true;
568 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
569 return false;
570
571 /* Limit SPE vectors to 15 bits signed. These we can generate with:
572 li r0, CONSTANT1
573 evmergelo r0, r0, r0
574 li r0, CONSTANT2
575
576 I don't know how efficient it would be to allow bigger constants,
577 considering we'll have an extra 'ori' for every 'li'. I doubt 5
578 instructions is better than a 64-bit memory load, but I don't
579 have the e500 timing specs. */
580 if (mode == V2SImode)
581 {
582 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
583 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
584 return cst >= -0x7fff && cst <= 0x7fff
585 && cst2 >= -0x7fff && cst2 <= 0x7fff;
586 }
587 }
588
589 return false;
590 })
591
592 ;; Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF.
593 (define_predicate "easy_vector_constant_add_self"
594 (and (match_code "const_vector")
595 (and (match_test "TARGET_ALTIVEC")
596 (match_test "easy_altivec_constant (op, mode)")))
597 {
598 HOST_WIDE_INT val;
599 int elt;
600 if (mode == V2DImode || mode == V2DFmode)
601 return 0;
602 elt = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (mode) - 1 : 0;
603 val = const_vector_elt_as_int (op, elt);
604 val = ((val & 0xff) ^ 0x80) - 0x80;
605 return EASY_VECTOR_15_ADD_SELF (val);
606 })
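;; Added note: the ((val & 0xff) ^ 0x80) - 0x80 step above sign-extends the
;; low eight bits of the element value.  A constant such as a vector of 20s
;; can then be built without memory, e.g. vspltisw v0,10 followed by
;; vadduwm v0,v0,v0 (hence "add_self").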
607
608 ;; Same as easy_vector_constant but only for EASY_VECTOR_MSB.
609 (define_predicate "easy_vector_constant_msb"
610 (and (match_code "const_vector")
611 (and (match_test "TARGET_ALTIVEC")
612 (match_test "easy_altivec_constant (op, mode)")))
613 {
614 HOST_WIDE_INT val;
615 int elt;
616 if (mode == V2DImode || mode == V2DFmode)
617 return 0;
618 elt = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (mode) - 1 : 0;
619 val = const_vector_elt_as_int (op, elt);
620 return EASY_VECTOR_MSB (val, GET_MODE_INNER (mode));
621 })
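;; Added note: EASY_VECTOR_MSB matches constants in which only the sign
;; (most significant) bit of each element is set, e.g. 0x80000000 in every
;; word.  Such a vector is typically synthesized with a splat of -1 followed
;; by a vector shift left rather than loaded from memory.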
622
623 ;; Return 1 if operand is constant zero (scalars and vectors).
624 (define_predicate "zero_constant"
625 (and (match_code "const_int,const_double,const_wide_int,const_vector")
626 (match_test "op == CONST0_RTX (mode)")))
627
628 ;; Return 1 if operand is 0.0.
629 (define_predicate "zero_fp_constant"
630 (and (match_code "const_double")
631 (match_test "SCALAR_FLOAT_MODE_P (mode)
632 && op == CONST0_RTX (mode)")))
633
634 ;; Return 1 if the operand is in volatile memory. Note that during the
635 ;; RTL generation phase, memory_operand does not return TRUE for volatile
636 ;; memory references. So this function allows us to recognize volatile
637 ;; references where it's safe.
638 (define_predicate "volatile_mem_operand"
639 (and (and (match_code "mem")
640 (match_test "MEM_VOLATILE_P (op)"))
641 (if_then_else (match_test "reload_completed")
642 (match_operand 0 "memory_operand")
643 (if_then_else (match_test "reload_in_progress")
644 (match_test "strict_memory_address_p (mode, XEXP (op, 0))")
645 (match_test "memory_address_p (mode, XEXP (op, 0))")))))
646
647 ;; Return 1 if the operand is an offsettable memory operand.
648 (define_predicate "offsettable_mem_operand"
649 (and (match_operand 0 "memory_operand")
650 (match_test "offsettable_nonstrict_memref_p (op)")))
651
652 ;; Return 1 if the operand is suitable for load/store quad memory.
653 ;; This predicate only checks for non-atomic loads/stores (not lqarx/stqcx).
654 (define_predicate "quad_memory_operand"
655 (match_code "mem")
656 {
657 rtx addr, op0, op1;
658 int ret;
659
660 if (!TARGET_QUAD_MEMORY && !TARGET_SYNC_TI)
661 ret = 0;
662
663 else if (!memory_operand (op, mode))
664 ret = 0;
665
666 else if (GET_MODE_SIZE (GET_MODE (op)) != 16)
667 ret = 0;
668
669 else if (MEM_ALIGN (op) < 128)
670 ret = 0;
671
672 else
673 {
674 addr = XEXP (op, 0);
675 if (int_reg_operand (addr, Pmode))
676 ret = 1;
677
678 else if (GET_CODE (addr) != PLUS)
679 ret = 0;
680
681 else
682 {
683 op0 = XEXP (addr, 0);
684 op1 = XEXP (addr, 1);
685 ret = (int_reg_operand (op0, Pmode)
686 && GET_CODE (op1) == CONST_INT
687 && IN_RANGE (INTVAL (op1), -32768, 32767)
688 && (INTVAL (op1) & 15) == 0);
689 }
690 }
691
692 if (TARGET_DEBUG_ADDR)
693 {
694 fprintf (stderr, "\nquad_memory_operand, ret = %s\n", ret ? "true" : "false");
695 debug_rtx (op);
696 }
697
698 return ret;
699 })
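;; Added example: a 128-bit aligned reference such as 16(r5) passes the checks
;; above (the offset is a multiple of 16 within the signed 16-bit range),
;; while 8(r5) or an unaligned reference is rejected.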
700
701 ;; Return 1 if the operand is an indexed or indirect memory operand.
702 (define_predicate "indexed_or_indirect_operand"
703 (match_code "mem")
704 {
705 op = XEXP (op, 0);
706 if (VECTOR_MEM_ALTIVEC_P (mode)
707 && GET_CODE (op) == AND
708 && GET_CODE (XEXP (op, 1)) == CONST_INT
709 && INTVAL (XEXP (op, 1)) == -16)
710 op = XEXP (op, 0);
711
712 return indexed_or_indirect_address (op, mode);
713 })
714
715 ;; Like indexed_or_indirect_operand, but also allow a GPR register if direct
716 ;; moves are supported.
717 (define_predicate "reg_or_indexed_operand"
718 (match_code "mem,reg")
719 {
720 if (MEM_P (op))
721 return indexed_or_indirect_operand (op, mode);
722 else if (TARGET_DIRECT_MOVE)
723 return register_operand (op, mode);
724 return
725 0;
726 })
727
728 ;; Return 1 if the operand is an indexed or indirect memory operand with an
729 ;; AND -16 in it, used to recognize when we need to switch to Altivec loads
730 ;; to realign loops instead of VSX (altivec silently ignores the bottom bits,
731 ;; while VSX uses the full address and traps)
732 (define_predicate "altivec_indexed_or_indirect_operand"
733 (match_code "mem")
734 {
735 op = XEXP (op, 0);
736 if (VECTOR_MEM_ALTIVEC_OR_VSX_P (mode)
737 && GET_CODE (op) == AND
738 && GET_CODE (XEXP (op, 1)) == CONST_INT
739 && INTVAL (XEXP (op, 1)) == -16)
740 return indexed_or_indirect_address (XEXP (op, 0), mode);
741
742 return 0;
743 })
744
745 ;; Return 1 if the operand is an indexed or indirect address.
746 (define_special_predicate "indexed_or_indirect_address"
747 (and (match_test "REG_P (op)
748 || (GET_CODE (op) == PLUS
749 /* Omit testing REG_P (XEXP (op, 0)). */
750 && REG_P (XEXP (op, 1)))")
751 (match_operand 0 "address_operand")))
752
753 ;; Return 1 if the operand is an index-form address.
754 (define_special_predicate "indexed_address"
755 (match_test "(GET_CODE (op) == PLUS
756 && REG_P (XEXP (op, 0))
757 && REG_P (XEXP (op, 1)))"))
758
759 ;; Return 1 if the operand is a MEM with an update-form address. This may
760 ;; also include update-indexed form.
761 (define_special_predicate "update_address_mem"
762 (match_test "(MEM_P (op)
763 && (GET_CODE (XEXP (op, 0)) == PRE_INC
764 || GET_CODE (XEXP (op, 0)) == PRE_DEC
765 || GET_CODE (XEXP (op, 0)) == PRE_MODIFY))"))
766
767 ;; Return 1 if the operand is a MEM with an indexed-form address.
768 (define_special_predicate "indexed_address_mem"
769 (match_test "(MEM_P (op)
770 && (indexed_address (XEXP (op, 0), mode)
771 || (GET_CODE (XEXP (op, 0)) == PRE_MODIFY
772 && indexed_address (XEXP (XEXP (op, 0), 1), mode))))"))
773
774 ;; Used for the destination of the fix_truncdfsi2 expander.
775 ;; If stfiwx will be used, the result goes to memory; otherwise,
776 ;; we're going to emit a store and a load of a subreg, so the dest is a
777 ;; register.
778 (define_predicate "fix_trunc_dest_operand"
779 (if_then_else (match_test "! TARGET_E500_DOUBLE && TARGET_PPC_GFXOPT")
780 (match_operand 0 "memory_operand")
781 (match_operand 0 "gpc_reg_operand")))
782
783 ;; Return 1 if the operand is either a non-special register or can be used
784 ;; as the operand of a `mode' add insn.
785 (define_predicate "add_operand"
786 (if_then_else (match_code "const_int")
787 (match_test "satisfies_constraint_I (op)
788 || satisfies_constraint_L (op)")
789 (match_operand 0 "gpc_reg_operand")))
790
791 ;; Return 1 if the operand is either a non-special register, or 0, or -1.
792 (define_predicate "adde_operand"
793 (if_then_else (match_code "const_int")
794 (match_test "INTVAL (op) == 0 || INTVAL (op) == -1")
795 (match_operand 0 "gpc_reg_operand")))
796
797 ;; Return 1 if OP is a constant but not a valid add_operand.
798 (define_predicate "non_add_cint_operand"
799 (and (match_code "const_int")
800 (match_test "!satisfies_constraint_I (op)
801 && !satisfies_constraint_L (op)")))
802
803 ;; Return 1 if the operand is a constant that can be used as the operand
804 ;; of an OR or XOR.
805 (define_predicate "logical_const_operand"
806 (match_code "const_int")
807 {
808 HOST_WIDE_INT opl;
809
810 opl = INTVAL (op) & GET_MODE_MASK (mode);
811
812 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
813 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
814 })
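;; Added note: the test above accepts a constant whose set bits lie entirely
;; in the low halfword (handled by ori/xori) or entirely in the high halfword
;; of the 32-bit value (handled by oris/xoris).  For example, 0x0000beef and
;; 0xbeef0000 are accepted, while 0xdeadbeef is not.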
815
816 ;; Return 1 if the operand is a non-special register or a constant that
817 ;; can be used as the operand of an OR or XOR.
818 (define_predicate "logical_operand"
819 (ior (match_operand 0 "gpc_reg_operand")
820 (match_operand 0 "logical_const_operand")))
821
822 ;; Return 1 if op is a constant that is not a logical operand, but could
823 ;; be split into one.
824 (define_predicate "non_logical_cint_operand"
825 (and (match_code "const_int,const_wide_int")
826 (and (not (match_operand 0 "logical_operand"))
827 (match_operand 0 "reg_or_logical_cint_operand"))))
828
829 ;; Return 1 if op is a constant that can be encoded in a 32-bit mask,
830 ;; suitable for use with rlwinm (no more than two 1->0 or 0->1
831 ;; transitions). Reject all ones and all zeros, since these should have
 832 ;; been optimized away and would confuse the computation of MB and ME.
833 (define_predicate "mask_operand"
834 (match_code "const_int")
835 {
836 unsigned HOST_WIDE_INT c, lsb;
837
838 c = INTVAL (op);
839
840 if (TARGET_POWERPC64)
841 {
842 /* Fail if the mask is not 32-bit. */
843 if (mode == DImode && (c & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0)
844 return 0;
845
846 /* Fail if the mask wraps around because the upper 32-bits of the
847 mask will all be 1s, contrary to GCC's internal view. */
848 if ((c & 0x80000001) == 0x80000001)
849 return 0;
850 }
851
852 /* We don't change the number of transitions by inverting,
853 so make sure we start with the LS bit zero. */
854 if (c & 1)
855 c = ~c;
856
857 /* Reject all zeros or all ones. */
858 if (c == 0)
859 return 0;
860
861 /* Find the first transition. */
862 lsb = c & -c;
863
864 /* Invert to look for a second transition. */
865 c = ~c;
866
867 /* Erase first transition. */
868 c &= -lsb;
869
870 /* Find the second transition (if any). */
871 lsb = c & -c;
872
873 /* Match if all the bits above are 1's (or c is zero). */
874 return c == -lsb;
875 })
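;; Added example: 0x00ffff00 (one contiguous run of 1 bits, two transitions)
;; satisfies this predicate, while 0x00ff00ff (two runs, four transitions)
;; does not.  The expression c & -c used above isolates the least significant
;; set bit, which is how each transition is located.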
876
877 ;; Return 1 for the PowerPC64 rlwinm corner case.
878 (define_predicate "mask_operand_wrap"
879 (match_code "const_int")
880 {
881 unsigned HOST_WIDE_INT c, lsb;
882
883 c = INTVAL (op);
884
885 if ((c & 0x80000001) != 0x80000001)
886 return 0;
887
888 c = ~c;
889 if (c == 0)
890 return 0;
891
892 lsb = c & -c;
893 c = ~c;
894 c &= -lsb;
895 lsb = c & -c;
896 return c == -lsb;
897 })
898
899 ;; Return 1 if the operand is a constant that is a PowerPC64 mask
900 ;; suitable for use with rldicl or rldicr (no more than one 1->0 or 0->1
901 ;; transition). Reject all zeros, since zero should have been
 902 ;; optimized away and would confuse the computation of MB and ME.
903 (define_predicate "mask64_operand"
904 (match_code "const_int")
905 {
906 unsigned HOST_WIDE_INT c, lsb;
907
908 c = INTVAL (op);
909
910 /* Reject all zeros. */
911 if (c == 0)
912 return 0;
913
914 /* We don't change the number of transitions by inverting,
915 so make sure we start with the LS bit zero. */
916 if (c & 1)
917 c = ~c;
918
919 /* Find the first transition. */
920 lsb = c & -c;
921
922 /* Match if all the bits above are 1's (or c is zero). */
923 return c == -lsb;
924 })
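;; Added example: 0x00000000000000ff (low-order run, rldicl) and
;; 0xffff000000000000 (high-order run, rldicr) satisfy this predicate, while
;; 0x0000ffff00000000 does not, because its run of 1 bits touches neither end
;; of the doubleword.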
925
926 ;; Like mask64_operand, but allow up to three transitions. This
927 ;; predicate is used by insn patterns that generate two rldicl or
928 ;; rldicr machine insns.
929 (define_predicate "mask64_2_operand"
930 (match_code "const_int")
931 {
932 unsigned HOST_WIDE_INT c, lsb;
933
934 c = INTVAL (op);
935
936 /* Disallow all zeros. */
937 if (c == 0)
938 return 0;
939
940 /* We don't change the number of transitions by inverting,
941 so make sure we start with the LS bit zero. */
942 if (c & 1)
943 c = ~c;
944
945 /* Find the first transition. */
946 lsb = c & -c;
947
948 /* Invert to look for a second transition. */
949 c = ~c;
950
951 /* Erase first transition. */
952 c &= -lsb;
953
954 /* Find the second transition. */
955 lsb = c & -c;
956
957 /* Invert to look for a third transition. */
958 c = ~c;
959
960 /* Erase second transition. */
961 c &= -lsb;
962
963 /* Find the third transition (if any). */
964 lsb = c & -c;
965
966 /* Match if all the bits above are 1's (or c is zero). */
967 return c == -lsb;
968 })
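;; Added example: 0x0000ffff00000000 (a single run of 1 bits in the middle of
;; the doubleword, two transitions) is accepted here and can be formed with
;; rldicl followed by rldicr, even though mask64_operand above rejects it.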
969
970 ;; Match a mask_operand or a mask64_operand.
971 (define_predicate "any_mask_operand"
972 (ior (match_operand 0 "mask_operand")
973 (and (match_test "TARGET_POWERPC64 && mode == DImode")
974 (match_operand 0 "mask64_operand"))))
975
976 ;; Like and_operand, but also match constants that can be implemented
977 ;; with two rldicl or rldicr insns.
978 (define_predicate "and64_2_operand"
979 (ior (match_operand 0 "mask64_2_operand")
980 (if_then_else (match_test "fixed_regs[CR0_REGNO]")
981 (match_operand 0 "gpc_reg_operand")
982 (match_operand 0 "logical_operand"))))
983
984 ;; Return 1 if the operand is either a non-special register or a
985 ;; constant that can be used as the operand of a logical AND.
986 (define_predicate "and_operand"
987 (ior (match_operand 0 "mask_operand")
988 (and (match_test "TARGET_POWERPC64 && mode == DImode")
989 (match_operand 0 "mask64_operand"))
990 (if_then_else (match_test "fixed_regs[CR0_REGNO]")
991 (match_operand 0 "gpc_reg_operand")
992 (match_operand 0 "logical_operand"))))
993
994 ;; Return 1 if the operand is a constant that can be used as the operand
995 ;; of a logical AND, implemented with two rld* insns, and it cannot be done
996 ;; using just one insn.
997 (define_predicate "and_2rld_operand"
998 (and (match_operand 0 "and64_2_operand")
999 (not (match_operand 0 "and_operand"))))
1000
1001 ;; Return 1 if the operand is either a logical operand or a short cint operand.
1002 (define_predicate "scc_eq_operand"
1003 (ior (match_operand 0 "logical_operand")
1004 (match_operand 0 "short_cint_operand")))
1005
1006 ;; Return 1 if the operand is a general non-special register or memory operand.
1007 (define_predicate "reg_or_mem_operand"
1008 (ior (match_operand 0 "memory_operand")
1009 (ior (and (match_code "mem")
1010 (match_test "macho_lo_sum_memory_operand (op, mode)"))
1011 (ior (match_operand 0 "volatile_mem_operand")
1012 (match_operand 0 "gpc_reg_operand")))))
1013
1014 ;; Return 1 if the operand is either an easy FP constant or memory or reg.
1015 (define_predicate "reg_or_none500mem_operand"
1016 (if_then_else (match_code "mem")
1017 (and (match_test "!TARGET_E500_DOUBLE")
1018 (ior (match_operand 0 "memory_operand")
1019 (ior (match_test "macho_lo_sum_memory_operand (op, mode)")
1020 (match_operand 0 "volatile_mem_operand"))))
1021 (match_operand 0 "gpc_reg_operand")))
1022
1023 ;; Return 1 if the operand is CONST_DOUBLE 0, register or memory operand.
1024 (define_predicate "zero_reg_mem_operand"
1025 (ior (match_operand 0 "zero_fp_constant")
1026 (match_operand 0 "reg_or_mem_operand")))
1027
1028 ;; Return 1 if the operand is a CONST_INT and it is the element for 64-bit
 1029 ;; data types inside a vector that scalar instructions operate on.
1030 (define_predicate "vsx_scalar_64bit"
1031 (match_code "const_int")
1032 {
1033 return (INTVAL (op) == VECTOR_ELEMENT_SCALAR_64BIT);
1034 })
1035
1036 ;; Return 1 if the operand is a general register or memory operand without
1037 ;; pre_inc or pre_dec or pre_modify, which produces invalid form of PowerPC
1038 ;; lwa instruction.
1039 (define_predicate "lwa_operand"
1040 (match_code "reg,subreg,mem")
1041 {
1042 rtx inner, addr, offset;
1043
1044 inner = op;
1045 if (reload_completed && GET_CODE (inner) == SUBREG)
1046 inner = SUBREG_REG (inner);
1047
1048 if (gpc_reg_operand (inner, mode))
1049 return true;
1050 if (!memory_operand (inner, mode))
1051 return false;
1052 if (!rs6000_gen_cell_microcode)
1053 return false;
1054
1055 addr = XEXP (inner, 0);
1056 if (GET_CODE (addr) == PRE_INC
1057 || GET_CODE (addr) == PRE_DEC
1058 || (GET_CODE (addr) == PRE_MODIFY
1059 && !legitimate_indexed_address_p (XEXP (addr, 1), 0)))
1060 return false;
1061 if (GET_CODE (addr) == LO_SUM
1062 && GET_CODE (XEXP (addr, 0)) == REG
1063 && GET_CODE (XEXP (addr, 1)) == CONST)
1064 addr = XEXP (XEXP (addr, 1), 0);
1065 if (GET_CODE (addr) != PLUS)
1066 return true;
1067 offset = XEXP (addr, 1);
1068 if (GET_CODE (offset) != CONST_INT)
1069 return true;
1070 return INTVAL (offset) % 4 == 0;
1071 })
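;; Added note: lwa and ld use the DS instruction format, whose displacement
;; must be a multiple of 4, hence the final check above.  A reference such as
;; 8(r3) is allowed, while 6(r3) is rejected by this predicate.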
1072
1073 ;; Return 1 if the operand, used inside a MEM, is a SYMBOL_REF.
1074 (define_predicate "symbol_ref_operand"
1075 (and (match_code "symbol_ref")
1076 (match_test "(mode == VOIDmode || GET_MODE (op) == mode)
1077 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))")))
1078
1079 ;; Return 1 if op is an operand that can be loaded via the GOT.
 1080 ;; Such an operand is a SYMBOL_REF, a CONST, or a LABEL_REF.
1081 (define_predicate "got_operand"
1082 (match_code "symbol_ref,const,label_ref"))
1083
1084 ;; Return 1 if op is a simple reference that can be loaded via the GOT,
1085 ;; excluding labels involving addition.
1086 (define_predicate "got_no_const_operand"
1087 (match_code "symbol_ref,label_ref"))
1088
1089 ;; Return 1 if op is a SYMBOL_REF for a TLS symbol.
1090 (define_predicate "rs6000_tls_symbol_ref"
1091 (and (match_code "symbol_ref")
1092 (match_test "RS6000_SYMBOL_REF_TLS_P (op)")))
1093
1094 ;; Return 1 if the operand, used inside a MEM, is a valid first argument
1095 ;; to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR.
1096 (define_predicate "call_operand"
1097 (if_then_else (match_code "reg")
1098 (match_test "REGNO (op) == LR_REGNO
1099 || REGNO (op) == CTR_REGNO
1100 || REGNO (op) >= FIRST_PSEUDO_REGISTER")
1101 (match_code "symbol_ref")))
1102
1103 ;; Return 1 if the operand is a SYMBOL_REF for a function known to be in
1104 ;; this file.
1105 (define_predicate "current_file_function_operand"
1106 (and (match_code "symbol_ref")
1107 (match_test "(DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
1108 && ((SYMBOL_REF_LOCAL_P (op)
1109 && ((DEFAULT_ABI != ABI_AIX
1110 && DEFAULT_ABI != ABI_ELFv2)
1111 || !SYMBOL_REF_EXTERNAL_P (op)))
1112 || (op == XEXP (DECL_RTL (current_function_decl),
1113 0)))")))
1114
1115 ;; Return 1 if this operand is a valid input for a move insn.
1116 (define_predicate "input_operand"
1117 (match_code "symbol_ref,const,reg,subreg,mem,
1118 const_double,const_wide_int,const_vector,const_int")
1119 {
1120 /* Memory is always valid. */
1121 if (memory_operand (op, mode))
1122 return 1;
1123
1124 /* For floating-point, easy constants are valid. */
1125 if (SCALAR_FLOAT_MODE_P (mode)
1126 && easy_fp_constant (op, mode))
1127 return 1;
1128
1129 /* Allow any integer constant. */
1130 if (GET_MODE_CLASS (mode) == MODE_INT
1131 && CONST_SCALAR_INT_P (op))
1132 return 1;
1133
1134 /* Allow easy vector constants. */
1135 if (GET_CODE (op) == CONST_VECTOR
1136 && easy_vector_constant (op, mode))
1137 return 1;
1138
1139 /* Do not allow invalid E500 subregs. */
1140 if ((TARGET_E500_DOUBLE || TARGET_SPE)
1141 && GET_CODE (op) == SUBREG
1142 && invalid_e500_subreg (op, mode))
1143 return 0;
1144
1145 /* For floating-point or multi-word mode, the only remaining valid type
1146 is a register. */
1147 if (SCALAR_FLOAT_MODE_P (mode)
1148 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1149 return register_operand (op, mode);
1150
1151 /* We don't allow moving the carry bit around. */
1152 if (ca_operand (op, mode))
1153 return 0;
1154
1155 /* The only cases left are integral modes one word or smaller (we
1156 do not get called for MODE_CC values). These can be in any
1157 register. */
1158 if (register_operand (op, mode))
1159 return 1;
1160
1161 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1162 to be valid. */
1163 if (DEFAULT_ABI == ABI_V4
1164 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1165 && small_data_operand (op, Pmode))
1166 return 1;
1167
1168 return 0;
1169 })
1170
1171 ;; Return 1 if this operand is a valid input for a vsx_splat insn.
1172 (define_predicate "splat_input_operand"
1173 (match_code "symbol_ref,const,reg,subreg,mem,
1174 const_double,const_wide_int,const_vector,const_int")
1175 {
1176 if (MEM_P (op))
1177 {
1178 if (! volatile_ok && MEM_VOLATILE_P (op))
1179 return 0;
1180 if (mode == DFmode)
1181 mode = V2DFmode;
1182 else if (mode == DImode)
1183 mode = V2DImode;
1184 else
1185 gcc_unreachable ();
1186 return memory_address_addr_space_p (mode, XEXP (op, 0),
1187 MEM_ADDR_SPACE (op));
1188 }
1189 return input_operand (op, mode);
1190 })
1191
1192 ;; Return true if OP is a non-immediate operand and not an invalid
1193 ;; SUBREG operation on the e500.
1194 (define_predicate "rs6000_nonimmediate_operand"
1195 (match_code "reg,subreg,mem")
1196 {
1197 if ((TARGET_E500_DOUBLE || TARGET_SPE)
1198 && GET_CODE (op) == SUBREG
1199 && invalid_e500_subreg (op, mode))
1200 return 0;
1201
1202 return nonimmediate_operand (op, mode);
1203 })
1204
1205 ;; Return true if operand is boolean operator.
1206 (define_predicate "boolean_operator"
1207 (match_code "and,ior,xor"))
1208
1209 ;; Return true if operand is OR-form of boolean operator.
1210 (define_predicate "boolean_or_operator"
1211 (match_code "ior,xor"))
1212
1213 ;; Return true if operand is an equality operator.
1214 (define_special_predicate "equality_operator"
1215 (match_code "eq,ne"))
1216
1217 ;; Return true if operand is MIN or MAX operator.
1218 (define_predicate "min_max_operator"
1219 (match_code "smin,smax,umin,umax"))
1220
1221 ;; Return 1 if OP is a comparison operation that is valid for a branch
1222 ;; instruction. We check the opcode against the mode of the CC value.
1223 ;; validate_condition_mode is an assertion.
1224 (define_predicate "branch_comparison_operator"
1225 (and (match_operand 0 "comparison_operator")
1226 (and (match_test "GET_MODE_CLASS (GET_MODE (XEXP (op, 0))) == MODE_CC")
1227 (match_test "validate_condition_mode (GET_CODE (op),
1228 GET_MODE (XEXP (op, 0))),
1229 1"))))
1230
1231 ;; Return 1 if OP is a valid comparison operator for "cbranch" instructions.
1232 ;; If we're assuming that FP operations cannot generate user-visible traps,
1233 ;; then on e500 we can use the ordered-signaling instructions to implement
1234 ;; the unordered-quiet FP comparison predicates modulo a reversal.
1235 (define_predicate "rs6000_cbranch_operator"
1236 (if_then_else (match_test "TARGET_HARD_FLOAT && !TARGET_FPRS")
1237 (if_then_else (match_test "flag_trapping_math")
1238 (match_operand 0 "ordered_comparison_operator")
1239 (ior (match_operand 0 "ordered_comparison_operator")
1240 (match_code ("unlt,unle,ungt,unge"))))
1241 (match_operand 0 "comparison_operator")))
1242
1243 ;; Return 1 if OP is an unsigned comparison operator.
1244 (define_predicate "unsigned_comparison_operator"
1245 (match_code "ltu,gtu,leu,geu"))
1246
1247 ;; Return 1 if OP is a signed comparison operator.
1248 (define_predicate "signed_comparison_operator"
1249 (match_code "lt,gt,le,ge"))
1250
1251 ;; Return 1 if OP is a comparison operation that is valid for an SCC insn --
1252 ;; it must be a positive comparison.
1253 (define_predicate "scc_comparison_operator"
1254 (and (match_operand 0 "branch_comparison_operator")
1255 (match_code "eq,lt,gt,ltu,gtu,unordered")))
1256
1257 ;; Return 1 if OP is a comparison operation whose inverse would be valid for
1258 ;; an SCC insn.
1259 (define_predicate "scc_rev_comparison_operator"
1260 (and (match_operand 0 "branch_comparison_operator")
1261 (match_code "ne,le,ge,leu,geu,ordered")))
1262
1263 ;; Return 1 if OP is a comparison operation that is valid for a branch
1264 ;; insn, which is true if the corresponding bit in the CC register is set.
1265 (define_predicate "branch_positive_comparison_operator"
1266 (and (match_operand 0 "branch_comparison_operator")
1267 (match_code "eq,lt,gt,ltu,gtu,unordered")))
1268
1269 ;; Return 1 if OP is a load multiple operation, known to be a PARALLEL.
1270 (define_predicate "load_multiple_operation"
1271 (match_code "parallel")
1272 {
1273 int count = XVECLEN (op, 0);
1274 unsigned int dest_regno;
1275 rtx src_addr;
1276 int i;
1277
1278 /* Perform a quick check so we don't blow up below. */
1279 if (count <= 1
1280 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1281 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
1282 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
1283 return 0;
1284
1285 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
1286 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
1287
1288 for (i = 1; i < count; i++)
1289 {
1290 rtx elt = XVECEXP (op, 0, i);
1291
1292 if (GET_CODE (elt) != SET
1293 || GET_CODE (SET_DEST (elt)) != REG
1294 || GET_MODE (SET_DEST (elt)) != SImode
1295 || REGNO (SET_DEST (elt)) != dest_regno + i
1296 || GET_CODE (SET_SRC (elt)) != MEM
1297 || GET_MODE (SET_SRC (elt)) != SImode
1298 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
1299 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
1300 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
1301 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
1302 return 0;
1303 }
1304
1305 return 1;
1306 })
1307
1308 ;; Return 1 if OP is a store multiple operation, known to be a PARALLEL.
1309 ;; The second vector element is a CLOBBER.
1310 (define_predicate "store_multiple_operation"
1311 (match_code "parallel")
1312 {
1313 int count = XVECLEN (op, 0) - 1;
1314 unsigned int src_regno;
1315 rtx dest_addr;
1316 int i;
1317
1318 /* Perform a quick check so we don't blow up below. */
1319 if (count <= 1
1320 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1321 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
1322 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
1323 return 0;
1324
1325 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
1326 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
1327
1328 for (i = 1; i < count; i++)
1329 {
1330 rtx elt = XVECEXP (op, 0, i + 1);
1331
1332 if (GET_CODE (elt) != SET
1333 || GET_CODE (SET_SRC (elt)) != REG
1334 || GET_MODE (SET_SRC (elt)) != SImode
1335 || REGNO (SET_SRC (elt)) != src_regno + i
1336 || GET_CODE (SET_DEST (elt)) != MEM
1337 || GET_MODE (SET_DEST (elt)) != SImode
1338 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
1339 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
1340 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
1341 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
1342 return 0;
1343 }
1344
1345 return 1;
1346 })
1347
1348 ;; Return 1 if OP is valid for a save_world call in prologue, known to be
 1349 ;; a PARALLEL.
1350 (define_predicate "save_world_operation"
1351 (match_code "parallel")
1352 {
1353 int index;
1354 int i;
1355 rtx elt;
1356 int count = XVECLEN (op, 0);
1357
1358 if (count != 54)
1359 return 0;
1360
1361 index = 0;
1362 if (GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
1363 || GET_CODE (XVECEXP (op, 0, index++)) != USE)
1364 return 0;
1365
1366 for (i=1; i <= 18; i++)
1367 {
1368 elt = XVECEXP (op, 0, index++);
1369 if (GET_CODE (elt) != SET
1370 || GET_CODE (SET_DEST (elt)) != MEM
1371 || ! memory_operand (SET_DEST (elt), DFmode)
1372 || GET_CODE (SET_SRC (elt)) != REG
1373 || GET_MODE (SET_SRC (elt)) != DFmode)
1374 return 0;
1375 }
1376
1377 for (i=1; i <= 12; i++)
1378 {
1379 elt = XVECEXP (op, 0, index++);
1380 if (GET_CODE (elt) != SET
1381 || GET_CODE (SET_DEST (elt)) != MEM
1382 || GET_CODE (SET_SRC (elt)) != REG
1383 || GET_MODE (SET_SRC (elt)) != V4SImode)
1384 return 0;
1385 }
1386
1387 for (i=1; i <= 19; i++)
1388 {
1389 elt = XVECEXP (op, 0, index++);
1390 if (GET_CODE (elt) != SET
1391 || GET_CODE (SET_DEST (elt)) != MEM
1392 || ! memory_operand (SET_DEST (elt), Pmode)
1393 || GET_CODE (SET_SRC (elt)) != REG
1394 || GET_MODE (SET_SRC (elt)) != Pmode)
1395 return 0;
1396 }
1397
1398 elt = XVECEXP (op, 0, index++);
1399 if (GET_CODE (elt) != SET
1400 || GET_CODE (SET_DEST (elt)) != MEM
1401 || ! memory_operand (SET_DEST (elt), Pmode)
1402 || GET_CODE (SET_SRC (elt)) != REG
1403 || REGNO (SET_SRC (elt)) != CR2_REGNO
1404 || GET_MODE (SET_SRC (elt)) != Pmode)
1405 return 0;
1406
1407 if (GET_CODE (XVECEXP (op, 0, index++)) != SET
1408 || GET_CODE (XVECEXP (op, 0, index++)) != SET)
1409 return 0;
1410 return 1;
1411 })
1412
1413 ;; Return 1 if OP is valid for a restore_world call in epilogue, known to be
 1414 ;; a PARALLEL.
1415 (define_predicate "restore_world_operation"
1416 (match_code "parallel")
1417 {
1418 int index;
1419 int i;
1420 rtx elt;
1421 int count = XVECLEN (op, 0);
1422
1423 if (count != 59)
1424 return 0;
1425
1426 index = 0;
1427 if (GET_CODE (XVECEXP (op, 0, index++)) != RETURN
1428 || GET_CODE (XVECEXP (op, 0, index++)) != USE
1429 || GET_CODE (XVECEXP (op, 0, index++)) != USE
1430 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER)
1431 return 0;
1432
1433 elt = XVECEXP (op, 0, index++);
1434 if (GET_CODE (elt) != SET
1435 || GET_CODE (SET_SRC (elt)) != MEM
1436 || ! memory_operand (SET_SRC (elt), Pmode)
1437 || GET_CODE (SET_DEST (elt)) != REG
1438 || REGNO (SET_DEST (elt)) != CR2_REGNO
1439 || GET_MODE (SET_DEST (elt)) != Pmode)
1440 return 0;
1441
1442 for (i=1; i <= 19; i++)
1443 {
1444 elt = XVECEXP (op, 0, index++);
1445 if (GET_CODE (elt) != SET
1446 || GET_CODE (SET_SRC (elt)) != MEM
1447 || ! memory_operand (SET_SRC (elt), Pmode)
1448 || GET_CODE (SET_DEST (elt)) != REG
1449 || GET_MODE (SET_DEST (elt)) != Pmode)
1450 return 0;
1451 }
1452
1453 for (i=1; i <= 12; i++)
1454 {
1455 elt = XVECEXP (op, 0, index++);
1456 if (GET_CODE (elt) != SET
1457 || GET_CODE (SET_SRC (elt)) != MEM
1458 || GET_CODE (SET_DEST (elt)) != REG
1459 || GET_MODE (SET_DEST (elt)) != V4SImode)
1460 return 0;
1461 }
1462
1463 for (i=1; i <= 18; i++)
1464 {
1465 elt = XVECEXP (op, 0, index++);
1466 if (GET_CODE (elt) != SET
1467 || GET_CODE (SET_SRC (elt)) != MEM
1468 || ! memory_operand (SET_SRC (elt), DFmode)
1469 || GET_CODE (SET_DEST (elt)) != REG
1470 || GET_MODE (SET_DEST (elt)) != DFmode)
1471 return 0;
1472 }
1473
1474 if (GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
1475 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
1476 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
1477 || GET_CODE (XVECEXP (op, 0, index++)) != CLOBBER
1478 || GET_CODE (XVECEXP (op, 0, index++)) != USE)
1479 return 0;
1480 return 1;
1481 })
1482
1483 ;; Return 1 if OP is valid for a vrsave call, known to be a PARALLEL.
1484 (define_predicate "vrsave_operation"
1485 (match_code "parallel")
1486 {
1487 int count = XVECLEN (op, 0);
1488 unsigned int dest_regno, src_regno;
1489 int i;
1490
1491 if (count <= 1
1492 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1493 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
1494 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE
1495 || XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPECV_SET_VRSAVE)
1496 return 0;
1497
1498 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
1499 src_regno = REGNO (XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 1));
1500
1501 if (dest_regno != VRSAVE_REGNO || src_regno != VRSAVE_REGNO)
1502 return 0;
1503
1504 for (i = 1; i < count; i++)
1505 {
1506 rtx elt = XVECEXP (op, 0, i);
1507
1508 if (GET_CODE (elt) != CLOBBER
1509 && GET_CODE (elt) != SET)
1510 return 0;
1511 }
1512
1513 return 1;
1514 })
1515
1516 ;; Return 1 if OP is valid for mfcr insn, known to be a PARALLEL.
1517 (define_predicate "mfcr_operation"
1518 (match_code "parallel")
1519 {
1520 int count = XVECLEN (op, 0);
1521 int i;
1522
1523 /* Perform a quick check so we don't blow up below. */
1524 if (count < 1
1525 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1526 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
1527 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
1528 return 0;
1529
1530 for (i = 0; i < count; i++)
1531 {
1532 rtx exp = XVECEXP (op, 0, i);
1533 rtx unspec;
1534 int maskval;
1535 rtx src_reg;
1536
1537 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
1538
1539 if (GET_CODE (src_reg) != REG
1540 || GET_MODE (src_reg) != CCmode
1541 || ! CR_REGNO_P (REGNO (src_reg)))
1542 return 0;
1543
1544 if (GET_CODE (exp) != SET
1545 || GET_CODE (SET_DEST (exp)) != REG
1546 || GET_MODE (SET_DEST (exp)) != SImode
1547 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
1548 return 0;
1549 unspec = SET_SRC (exp);
1550 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
1551
1552 if (GET_CODE (unspec) != UNSPEC
1553 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
1554 || XVECLEN (unspec, 0) != 2
1555 || XVECEXP (unspec, 0, 0) != src_reg
1556 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
1557 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
1558 return 0;
1559 }
1560 return 1;
1561 })
1562
1563 ;; Return 1 if OP is valid for mtcrf insn, known to be a PARALLEL.
1564 (define_predicate "mtcrf_operation"
1565 (match_code "parallel")
1566 {
1567 int count = XVECLEN (op, 0);
1568 int i;
1569 rtx src_reg;
1570
1571 /* Perform a quick check so we don't blow up below. */
1572 if (count < 1
1573 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1574 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
1575 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
1576 return 0;
1577 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
1578
1579 if (GET_CODE (src_reg) != REG
1580 || GET_MODE (src_reg) != SImode
1581 || ! INT_REGNO_P (REGNO (src_reg)))
1582 return 0;
1583
1584 for (i = 0; i < count; i++)
1585 {
1586 rtx exp = XVECEXP (op, 0, i);
1587 rtx unspec;
1588 int maskval;
1589
1590 if (GET_CODE (exp) != SET
1591 || GET_CODE (SET_DEST (exp)) != REG
1592 || GET_MODE (SET_DEST (exp)) != CCmode
1593 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
1594 return 0;
1595 unspec = SET_SRC (exp);
1596 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
1597
1598 if (GET_CODE (unspec) != UNSPEC
1599 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
1600 || XVECLEN (unspec, 0) != 2
1601 || XVECEXP (unspec, 0, 0) != src_reg
1602 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
1603 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
1604 return 0;
1605 }
1606 return 1;
1607 })
1608
1609 ;; Return 1 if OP is valid for crsave insn, known to be a PARALLEL.
1610 (define_predicate "crsave_operation"
1611 (match_code "parallel")
1612 {
1613 int count = XVECLEN (op, 0);
1614 int i;
1615
1616 for (i = 1; i < count; i++)
1617 {
1618 rtx exp = XVECEXP (op, 0, i);
1619
1620 if (GET_CODE (exp) != USE
1621 || GET_CODE (XEXP (exp, 0)) != REG
1622 || GET_MODE (XEXP (exp, 0)) != CCmode
1623 || ! CR_REGNO_P (REGNO (XEXP (exp, 0))))
1624 return 0;
1625 }
1626 return 1;
1627 })
1628
1629 ;; Return 1 if OP is valid for lmw insn, known to be a PARALLEL.
1630 (define_predicate "lmw_operation"
1631 (match_code "parallel")
1632 {
1633 int count = XVECLEN (op, 0);
1634 unsigned int dest_regno;
1635 rtx src_addr;
1636 unsigned int base_regno;
1637 HOST_WIDE_INT offset;
1638 int i;
1639
1640 /* Perform a quick check so we don't blow up below. */
1641 if (count <= 1
1642 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1643 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
1644 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
1645 return 0;
1646
1647 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
1648 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
1649
1650 if (dest_regno > 31
1651 || count != 32 - (int) dest_regno)
1652 return 0;
1653
1654 if (legitimate_indirect_address_p (src_addr, 0))
1655 {
1656 offset = 0;
1657 base_regno = REGNO (src_addr);
1658 if (base_regno == 0)
1659 return 0;
1660 }
1661 else if (rs6000_legitimate_offset_address_p (SImode, src_addr, false, false))
1662 {
1663 offset = INTVAL (XEXP (src_addr, 1));
1664 base_regno = REGNO (XEXP (src_addr, 0));
1665 }
1666 else
1667 return 0;
1668
1669 for (i = 0; i < count; i++)
1670 {
1671 rtx elt = XVECEXP (op, 0, i);
1672 rtx newaddr;
1673 rtx addr_reg;
1674 HOST_WIDE_INT newoffset;
1675
1676 if (GET_CODE (elt) != SET
1677 || GET_CODE (SET_DEST (elt)) != REG
1678 || GET_MODE (SET_DEST (elt)) != SImode
1679 || REGNO (SET_DEST (elt)) != dest_regno + i
1680 || GET_CODE (SET_SRC (elt)) != MEM
1681 || GET_MODE (SET_SRC (elt)) != SImode)
1682 return 0;
1683 newaddr = XEXP (SET_SRC (elt), 0);
1684 if (legitimate_indirect_address_p (newaddr, 0))
1685 {
1686 newoffset = 0;
1687 addr_reg = newaddr;
1688 }
1689 else if (rs6000_legitimate_offset_address_p (SImode, newaddr, false, false))
1690 {
1691 addr_reg = XEXP (newaddr, 0);
1692 newoffset = INTVAL (XEXP (newaddr, 1));
1693 }
1694 else
1695 return 0;
1696 if (REGNO (addr_reg) != base_regno
1697 || newoffset != offset + 4 * i)
1698 return 0;
1699 }
1700
1701 return 1;
1702 })
1703
1704 ;; Return 1 if OP is valid for stmw insn, known to be a PARALLEL.
1705 (define_predicate "stmw_operation"
1706 (match_code "parallel")
1707 {
1708 int count = XVECLEN (op, 0);
1709 unsigned int src_regno;
1710 rtx dest_addr;
1711 unsigned int base_regno;
1712 HOST_WIDE_INT offset;
1713 int i;
1714
1715 /* Perform a quick check so we don't blow up below. */
1716 if (count <= 1
1717 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1718 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
1719 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
1720 return 0;
1721
1722 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
1723 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
1724
1725 if (src_regno > 31
1726 || count != 32 - (int) src_regno)
1727 return 0;
1728
1729 if (legitimate_indirect_address_p (dest_addr, 0))
1730 {
1731 offset = 0;
1732 base_regno = REGNO (dest_addr);
1733 if (base_regno == 0)
1734 return 0;
1735 }
1736 else if (rs6000_legitimate_offset_address_p (SImode, dest_addr, false, false))
1737 {
1738 offset = INTVAL (XEXP (dest_addr, 1));
1739 base_regno = REGNO (XEXP (dest_addr, 0));
1740 }
1741 else
1742 return 0;
1743
1744 for (i = 0; i < count; i++)
1745 {
1746 rtx elt = XVECEXP (op, 0, i);
1747 rtx newaddr;
1748 rtx addr_reg;
1749 HOST_WIDE_INT newoffset;
1750
1751 if (GET_CODE (elt) != SET
1752 || GET_CODE (SET_SRC (elt)) != REG
1753 || GET_MODE (SET_SRC (elt)) != SImode
1754 || REGNO (SET_SRC (elt)) != src_regno + i
1755 || GET_CODE (SET_DEST (elt)) != MEM
1756 || GET_MODE (SET_DEST (elt)) != SImode)
1757 return 0;
1758 newaddr = XEXP (SET_DEST (elt), 0);
1759 if (legitimate_indirect_address_p (newaddr, 0))
1760 {
1761 newoffset = 0;
1762 addr_reg = newaddr;
1763 }
1764 else if (rs6000_legitimate_offset_address_p (SImode, newaddr, false, false))
1765 {
1766 addr_reg = XEXP (newaddr, 0);
1767 newoffset = INTVAL (XEXP (newaddr, 1));
1768 }
1769 else
1770 return 0;
1771 if (REGNO (addr_reg) != base_regno
1772 || newoffset != offset + 4 * i)
1773 return 0;
1774 }
1775
1776 return 1;
1777 })
1778
1779 ;; Return 1 if OP is a stack tie operand.
1780 (define_predicate "tie_operand"
1781 (match_code "parallel")
1782 {
1783 return (GET_CODE (XVECEXP (op, 0, 0)) == SET
1784 && GET_CODE (XEXP (XVECEXP (op, 0, 0), 0)) == MEM
1785 && GET_MODE (XEXP (XVECEXP (op, 0, 0), 0)) == BLKmode
1786 && XEXP (XVECEXP (op, 0, 0), 1) == const0_rtx);
1787 })
1788
1789 ;; Match a small code model toc reference (or medium and large
1790 ;; model toc references before reload).
1791 (define_predicate "small_toc_ref"
1792 (match_code "unspec,plus")
1793 {
1794 if (GET_CODE (op) == PLUS && add_cint_operand (XEXP (op, 1), mode))
1795 op = XEXP (op, 0);
1796
1797 return GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_TOCREL;
1798 })
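;; Added example of what the above matches: a bare
;; (unspec [(symbol_ref ...)] UNSPEC_TOCREL), or such an unspec wrapped in a
;; (plus ...) whose other operand is an addable constant offset.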
1799
1800 ;; Match the first insn (addis) in fusing the combination of addis and loads to
1801 ;; GPR registers on power8.
1802 (define_predicate "fusion_gpr_addis"
1803 (match_code "const_int,high,plus")
1804 {
1805 HOST_WIDE_INT value;
1806 rtx int_const;
1807
1808 if (GET_CODE (op) == HIGH)
1809 return 1;
1810
1811 if (CONST_INT_P (op))
1812 int_const = op;
1813
1814 else if (GET_CODE (op) == PLUS
1815 && base_reg_operand (XEXP (op, 0), Pmode)
1816 && CONST_INT_P (XEXP (op, 1)))
1817 int_const = XEXP (op, 1);
1818
1819 else
1820 return 0;
1821
1822 /* Power8 currently will only do the fusion if the top 11 bits of the addis
1823 value are all 1's or 0's. */
1824 value = INTVAL (int_const);
1825 if ((value & (HOST_WIDE_INT)0xffff) != 0)
1826 return 0;
1827
1828 if ((value & (HOST_WIDE_INT)0xffff0000) == 0)
1829 return 0;
1830
1831 return (IN_RANGE (value >> 16, -32, 31));
1832 })
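;; Added note: after the checks above, the addis immediate (value >> 16) must
;; lie in [-32, 31], i.e. its top 11 bits are all copies of the sign bit.  For
;; example, 0x001f0000 (immediate 31) is accepted, while 0x00200000
;; (immediate 32) is rejected.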
1833
1834 ;; Match the second insn (lbz, lhz, lwz, ld) in fusing the combination of addis
1835 ;; and loads to GPR registers on power8.
1836 (define_predicate "fusion_gpr_mem_load"
1837 (match_code "mem,sign_extend,zero_extend")
1838 {
1839 rtx addr, base, offset;
1840
1841 /* Handle sign/zero extend. */
1842 if (GET_CODE (op) == ZERO_EXTEND
1843 || (TARGET_P8_FUSION_SIGN && GET_CODE (op) == SIGN_EXTEND))
1844 {
1845 op = XEXP (op, 0);
1846 mode = GET_MODE (op);
1847 }
1848
1849 if (!MEM_P (op))
1850 return 0;
1851
1852 switch (mode)
1853 {
1854 case QImode:
1855 case HImode:
1856 case SImode:
1857 break;
1858
1859 case DImode:
1860 if (!TARGET_POWERPC64)
1861 return 0;
1862 break;
1863
1864 default:
1865 return 0;
1866 }
1867
1868 addr = XEXP (op, 0);
1869 if (GET_CODE (addr) != PLUS && GET_CODE (addr) != LO_SUM)
1870 return 0;
1871
1872 base = XEXP (addr, 0);
1873 if (!base_reg_operand (base, GET_MODE (base)))
1874 return 0;
1875
1876 offset = XEXP (addr, 1);
1877
1878 if (GET_CODE (addr) == PLUS)
1879 return satisfies_constraint_I (offset);
1880
1881 else if (GET_CODE (addr) == LO_SUM)
1882 {
1883 if (TARGET_XCOFF || (TARGET_ELF && TARGET_POWERPC64))
1884 return small_toc_ref (offset, GET_MODE (offset));
1885
1886 else if (TARGET_ELF && !TARGET_POWERPC64)
1887 return CONSTANT_P (offset);
1888 }
1889
1890 return 0;
1891 })
1892
1893 ;; Match a GPR load (lbz, lhz, lwz, ld) that uses a combined address in the
1894 ;; memory field with both the addis and the memory offset. Sign extension
1895 ;; is not handled here, since lha and lwa are not fused.
1896 (define_predicate "fusion_gpr_mem_combo"
1897 (match_code "mem,zero_extend")
1898 {
1899 rtx addr, base, offset;
1900
1901 /* Handle zero extend. */
1902 if (GET_CODE (op) == ZERO_EXTEND)
1903 {
1904 op = XEXP (op, 0);
1905 mode = GET_MODE (op);
1906 }
1907
1908 if (!MEM_P (op))
1909 return 0;
1910
1911 switch (mode)
1912 {
1913 case QImode:
1914 case HImode:
1915 case SImode:
1916 break;
1917
1918 case DImode:
1919 if (!TARGET_POWERPC64)
1920 return 0;
1921 break;
1922
1923 default:
1924 return 0;
1925 }
1926
1927 addr = XEXP (op, 0);
1928 if (GET_CODE (addr) != PLUS && GET_CODE (addr) != LO_SUM)
1929 return 0;
1930
1931 base = XEXP (addr, 0);
1932 if (!fusion_gpr_addis (base, GET_MODE (base)))
1933 return 0;
1934
1935 offset = XEXP (addr, 1);
1936 if (GET_CODE (addr) == PLUS)
1937 return satisfies_constraint_I (offset);
1938
1939 else if (GET_CODE (addr) == LO_SUM)
1940 {
1941 if (TARGET_XCOFF || (TARGET_ELF && TARGET_POWERPC64))
1942 return small_toc_ref (offset, GET_MODE (offset));
1943
1944 else if (TARGET_ELF && !TARGET_POWERPC64)
1945 return CONSTANT_P (offset);
1946 }
1947
1948 return 0;
1949 })