]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/simplify-rtx.c
hwint.c: New.
[thirdparty/gcc.git] / gcc / simplify-rtx.c
CommitLineData
749a2da1 1/* RTL simplification functions for GNU compiler.
af841dbd 2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
acc2dd54 3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
eeef0e45 4 Free Software Foundation, Inc.
0cedb36c 5
1322177d 6This file is part of GCC.
0cedb36c 7
1322177d
LB
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
9dcd6f09 10Software Foundation; either version 3, or (at your option) any later
1322177d 11version.
0cedb36c 12
1322177d
LB
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
0cedb36c
JL
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
0cedb36c
JL
21
22
23#include "config.h"
0cedb36c 24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
0cedb36c 27#include "rtl.h"
efdc7e19 28#include "tree.h"
0cedb36c
JL
29#include "tm_p.h"
30#include "regs.h"
31#include "hard-reg-set.h"
32#include "flags.h"
0cedb36c
JL
33#include "insn-config.h"
34#include "recog.h"
35#include "function.h"
36#include "expr.h"
718f9c0f 37#include "diagnostic-core.h"
0cedb36c 38#include "output.h"
eab5c70a 39#include "ggc.h"
7daebb7a 40#include "target.h"
0cedb36c
JL
41
42/* Simplification and canonicalization of RTL. */
43
3839069b
ZW
44/* Much code operates on (low, high) pairs; the low value is an
45 unsigned wide int, the high value a signed wide int. We
46 occasionally need to sign extend from low to high as if low were a
47 signed wide int. */
ba34d877 48#define HWI_SIGN_EXTEND(low) \
3839069b 49 ((((HOST_WIDE_INT) low) < 0) ? ((HOST_WIDE_INT) -1) : ((HOST_WIDE_INT) 0))
0cedb36c 50
f7d504c2
KG
51static rtx neg_const_int (enum machine_mode, const_rtx);
52static bool plus_minus_operand_p (const_rtx);
7e0b4eae 53static bool simplify_plus_minus_op_data_cmp (rtx, rtx);
1941069a 54static rtx simplify_plus_minus (enum rtx_code, enum machine_mode, rtx, rtx);
550d1387
GK
55static rtx simplify_immed_subreg (enum machine_mode, rtx, enum machine_mode,
56 unsigned int);
dd61aa98
RS
57static rtx simplify_associative_operation (enum rtx_code, enum machine_mode,
58 rtx, rtx);
c6fb08ad
PB
59static rtx simplify_relational_operation_1 (enum rtx_code, enum machine_mode,
60 enum machine_mode, rtx, rtx);
0a67e02c
PB
61static rtx simplify_unary_operation_1 (enum rtx_code, enum machine_mode, rtx);
62static rtx simplify_binary_operation_1 (enum rtx_code, enum machine_mode,
63 rtx, rtx, rtx, rtx);
aff8a8d5
CM
64\f
65/* Negate a CONST_INT rtx, truncating (because a conversion from a
23d1aac4 66 maximally negative number can overflow). */
aff8a8d5 67static rtx
f7d504c2 68neg_const_int (enum machine_mode mode, const_rtx i)
aff8a8d5 69{
2496c7bd 70 return gen_int_mode (- INTVAL (i), mode);
aff8a8d5
CM
71}
72
0b24db88
RS
73/* Test whether expression, X, is an immediate constant that represents
74 the most significant bit of machine mode MODE. */
75
b757b9f8 76bool
f7d504c2 77mode_signbit_p (enum machine_mode mode, const_rtx x)
0b24db88
RS
78{
79 unsigned HOST_WIDE_INT val;
80 unsigned int width;
81
82 if (GET_MODE_CLASS (mode) != MODE_INT)
83 return false;
84
85 width = GET_MODE_BITSIZE (mode);
86 if (width == 0)
87 return false;
b8698a0f 88
0b24db88 89 if (width <= HOST_BITS_PER_WIDE_INT
481683e1 90 && CONST_INT_P (x))
0b24db88
RS
91 val = INTVAL (x);
92 else if (width <= 2 * HOST_BITS_PER_WIDE_INT
93 && GET_CODE (x) == CONST_DOUBLE
94 && CONST_DOUBLE_LOW (x) == 0)
95 {
96 val = CONST_DOUBLE_HIGH (x);
97 width -= HOST_BITS_PER_WIDE_INT;
98 }
99 else
100 return false;
101
102 if (width < HOST_BITS_PER_WIDE_INT)
103 val &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;
104 return val == ((unsigned HOST_WIDE_INT) 1 << (width - 1));
105}
749a2da1 106\f
786de7eb 107/* Make a binary operation by properly ordering the operands and
0cedb36c
JL
108 seeing if the expression folds. */
109
110rtx
46c5ad27
AJ
111simplify_gen_binary (enum rtx_code code, enum machine_mode mode, rtx op0,
112 rtx op1)
0cedb36c
JL
113{
114 rtx tem;
115
0cedb36c
JL
116 /* If this simplifies, do it. */
117 tem = simplify_binary_operation (code, mode, op0, op1);
0cedb36c
JL
118 if (tem)
119 return tem;
120
68162a97
ILT
121 /* Put complex operands first and constants second if commutative. */
122 if (GET_RTX_CLASS (code) == RTX_COMM_ARITH
123 && swap_commutative_operands_p (op0, op1))
124 tem = op0, op0 = op1, op1 = tem;
125
e16e3291 126 return gen_rtx_fmt_ee (code, mode, op0, op1);
0cedb36c
JL
127}
128\f
5a2aa3bd 129/* If X is a MEM referencing the constant pool, return the real value.
4ba5f925 130 Otherwise return X. */
732910b9 131rtx
46c5ad27 132avoid_constant_pool_reference (rtx x)
4ba5f925 133{
7daebb7a 134 rtx c, tmp, addr;
5a2aa3bd 135 enum machine_mode cmode;
bdb82177 136 HOST_WIDE_INT offset = 0;
5a2aa3bd 137
7daebb7a
RS
138 switch (GET_CODE (x))
139 {
140 case MEM:
141 break;
142
143 case FLOAT_EXTEND:
144 /* Handle float extensions of constant pool references. */
145 tmp = XEXP (x, 0);
146 c = avoid_constant_pool_reference (tmp);
147 if (c != tmp && GET_CODE (c) == CONST_DOUBLE)
148 {
149 REAL_VALUE_TYPE d;
150
151 REAL_VALUE_FROM_CONST_DOUBLE (d, c);
152 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
153 }
154 return x;
155
156 default:
157 return x;
158 }
159
d82a02fa
AK
160 if (GET_MODE (x) == BLKmode)
161 return x;
162
5a2aa3bd
RH
163 addr = XEXP (x, 0);
164
59e4e217 165 /* Call target hook to avoid the effects of -fpic etc.... */
5fd9b178 166 addr = targetm.delegitimize_address (addr);
7daebb7a 167
bdb82177
PB
168 /* Split the address into a base and integer offset. */
169 if (GET_CODE (addr) == CONST
170 && GET_CODE (XEXP (addr, 0)) == PLUS
481683e1 171 && CONST_INT_P (XEXP (XEXP (addr, 0), 1)))
bdb82177
PB
172 {
173 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
174 addr = XEXP (XEXP (addr, 0), 0);
175 }
176
11f3e4c7
RS
177 if (GET_CODE (addr) == LO_SUM)
178 addr = XEXP (addr, 1);
179
bdb82177
PB
180 /* If this is a constant pool reference, we can turn it into its
181 constant and hope that simplifications happen. */
182 if (GET_CODE (addr) == SYMBOL_REF
183 && CONSTANT_POOL_ADDRESS_P (addr))
5a2aa3bd 184 {
bdb82177
PB
185 c = get_pool_constant (addr);
186 cmode = get_pool_mode (addr);
187
188 /* If we're accessing the constant in a different mode than it was
189 originally stored, attempt to fix that up via subreg simplifications.
190 If that fails we have no choice but to return the original memory. */
191 if (offset != 0 || cmode != GET_MODE (x))
192 {
193 rtx tem = simplify_subreg (GET_MODE (x), c, cmode, offset);
194 if (tem && CONSTANT_P (tem))
195 return tem;
196 }
197 else
198 return c;
5a2aa3bd
RH
199 }
200
bdb82177 201 return x;
4ba5f925
JH
202}
203\f
b5b8b0ac
AO
/* Simplify a MEM based on its attributes.  This is the default
   delegitimize_address target hook, and it's recommended that every
   overrider call it.

   If X is a MEM whose MEM_EXPR identifies a static or thread-local
   VAR_DECL (possibly through a component reference), rewrite X in
   terms of that decl's DECL_RTL.  Otherwise return X unchanged.  */

rtx
delegitimize_mem_from_attrs (rtx x)
{
  /* MEMs without MEM_OFFSETs may have been offset, so we can't just
     use their base addresses as equivalent.  */
  if (MEM_P (x)
      && MEM_EXPR (x)
      && MEM_OFFSET (x))
    {
      tree decl = MEM_EXPR (x);
      enum machine_mode mode = GET_MODE (x);
      HOST_WIDE_INT offset = 0;

      /* Reduce the MEM_EXPR to an underlying VAR_DECL plus a byte
	 offset; anything else makes DECL null and we give up.  */
      switch (TREE_CODE (decl))
	{
	default:
	  decl = NULL;
	  break;

	case VAR_DECL:
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case COMPONENT_REF:
	case BIT_FIELD_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  {
	    HOST_WIDE_INT bitsize, bitpos;
	    tree toffset;
	    int unsignedp = 0, volatilep = 0;

	    decl = get_inner_reference (decl, &bitsize, &bitpos, &toffset,
					&mode, &unsignedp, &volatilep, false);
	    /* Only accept a reference that covers a whole mode-sized,
	       byte-aligned piece with a constant offset.  */
	    if (bitsize != GET_MODE_BITSIZE (mode)
		|| (bitpos % BITS_PER_UNIT)
		|| (toffset && !host_integerp (toffset, 0)))
	      decl = NULL;
	    else
	      {
		offset += bitpos / BITS_PER_UNIT;
		if (toffset)
		  offset += TREE_INT_CST_LOW (toffset);
	      }
	    break;
	  }
	}

      /* Only static or thread-local variables with RTL already set
	 can be substituted; their addresses are link-time stable.  */
      if (decl
	  && mode == GET_MODE (x)
	  && TREE_CODE (decl) == VAR_DECL
	  && (TREE_STATIC (decl)
	      || DECL_THREAD_LOCAL_P (decl))
	  && DECL_RTL_SET_P (decl)
	  && MEM_P (DECL_RTL (decl)))
	{
	  rtx newx;

	  offset += INTVAL (MEM_OFFSET (x));

	  newx = DECL_RTL (decl);

	  if (MEM_P (newx))
	    {
	      rtx n = XEXP (newx, 0), o = XEXP (x, 0);

	      /* Avoid creating a new MEM needlessly if we already had
		 the same address.  We do if there's no OFFSET and the
		 old address X is identical to NEWX, or if X is of the
		 form (plus NEWX OFFSET), or the NEWX is of the form
		 (plus Y (const_int Z)) and X is that with the offset
		 added: (plus Y (const_int Z+OFFSET)).

		 NOTE: the condition below deliberately assigns to N
		 and O as it walks down the PLUS expressions.  */
	      if (!((offset == 0
		     || (GET_CODE (o) == PLUS
			 && GET_CODE (XEXP (o, 1)) == CONST_INT
			 && (offset == INTVAL (XEXP (o, 1))
			     || (GET_CODE (n) == PLUS
				 && GET_CODE (XEXP (n, 1)) == CONST_INT
				 && (INTVAL (XEXP (n, 1)) + offset
				     == INTVAL (XEXP (o, 1)))
				 && (n = XEXP (n, 0))))
			 && (o = XEXP (o, 0))))
		    && rtx_equal_p (o, n)))
		x = adjust_address_nv (newx, mode, offset);
	    }
	  else if (GET_MODE (x) == GET_MODE (newx)
		   && offset == 0)
	    x = newx;
	}
    }

  return x;
}
303\f
d9c695ff
RK
304/* Make a unary operation by first seeing if it folds and otherwise making
305 the specified operation. */
306
307rtx
46c5ad27
AJ
308simplify_gen_unary (enum rtx_code code, enum machine_mode mode, rtx op,
309 enum machine_mode op_mode)
d9c695ff
RK
310{
311 rtx tem;
312
313 /* If this simplifies, use it. */
314 if ((tem = simplify_unary_operation (code, mode, op, op_mode)) != 0)
315 return tem;
316
317 return gen_rtx_fmt_e (code, mode, op);
318}
319
320/* Likewise for ternary operations. */
321
322rtx
46c5ad27
AJ
323simplify_gen_ternary (enum rtx_code code, enum machine_mode mode,
324 enum machine_mode op0_mode, rtx op0, rtx op1, rtx op2)
d9c695ff
RK
325{
326 rtx tem;
327
328 /* If this simplifies, use it. */
329 if (0 != (tem = simplify_ternary_operation (code, mode, op0_mode,
330 op0, op1, op2)))
331 return tem;
332
333 return gen_rtx_fmt_eee (code, mode, op0, op1, op2);
334}
c6fb08ad 335
141e454b 336/* Likewise, for relational operations.
c6fb08ad 337 CMP_MODE specifies mode comparison is done in. */
d9c695ff
RK
338
339rtx
46c5ad27
AJ
340simplify_gen_relational (enum rtx_code code, enum machine_mode mode,
341 enum machine_mode cmp_mode, rtx op0, rtx op1)
d9c695ff
RK
342{
343 rtx tem;
344
c6fb08ad
PB
345 if (0 != (tem = simplify_relational_operation (code, mode, cmp_mode,
346 op0, op1)))
347 return tem;
bc9c18c3 348
d9c695ff
RK
349 return gen_rtx_fmt_ee (code, mode, op0, op1);
350}
351\f
457eeaae
JJ
/* If FN is NULL, replace all occurrences of OLD_RTX in X with copy_rtx (DATA)
   and simplify the result.  If FN is non-NULL, call this callback on each
   X, if it returns non-NULL, replace X with its return value and simplify the
   result.

   Recurses over X's operands, rebuilding (and re-simplifying) only the
   subexpressions that actually changed; if nothing changes, X itself is
   returned so callers can use pointer equality to detect "no change".  */

rtx
simplify_replace_fn_rtx (rtx x, const_rtx old_rtx,
			 rtx (*fn) (rtx, const_rtx, void *), void *data)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode op_mode;
  const char *fmt;
  rtx op0, op1, op2, newx, op;
  rtvec vec, newvec;
  int i, j;

  /* Callback mode is the uncommon path (hence the expect-false hint):
     the callback decides what, if anything, replaces this node.  */
  if (__builtin_expect (fn != NULL, 0))
    {
      newx = fn (x, old_rtx, data);
      if (newx)
	return newx;
    }
  else if (rtx_equal_p (x, old_rtx))
    /* Plain substitution mode: DATA is the replacement rtx.  */
    return copy_rtx ((rtx) data);

  /* Dispatch on the rtx class so each rebuilt node goes through the
     matching simplify_gen_* routine.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      op0 = XEXP (x, 0);
      op_mode = GET_MODE (op0);
      op0 = simplify_replace_fn_rtx (op0, old_rtx, fn, data);
      if (op0 == XEXP (x, 0))
	return x;
      return simplify_gen_unary (code, mode, op0, op_mode);

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      op0 = simplify_replace_fn_rtx (XEXP (x, 0), old_rtx, fn, data);
      op1 = simplify_replace_fn_rtx (XEXP (x, 1), old_rtx, fn, data);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	return x;
      return simplify_gen_binary (code, mode, op0, op1);

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      /* The comparison mode comes from whichever operand has one.  */
      op_mode = GET_MODE (op0) != VOIDmode ? GET_MODE (op0) : GET_MODE (op1);
      op0 = simplify_replace_fn_rtx (op0, old_rtx, fn, data);
      op1 = simplify_replace_fn_rtx (op1, old_rtx, fn, data);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	return x;
      return simplify_gen_relational (code, mode, op_mode, op0, op1);

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      op0 = XEXP (x, 0);
      op_mode = GET_MODE (op0);
      op0 = simplify_replace_fn_rtx (op0, old_rtx, fn, data);
      op1 = simplify_replace_fn_rtx (XEXP (x, 1), old_rtx, fn, data);
      op2 = simplify_replace_fn_rtx (XEXP (x, 2), old_rtx, fn, data);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1) && op2 == XEXP (x, 2))
	return x;
      /* If the original operand had no mode, take it from the
	 replacement.  */
      if (op_mode == VOIDmode)
	op_mode = GET_MODE (op0);
      return simplify_gen_ternary (code, mode, op_mode, op0, op1, op2);

    case RTX_EXTRA:
      if (code == SUBREG)
	{
	  op0 = simplify_replace_fn_rtx (SUBREG_REG (x), old_rtx, fn, data);
	  if (op0 == SUBREG_REG (x))
	    return x;
	  op0 = simplify_gen_subreg (GET_MODE (x), op0,
				     GET_MODE (SUBREG_REG (x)),
				     SUBREG_BYTE (x));
	  /* simplify_gen_subreg may fail; keep the original then.  */
	  return op0 ? op0 : x;
	}
      break;

    case RTX_OBJ:
      if (code == MEM)
	{
	  op0 = simplify_replace_fn_rtx (XEXP (x, 0), old_rtx, fn, data);
	  if (op0 == XEXP (x, 0))
	    return x;
	  return replace_equiv_address_nv (x, op0);
	}
      else if (code == LO_SUM)
	{
	  op0 = simplify_replace_fn_rtx (XEXP (x, 0), old_rtx, fn, data);
	  op1 = simplify_replace_fn_rtx (XEXP (x, 1), old_rtx, fn, data);

	  /* (lo_sum (high x) x) -> x */
	  if (GET_CODE (op0) == HIGH && rtx_equal_p (XEXP (op0, 0), op1))
	    return op1;

	  if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	    return x;
	  return gen_rtx_LO_SUM (mode, op0, op1);
	}
      break;

    default:
      break;
    }

  /* Generic fallback for codes not handled above: walk the format
     string and rebuild operands copy-on-write, sharing X until the
     first real change is found.  */
  newx = x;
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; fmt[i]; i++)
    switch (fmt[i])
      {
      case 'E':
	vec = XVEC (x, i);
	newvec = XVEC (newx, i);
	for (j = 0; j < GET_NUM_ELEM (vec); j++)
	  {
	    op = simplify_replace_fn_rtx (RTVEC_ELT (vec, j),
					  old_rtx, fn, data);
	    if (op != RTVEC_ELT (vec, j))
	      {
		/* First change in this vector: shallow-copy the vector
		   (and X itself if still shared) before writing.  */
		if (newvec == vec)
		  {
		    newvec = shallow_copy_rtvec (vec);
		    if (x == newx)
		      newx = shallow_copy_rtx (x);
		    XVEC (newx, i) = newvec;
		  }
		RTVEC_ELT (newvec, j) = op;
	      }
	  }
	break;

      case 'e':
	if (XEXP (x, i))
	  {
	    op = simplify_replace_fn_rtx (XEXP (x, i), old_rtx, fn, data);
	    if (op != XEXP (x, i))
	      {
		if (x == newx)
		  newx = shallow_copy_rtx (x);
		XEXP (newx, i) = op;
	      }
	  }
	break;
      }
  return newx;
}
3af4ba41
RS
501
502/* Replace all occurrences of OLD_RTX in X with NEW_RTX and try to simplify the
503 resulting RTX. Return a new RTX which is as simplified as possible. */
504
505rtx
506simplify_replace_rtx (rtx x, const_rtx old_rtx, rtx new_rtx)
507{
508 return simplify_replace_fn_rtx (x, old_rtx, 0, new_rtx);
509}
d9c695ff 510\f
0cedb36c
JL
511/* Try to simplify a unary operation CODE whose output mode is to be
512 MODE with input operand OP whose mode was originally OP_MODE.
513 Return zero if no simplification can be made. */
0cedb36c 514rtx
46c5ad27
AJ
515simplify_unary_operation (enum rtx_code code, enum machine_mode mode,
516 rtx op, enum machine_mode op_mode)
0a67e02c
PB
517{
518 rtx trueop, tem;
519
0a67e02c
PB
520 trueop = avoid_constant_pool_reference (op);
521
522 tem = simplify_const_unary_operation (code, mode, trueop, op_mode);
523 if (tem)
524 return tem;
525
526 return simplify_unary_operation_1 (code, mode, op);
527}
528
529/* Perform some simplifications we can do even if the operands
530 aren't constant. */
531static rtx
532simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
533{
534 enum rtx_code reversed;
535 rtx temp;
536
537 switch (code)
538 {
539 case NOT:
540 /* (not (not X)) == X. */
541 if (GET_CODE (op) == NOT)
542 return XEXP (op, 0);
543
bd1ef757
PB
544 /* (not (eq X Y)) == (ne X Y), etc. if BImode or the result of the
545 comparison is all ones. */
0a67e02c
PB
546 if (COMPARISON_P (op)
547 && (mode == BImode || STORE_FLAG_VALUE == -1)
548 && ((reversed = reversed_comparison_code (op, NULL_RTX)) != UNKNOWN))
549 return simplify_gen_relational (reversed, mode, VOIDmode,
550 XEXP (op, 0), XEXP (op, 1));
551
552 /* (not (plus X -1)) can become (neg X). */
553 if (GET_CODE (op) == PLUS
554 && XEXP (op, 1) == constm1_rtx)
555 return simplify_gen_unary (NEG, mode, XEXP (op, 0), mode);
556
557 /* Similarly, (not (neg X)) is (plus X -1). */
558 if (GET_CODE (op) == NEG)
559 return plus_constant (XEXP (op, 0), -1);
560
561 /* (not (xor X C)) for C constant is (xor X D) with D = ~C. */
562 if (GET_CODE (op) == XOR
481683e1 563 && CONST_INT_P (XEXP (op, 1))
0a67e02c
PB
564 && (temp = simplify_unary_operation (NOT, mode,
565 XEXP (op, 1), mode)) != 0)
566 return simplify_gen_binary (XOR, mode, XEXP (op, 0), temp);
567
568 /* (not (plus X C)) for signbit C is (xor X D) with D = ~C. */
569 if (GET_CODE (op) == PLUS
481683e1 570 && CONST_INT_P (XEXP (op, 1))
0a67e02c
PB
571 && mode_signbit_p (mode, XEXP (op, 1))
572 && (temp = simplify_unary_operation (NOT, mode,
573 XEXP (op, 1), mode)) != 0)
574 return simplify_gen_binary (XOR, mode, XEXP (op, 0), temp);
575
576
577 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for
578 operands other than 1, but that is not valid. We could do a
579 similar simplification for (not (lshiftrt C X)) where C is
580 just the sign bit, but this doesn't seem common enough to
581 bother with. */
582 if (GET_CODE (op) == ASHIFT
583 && XEXP (op, 0) == const1_rtx)
584 {
585 temp = simplify_gen_unary (NOT, mode, const1_rtx, mode);
586 return simplify_gen_binary (ROTATE, mode, temp, XEXP (op, 1));
587 }
588
0a67e02c
PB
589 /* (not (ashiftrt foo C)) where C is the number of bits in FOO
590 minus 1 is (ge foo (const_int 0)) if STORE_FLAG_VALUE is -1,
591 so we can perform the above simplification. */
b8698a0f 592
0a67e02c
PB
593 if (STORE_FLAG_VALUE == -1
594 && GET_CODE (op) == ASHIFTRT
481683e1 595 && GET_CODE (XEXP (op, 1))
0a67e02c
PB
596 && INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
597 return simplify_gen_relational (GE, mode, VOIDmode,
598 XEXP (op, 0), const0_rtx);
599
bd1ef757
PB
600
601 if (GET_CODE (op) == SUBREG
602 && subreg_lowpart_p (op)
603 && (GET_MODE_SIZE (GET_MODE (op))
604 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
605 && GET_CODE (SUBREG_REG (op)) == ASHIFT
606 && XEXP (SUBREG_REG (op), 0) == const1_rtx)
607 {
608 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op));
609 rtx x;
610
611 x = gen_rtx_ROTATE (inner_mode,
612 simplify_gen_unary (NOT, inner_mode, const1_rtx,
613 inner_mode),
614 XEXP (SUBREG_REG (op), 1));
615 return rtl_hooks.gen_lowpart_no_emit (mode, x);
616 }
617
618 /* Apply De Morgan's laws to reduce number of patterns for machines
619 with negating logical insns (and-not, nand, etc.). If result has
620 only one NOT, put it first, since that is how the patterns are
621 coded. */
622
623 if (GET_CODE (op) == IOR || GET_CODE (op) == AND)
624 {
625 rtx in1 = XEXP (op, 0), in2 = XEXP (op, 1);
626 enum machine_mode op_mode;
627
628 op_mode = GET_MODE (in1);
629 in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);
630
631 op_mode = GET_MODE (in2);
632 if (op_mode == VOIDmode)
633 op_mode = mode;
634 in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);
635
636 if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
637 {
638 rtx tem = in2;
639 in2 = in1; in1 = tem;
640 }
641
642 return gen_rtx_fmt_ee (GET_CODE (op) == IOR ? AND : IOR,
643 mode, in1, in2);
644 }
0a67e02c
PB
645 break;
646
647 case NEG:
648 /* (neg (neg X)) == X. */
649 if (GET_CODE (op) == NEG)
650 return XEXP (op, 0);
651
652 /* (neg (plus X 1)) can become (not X). */
653 if (GET_CODE (op) == PLUS
654 && XEXP (op, 1) == const1_rtx)
655 return simplify_gen_unary (NOT, mode, XEXP (op, 0), mode);
b8698a0f 656
0a67e02c
PB
657 /* Similarly, (neg (not X)) is (plus X 1). */
658 if (GET_CODE (op) == NOT)
659 return plus_constant (XEXP (op, 0), 1);
b8698a0f 660
0a67e02c
PB
661 /* (neg (minus X Y)) can become (minus Y X). This transformation
662 isn't safe for modes with signed zeros, since if X and Y are
663 both +0, (minus Y X) is the same as (minus X Y). If the
664 rounding mode is towards +infinity (or -infinity) then the two
665 expressions will be rounded differently. */
666 if (GET_CODE (op) == MINUS
667 && !HONOR_SIGNED_ZEROS (mode)
668 && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
669 return simplify_gen_binary (MINUS, mode, XEXP (op, 1), XEXP (op, 0));
b8698a0f 670
0a67e02c
PB
671 if (GET_CODE (op) == PLUS
672 && !HONOR_SIGNED_ZEROS (mode)
673 && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
674 {
675 /* (neg (plus A C)) is simplified to (minus -C A). */
481683e1 676 if (CONST_INT_P (XEXP (op, 1))
0a67e02c
PB
677 || GET_CODE (XEXP (op, 1)) == CONST_DOUBLE)
678 {
679 temp = simplify_unary_operation (NEG, mode, XEXP (op, 1), mode);
680 if (temp)
681 return simplify_gen_binary (MINUS, mode, temp, XEXP (op, 0));
682 }
683
684 /* (neg (plus A B)) is canonicalized to (minus (neg A) B). */
685 temp = simplify_gen_unary (NEG, mode, XEXP (op, 0), mode);
686 return simplify_gen_binary (MINUS, mode, temp, XEXP (op, 1));
687 }
688
689 /* (neg (mult A B)) becomes (mult (neg A) B).
690 This works even for floating-point values. */
691 if (GET_CODE (op) == MULT
692 && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
693 {
694 temp = simplify_gen_unary (NEG, mode, XEXP (op, 0), mode);
695 return simplify_gen_binary (MULT, mode, temp, XEXP (op, 1));
696 }
697
698 /* NEG commutes with ASHIFT since it is multiplication. Only do
699 this if we can then eliminate the NEG (e.g., if the operand
700 is a constant). */
701 if (GET_CODE (op) == ASHIFT)
702 {
703 temp = simplify_unary_operation (NEG, mode, XEXP (op, 0), mode);
704 if (temp)
705 return simplify_gen_binary (ASHIFT, mode, temp, XEXP (op, 1));
706 }
707
708 /* (neg (ashiftrt X C)) can be replaced by (lshiftrt X C) when
709 C is equal to the width of MODE minus 1. */
710 if (GET_CODE (op) == ASHIFTRT
481683e1 711 && CONST_INT_P (XEXP (op, 1))
0a67e02c
PB
712 && INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
713 return simplify_gen_binary (LSHIFTRT, mode,
714 XEXP (op, 0), XEXP (op, 1));
715
716 /* (neg (lshiftrt X C)) can be replaced by (ashiftrt X C) when
717 C is equal to the width of MODE minus 1. */
718 if (GET_CODE (op) == LSHIFTRT
481683e1 719 && CONST_INT_P (XEXP (op, 1))
0a67e02c
PB
720 && INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
721 return simplify_gen_binary (ASHIFTRT, mode,
722 XEXP (op, 0), XEXP (op, 1));
b8698a0f 723
bd1ef757
PB
724 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
725 if (GET_CODE (op) == XOR
726 && XEXP (op, 1) == const1_rtx
727 && nonzero_bits (XEXP (op, 0), mode) == 1)
728 return plus_constant (XEXP (op, 0), -1);
8305d786
RS
729
730 /* (neg (lt x 0)) is (ashiftrt X C) if STORE_FLAG_VALUE is 1. */
731 /* (neg (lt x 0)) is (lshiftrt X C) if STORE_FLAG_VALUE is -1. */
732 if (GET_CODE (op) == LT
71cca289
JJ
733 && XEXP (op, 1) == const0_rtx
734 && SCALAR_INT_MODE_P (GET_MODE (XEXP (op, 0))))
8305d786 735 {
0f2f71b5
RS
736 enum machine_mode inner = GET_MODE (XEXP (op, 0));
737 int isize = GET_MODE_BITSIZE (inner);
8305d786 738 if (STORE_FLAG_VALUE == 1)
0f2f71b5
RS
739 {
740 temp = simplify_gen_binary (ASHIFTRT, inner, XEXP (op, 0),
741 GEN_INT (isize - 1));
742 if (mode == inner)
743 return temp;
744 if (GET_MODE_BITSIZE (mode) > isize)
745 return simplify_gen_unary (SIGN_EXTEND, mode, temp, inner);
746 return simplify_gen_unary (TRUNCATE, mode, temp, inner);
747 }
8305d786 748 else if (STORE_FLAG_VALUE == -1)
0f2f71b5
RS
749 {
750 temp = simplify_gen_binary (LSHIFTRT, inner, XEXP (op, 0),
751 GEN_INT (isize - 1));
752 if (mode == inner)
753 return temp;
754 if (GET_MODE_BITSIZE (mode) > isize)
755 return simplify_gen_unary (ZERO_EXTEND, mode, temp, inner);
756 return simplify_gen_unary (TRUNCATE, mode, temp, inner);
757 }
8305d786 758 }
bd1ef757
PB
759 break;
760
761 case TRUNCATE:
762 /* We can't handle truncation to a partial integer mode here
763 because we don't know the real bitsize of the partial
764 integer mode. */
765 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
766 break;
767
768 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
769 if ((GET_CODE (op) == SIGN_EXTEND
770 || GET_CODE (op) == ZERO_EXTEND)
771 && GET_MODE (XEXP (op, 0)) == mode)
772 return XEXP (op, 0);
773
774 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
775 (OP:SI foo:SI) if OP is NEG or ABS. */
776 if ((GET_CODE (op) == ABS
777 || GET_CODE (op) == NEG)
778 && (GET_CODE (XEXP (op, 0)) == SIGN_EXTEND
779 || GET_CODE (XEXP (op, 0)) == ZERO_EXTEND)
780 && GET_MODE (XEXP (XEXP (op, 0), 0)) == mode)
781 return simplify_gen_unary (GET_CODE (op), mode,
782 XEXP (XEXP (op, 0), 0), mode);
783
e963b866
AN
784 /* (truncate:A (subreg:B (truncate:C X) 0)) is
785 (truncate:A X). */
bd1ef757
PB
786 if (GET_CODE (op) == SUBREG
787 && GET_CODE (SUBREG_REG (op)) == TRUNCATE
788 && subreg_lowpart_p (op))
e963b866
AN
789 return simplify_gen_unary (TRUNCATE, mode, XEXP (SUBREG_REG (op), 0),
790 GET_MODE (XEXP (SUBREG_REG (op), 0)));
bd1ef757
PB
791
792 /* If we know that the value is already truncated, we can
d3b72690
PB
793 replace the TRUNCATE with a SUBREG. Note that this is also
794 valid if TRULY_NOOP_TRUNCATION is false for the corresponding
795 modes we just have to apply a different definition for
b8698a0f 796 truncation. But don't do this for an (LSHIFTRT (MULT ...))
d3b72690
PB
797 since this will cause problems with the umulXi3_highpart
798 patterns. */
799 if ((TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
bd1ef757 800 GET_MODE_BITSIZE (GET_MODE (op)))
d3b72690 801 ? (num_sign_bit_copies (op, GET_MODE (op))
2332d585
AN
802 > (unsigned int) (GET_MODE_BITSIZE (GET_MODE (op))
803 - GET_MODE_BITSIZE (mode)))
d3b72690 804 : truncated_to_mode (mode, op))
bd1ef757
PB
805 && ! (GET_CODE (op) == LSHIFTRT
806 && GET_CODE (XEXP (op, 0)) == MULT))
807 return rtl_hooks.gen_lowpart_no_emit (mode, op);
808
809 /* A truncate of a comparison can be replaced with a subreg if
810 STORE_FLAG_VALUE permits. This is like the previous test,
811 but it works even if the comparison is done in a mode larger
812 than HOST_BITS_PER_WIDE_INT. */
813 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
814 && COMPARISON_P (op)
43c36287 815 && (STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
bd1ef757
PB
816 return rtl_hooks.gen_lowpart_no_emit (mode, op);
817 break;
818
819 case FLOAT_TRUNCATE:
15ed7b52
JG
820 if (DECIMAL_FLOAT_MODE_P (mode))
821 break;
822
bd1ef757
PB
823 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
824 if (GET_CODE (op) == FLOAT_EXTEND
825 && GET_MODE (XEXP (op, 0)) == mode)
826 return XEXP (op, 0);
827
828 /* (float_truncate:SF (float_truncate:DF foo:XF))
829 = (float_truncate:SF foo:XF).
830 This may eliminate double rounding, so it is unsafe.
831
832 (float_truncate:SF (float_extend:XF foo:DF))
833 = (float_truncate:SF foo:DF).
834
835 (float_truncate:DF (float_extend:XF foo:SF))
836 = (float_extend:SF foo:DF). */
837 if ((GET_CODE (op) == FLOAT_TRUNCATE
838 && flag_unsafe_math_optimizations)
839 || GET_CODE (op) == FLOAT_EXTEND)
840 return simplify_gen_unary (GET_MODE_SIZE (GET_MODE (XEXP (op,
841 0)))
842 > GET_MODE_SIZE (mode)
843 ? FLOAT_TRUNCATE : FLOAT_EXTEND,
844 mode,
845 XEXP (op, 0), mode);
846
847 /* (float_truncate (float x)) is (float x) */
848 if (GET_CODE (op) == FLOAT
849 && (flag_unsafe_math_optimizations
a0c64295
UB
850 || (SCALAR_FLOAT_MODE_P (GET_MODE (op))
851 && ((unsigned)significand_size (GET_MODE (op))
852 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0)))
853 - num_sign_bit_copies (XEXP (op, 0),
854 GET_MODE (XEXP (op, 0))))))))
bd1ef757
PB
855 return simplify_gen_unary (FLOAT, mode,
856 XEXP (op, 0),
857 GET_MODE (XEXP (op, 0)));
858
859 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
860 (OP:SF foo:SF) if OP is NEG or ABS. */
861 if ((GET_CODE (op) == ABS
862 || GET_CODE (op) == NEG)
863 && GET_CODE (XEXP (op, 0)) == FLOAT_EXTEND
864 && GET_MODE (XEXP (XEXP (op, 0), 0)) == mode)
865 return simplify_gen_unary (GET_CODE (op), mode,
866 XEXP (XEXP (op, 0), 0), mode);
867
868 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
869 is (float_truncate:SF x). */
870 if (GET_CODE (op) == SUBREG
871 && subreg_lowpart_p (op)
872 && GET_CODE (SUBREG_REG (op)) == FLOAT_TRUNCATE)
873 return SUBREG_REG (op);
874 break;
875
876 case FLOAT_EXTEND:
15ed7b52
JG
877 if (DECIMAL_FLOAT_MODE_P (mode))
878 break;
879
bd1ef757
PB
880 /* (float_extend (float_extend x)) is (float_extend x)
881
882 (float_extend (float x)) is (float x) assuming that double
883 rounding can't happen.
884 */
885 if (GET_CODE (op) == FLOAT_EXTEND
886 || (GET_CODE (op) == FLOAT
a0c64295 887 && SCALAR_FLOAT_MODE_P (GET_MODE (op))
bd1ef757
PB
888 && ((unsigned)significand_size (GET_MODE (op))
889 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0)))
890 - num_sign_bit_copies (XEXP (op, 0),
891 GET_MODE (XEXP (op, 0)))))))
892 return simplify_gen_unary (GET_CODE (op), mode,
893 XEXP (op, 0),
894 GET_MODE (XEXP (op, 0)));
895
896 break;
897
898 case ABS:
899 /* (abs (neg <foo>)) -> (abs <foo>) */
900 if (GET_CODE (op) == NEG)
901 return simplify_gen_unary (ABS, mode, XEXP (op, 0),
902 GET_MODE (XEXP (op, 0)));
903
904 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
905 do nothing. */
906 if (GET_MODE (op) == VOIDmode)
907 break;
908
909 /* If operand is something known to be positive, ignore the ABS. */
910 if (GET_CODE (op) == FFS || GET_CODE (op) == ABS
911 || ((GET_MODE_BITSIZE (GET_MODE (op))
912 <= HOST_BITS_PER_WIDE_INT)
913 && ((nonzero_bits (op, GET_MODE (op))
43c36287 914 & ((unsigned HOST_WIDE_INT) 1
bd1ef757
PB
915 << (GET_MODE_BITSIZE (GET_MODE (op)) - 1)))
916 == 0)))
917 return op;
918
919 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
920 if (num_sign_bit_copies (op, mode) == GET_MODE_BITSIZE (mode))
921 return gen_rtx_NEG (mode, op);
922
923 break;
924
925 case FFS:
926 /* (ffs (*_extend <X>)) = (ffs <X>) */
927 if (GET_CODE (op) == SIGN_EXTEND
928 || GET_CODE (op) == ZERO_EXTEND)
929 return simplify_gen_unary (FFS, mode, XEXP (op, 0),
930 GET_MODE (XEXP (op, 0)));
931 break;
932
933 case POPCOUNT:
9f05adb0
RS
934 switch (GET_CODE (op))
935 {
936 case BSWAP:
937 case ZERO_EXTEND:
938 /* (popcount (zero_extend <X>)) = (popcount <X>) */
939 return simplify_gen_unary (POPCOUNT, mode, XEXP (op, 0),
940 GET_MODE (XEXP (op, 0)));
941
942 case ROTATE:
943 case ROTATERT:
944 /* Rotations don't affect popcount. */
945 if (!side_effects_p (XEXP (op, 1)))
946 return simplify_gen_unary (POPCOUNT, mode, XEXP (op, 0),
947 GET_MODE (XEXP (op, 0)));
948 break;
949
950 default:
951 break;
952 }
953 break;
954
bd1ef757 955 case PARITY:
9f05adb0
RS
956 switch (GET_CODE (op))
957 {
958 case NOT:
959 case BSWAP:
960 case ZERO_EXTEND:
961 case SIGN_EXTEND:
962 return simplify_gen_unary (PARITY, mode, XEXP (op, 0),
963 GET_MODE (XEXP (op, 0)));
964
965 case ROTATE:
966 case ROTATERT:
967 /* Rotations don't affect parity. */
968 if (!side_effects_p (XEXP (op, 1)))
969 return simplify_gen_unary (PARITY, mode, XEXP (op, 0),
970 GET_MODE (XEXP (op, 0)));
971 break;
972
973 default:
974 break;
975 }
976 break;
977
978 case BSWAP:
979 /* (bswap (bswap x)) -> x. */
980 if (GET_CODE (op) == BSWAP)
981 return XEXP (op, 0);
bd1ef757
PB
982 break;
983
984 case FLOAT:
985 /* (float (sign_extend <X>)) = (float <X>). */
986 if (GET_CODE (op) == SIGN_EXTEND)
987 return simplify_gen_unary (FLOAT, mode, XEXP (op, 0),
988 GET_MODE (XEXP (op, 0)));
0a67e02c
PB
989 break;
990
991 case SIGN_EXTEND:
992 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
993 becomes just the MINUS if its mode is MODE. This allows
994 folding switch statements on machines using casesi (such as
995 the VAX). */
996 if (GET_CODE (op) == TRUNCATE
997 && GET_MODE (XEXP (op, 0)) == mode
998 && GET_CODE (XEXP (op, 0)) == MINUS
999 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
1000 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
1001 return XEXP (op, 0);
1002
1003 /* Check for a sign extension of a subreg of a promoted
1004 variable, where the promotion is sign-extended, and the
1005 target mode is the same as the variable's promotion. */
1006 if (GET_CODE (op) == SUBREG
1007 && SUBREG_PROMOTED_VAR_P (op)
1008 && ! SUBREG_PROMOTED_UNSIGNED_P (op)
4613543f
RS
1009 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (XEXP (op, 0))))
1010 return rtl_hooks.gen_lowpart_no_emit (mode, op);
0a67e02c 1011
561da6bc
JJ
1012 /* (sign_extend:M (sign_extend:N <X>)) is (sign_extend:M <X>).
1013 (sign_extend:M (zero_extend:N <X>)) is (zero_extend:M <X>). */
1014 if (GET_CODE (op) == SIGN_EXTEND || GET_CODE (op) == ZERO_EXTEND)
1015 {
1016 gcc_assert (GET_MODE_BITSIZE (mode)
1017 > GET_MODE_BITSIZE (GET_MODE (op)));
1018 return simplify_gen_unary (GET_CODE (op), mode, XEXP (op, 0),
1019 GET_MODE (XEXP (op, 0)));
1020 }
a5d8253f
JJ
1021
1022 /* (sign_extend:M (ashiftrt:N (ashift <X> (const_int I)) (const_int I)))
1023 is (sign_extend:M (subreg:O <X>)) if there is mode with
561da6bc
JJ
1024 GET_MODE_BITSIZE (N) - I bits.
1025 (sign_extend:M (lshiftrt:N (ashift <X> (const_int I)) (const_int I)))
1026 is similarly (zero_extend:M (subreg:O <X>)). */
1027 if ((GET_CODE (op) == ASHIFTRT || GET_CODE (op) == LSHIFTRT)
a5d8253f
JJ
1028 && GET_CODE (XEXP (op, 0)) == ASHIFT
1029 && CONST_INT_P (XEXP (op, 1))
1030 && XEXP (XEXP (op, 0), 1) == XEXP (op, 1)
1031 && GET_MODE_BITSIZE (GET_MODE (op)) > INTVAL (XEXP (op, 1)))
1032 {
1033 enum machine_mode tmode
1034 = mode_for_size (GET_MODE_BITSIZE (GET_MODE (op))
1035 - INTVAL (XEXP (op, 1)), MODE_INT, 1);
561da6bc
JJ
1036 gcc_assert (GET_MODE_BITSIZE (mode)
1037 > GET_MODE_BITSIZE (GET_MODE (op)));
a5d8253f
JJ
1038 if (tmode != BLKmode)
1039 {
1040 rtx inner =
1041 rtl_hooks.gen_lowpart_no_emit (tmode, XEXP (XEXP (op, 0), 0));
561da6bc
JJ
1042 return simplify_gen_unary (GET_CODE (op) == ASHIFTRT
1043 ? SIGN_EXTEND : ZERO_EXTEND,
1044 mode, inner, tmode);
a5d8253f
JJ
1045 }
1046 }
1047
0a67e02c 1048#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
d4ebfa65
BE
1049 /* As we do not know which address space the pointer is refering to,
1050 we can do this only if the target does not support different pointer
1051 or address modes depending on the address space. */
1052 if (target_default_pointer_address_modes_p ()
1053 && ! POINTERS_EXTEND_UNSIGNED
0a67e02c
PB
1054 && mode == Pmode && GET_MODE (op) == ptr_mode
1055 && (CONSTANT_P (op)
1056 || (GET_CODE (op) == SUBREG
1057 && REG_P (SUBREG_REG (op))
1058 && REG_POINTER (SUBREG_REG (op))
1059 && GET_MODE (SUBREG_REG (op)) == Pmode)))
1060 return convert_memory_address (Pmode, op);
1061#endif
1062 break;
1063
1064 case ZERO_EXTEND:
1065 /* Check for a zero extension of a subreg of a promoted
1066 variable, where the promotion is zero-extended, and the
1067 target mode is the same as the variable's promotion. */
1068 if (GET_CODE (op) == SUBREG
1069 && SUBREG_PROMOTED_VAR_P (op)
7443a71d 1070 && SUBREG_PROMOTED_UNSIGNED_P (op) > 0
4613543f
RS
1071 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (XEXP (op, 0))))
1072 return rtl_hooks.gen_lowpart_no_emit (mode, op);
0a67e02c 1073
a5d8253f
JJ
1074 /* (zero_extend:M (zero_extend:N <X>)) is (zero_extend:M <X>). */
1075 if (GET_CODE (op) == ZERO_EXTEND)
1076 return simplify_gen_unary (ZERO_EXTEND, mode, XEXP (op, 0),
1077 GET_MODE (XEXP (op, 0)));
1078
561da6bc
JJ
1079 /* (zero_extend:M (lshiftrt:N (ashift <X> (const_int I)) (const_int I)))
1080 is (zero_extend:M (subreg:O <X>)) if there is mode with
1081 GET_MODE_BITSIZE (N) - I bits. */
1082 if (GET_CODE (op) == LSHIFTRT
1083 && GET_CODE (XEXP (op, 0)) == ASHIFT
1084 && CONST_INT_P (XEXP (op, 1))
1085 && XEXP (XEXP (op, 0), 1) == XEXP (op, 1)
1086 && GET_MODE_BITSIZE (GET_MODE (op)) > INTVAL (XEXP (op, 1)))
1087 {
1088 enum machine_mode tmode
1089 = mode_for_size (GET_MODE_BITSIZE (GET_MODE (op))
1090 - INTVAL (XEXP (op, 1)), MODE_INT, 1);
1091 if (tmode != BLKmode)
1092 {
1093 rtx inner =
1094 rtl_hooks.gen_lowpart_no_emit (tmode, XEXP (XEXP (op, 0), 0));
1095 return simplify_gen_unary (ZERO_EXTEND, mode, inner, tmode);
1096 }
1097 }
1098
0a67e02c 1099#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
d4ebfa65
BE
1100 /* As we do not know which address space the pointer is refering to,
1101 we can do this only if the target does not support different pointer
1102 or address modes depending on the address space. */
1103 if (target_default_pointer_address_modes_p ()
1104 && POINTERS_EXTEND_UNSIGNED > 0
0a67e02c
PB
1105 && mode == Pmode && GET_MODE (op) == ptr_mode
1106 && (CONSTANT_P (op)
1107 || (GET_CODE (op) == SUBREG
1108 && REG_P (SUBREG_REG (op))
1109 && REG_POINTER (SUBREG_REG (op))
1110 && GET_MODE (SUBREG_REG (op)) == Pmode)))
1111 return convert_memory_address (Pmode, op);
1112#endif
1113 break;
1114
1115 default:
1116 break;
1117 }
b8698a0f 1118
0a67e02c
PB
1119 return 0;
1120}
1121
/* Try to compute the value of a unary operation CODE whose output mode is to
   be MODE with input operand OP whose mode was originally OP_MODE.
   Return zero if the value cannot be computed.  */
rtx
simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
				rtx op, enum machine_mode op_mode)
{
  unsigned int width = GET_MODE_BITSIZE (mode);

  /* (vec_duplicate X) on a constant X folds to a CONST_VECTOR whose
     elements are all X (or copies of X's elements when X is itself a
     narrower CONST_VECTOR).  */
  if (code == VEC_DUPLICATE)
    {
      gcc_assert (VECTOR_MODE_P (mode));
      if (GET_MODE (op) != VOIDmode)
	{
	  if (!VECTOR_MODE_P (GET_MODE (op)))
	    gcc_assert (GET_MODE_INNER (mode) == GET_MODE (op));
	  else
	    gcc_assert (GET_MODE_INNER (mode) == GET_MODE_INNER
						(GET_MODE (op)));
	}
      if (CONST_INT_P (op) || GET_CODE (op) == CONST_DOUBLE
	  || GET_CODE (op) == CONST_VECTOR)
	{
          int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
          unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
	  rtvec v = rtvec_alloc (n_elts);
	  unsigned int i;

	  if (GET_CODE (op) != CONST_VECTOR)
	    /* Scalar constant: replicate it into every element.  */
	    for (i = 0; i < n_elts; i++)
	      RTVEC_ELT (v, i) = op;
	  else
	    {
	      /* Vector constant: tile its elements cyclically.  The
		 input vector must evenly divide the output vector.  */
	      enum machine_mode inmode = GET_MODE (op);
              int in_elt_size = GET_MODE_SIZE (GET_MODE_INNER (inmode));
              unsigned in_n_elts = (GET_MODE_SIZE (inmode) / in_elt_size);

	      gcc_assert (in_n_elts < n_elts);
	      gcc_assert ((n_elts % in_n_elts) == 0);
	      for (i = 0; i < n_elts; i++)
		RTVEC_ELT (v, i) = CONST_VECTOR_ELT (op, i % in_n_elts);
	    }
	  return gen_rtx_CONST_VECTOR (mode, v);
	}
    }

  /* For any other unary operation on a CONST_VECTOR, fold it
     element-wise; fail if any element fails to fold.  */
  if (VECTOR_MODE_P (mode) && GET_CODE (op) == CONST_VECTOR)
    {
      int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
      unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
      enum machine_mode opmode = GET_MODE (op);
      int op_elt_size = GET_MODE_SIZE (GET_MODE_INNER (opmode));
      unsigned op_n_elts = (GET_MODE_SIZE (opmode) / op_elt_size);
      rtvec v = rtvec_alloc (n_elts);
      unsigned int i;

      gcc_assert (op_n_elts == n_elts);
      for (i = 0; i < n_elts; i++)
	{
	  rtx x = simplify_unary_operation (code, GET_MODE_INNER (mode),
					    CONST_VECTOR_ELT (op, i),
					    GET_MODE_INNER (opmode));
	  if (!x)
	    return 0;
	  RTVEC_ELT (v, i) = x;
	}
      return gen_rtx_CONST_VECTOR (mode, v);
    }

  /* The order of these tests is critical so that, for example, we don't
     check the wrong mode (input vs. output) for a conversion operation,
     such as FIX.  At some point, this should be simplified.  */

  /* Signed integer -> float conversion of a compile-time constant.  */
  if (code == FLOAT && GET_MODE (op) == VOIDmode
      && (GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op)))
    {
      HOST_WIDE_INT hv, lv;
      REAL_VALUE_TYPE d;

      if (CONST_INT_P (op))
	lv = INTVAL (op), hv = HWI_SIGN_EXTEND (lv);
      else
	lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);

      REAL_VALUE_FROM_INT (d, lv, hv, mode);
      d = real_value_truncate (mode, d);
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }
  /* Unsigned integer -> float conversion of a compile-time constant.  */
  else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
	   && (GET_CODE (op) == CONST_DOUBLE
	       || CONST_INT_P (op)))
    {
      HOST_WIDE_INT hv, lv;
      REAL_VALUE_TYPE d;

      if (CONST_INT_P (op))
	lv = INTVAL (op), hv = HWI_SIGN_EXTEND (lv);
      else
	lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);

      if (op_mode == VOIDmode)
	{
	  /* We don't know how to interpret negative-looking numbers in
	     this case, so don't try to fold those.  */
	  if (hv < 0)
	    return 0;
	}
      else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
	;
      else
	/* Narrow operand: mask to its mode so the value is interpreted
	   as unsigned.  */
	hv = 0, lv &= GET_MODE_MASK (op_mode);

      REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
      d = real_value_truncate (mode, d);
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }

  /* Single-word integer constant: fold entirely within one
     HOST_WIDE_INT.  */
  if (CONST_INT_P (op)
      && width <= HOST_BITS_PER_WIDE_INT && width > 0)
    {
      HOST_WIDE_INT arg0 = INTVAL (op);
      HOST_WIDE_INT val;

      switch (code)
	{
	case NOT:
	  val = ~ arg0;
	  break;

	case NEG:
	  val = - arg0;
	  break;

	case ABS:
	  val = (arg0 >= 0 ? arg0 : - arg0);
	  break;

	case FFS:
	  arg0 &= GET_MODE_MASK (mode);
	  val = ffs_hwi (arg0);
	  break;

	case CLZ:
	  arg0 &= GET_MODE_MASK (mode);
	  if (arg0 == 0 && CLZ_DEFINED_VALUE_AT_ZERO (mode, val))
	    ;
	  else
	    val = GET_MODE_BITSIZE (mode) - floor_log2 (arg0) - 1;
	  break;

	case CTZ:
	  arg0 &= GET_MODE_MASK (mode);
	  if (arg0 == 0)
	    {
	      /* Even if the value at zero is undefined, we have to come
		 up with some replacement.  Seems good enough.  */
	      if (! CTZ_DEFINED_VALUE_AT_ZERO (mode, val))
		val = GET_MODE_BITSIZE (mode);
	    }
	  else
	    val = ctz_hwi (arg0);
	  break;

	case POPCOUNT:
	  arg0 &= GET_MODE_MASK (mode);
	  val = 0;
	  /* Kernighan's trick: each iteration clears the lowest set bit.  */
	  while (arg0)
	    val++, arg0 &= arg0 - 1;
	  break;

	case PARITY:
	  arg0 &= GET_MODE_MASK (mode);
	  val = 0;
	  while (arg0)
	    val++, arg0 &= arg0 - 1;
	  val &= 1;
	  break;

	case BSWAP:
	  {
	    unsigned int s;

	    val = 0;
	    /* Move the byte at bit offset S to the mirrored offset D.  */
	    for (s = 0; s < width; s += 8)
	      {
		unsigned int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;
		byte = (arg0 >> s) & 0xff;
		val |= byte << d;
	      }
	  }
	  break;

	case TRUNCATE:
	  val = arg0;
	  break;

	case ZERO_EXTEND:
	  /* When zero-extending a CONST_INT, we need to know its
	     original mode.  */
	  gcc_assert (op_mode != VOIDmode);
	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
	    {
	      /* If we were really extending the mode,
		 we would have to distinguish between zero-extension
		 and sign-extension.  */
	      gcc_assert (width == GET_MODE_BITSIZE (op_mode));
	      val = arg0;
	    }
	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
	    val = arg0 & ~((unsigned HOST_WIDE_INT) (-1)
			   << GET_MODE_BITSIZE (op_mode));
	  else
	    return 0;
	  break;

	case SIGN_EXTEND:
	  if (op_mode == VOIDmode)
	    op_mode = mode;
	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
	    {
	      /* If we were really extending the mode,
		 we would have to distinguish between zero-extension
		 and sign-extension.  */
	      gcc_assert (width == GET_MODE_BITSIZE (op_mode));
	      val = arg0;
	    }
	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
	    {
	      /* Mask to OP_MODE, then subtract 2**bitsize if the sign
		 bit of the narrow value is set.  */
	      val
		= arg0 & ~((unsigned HOST_WIDE_INT) (-1)
			   << GET_MODE_BITSIZE (op_mode));
	      if (val & ((unsigned HOST_WIDE_INT) 1
			 << (GET_MODE_BITSIZE (op_mode) - 1)))
		val
		  -= (unsigned HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
	    }
	  else
	    return 0;
	  break;

	case SQRT:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	case SS_TRUNCATE:
	case US_TRUNCATE:
	case SS_NEG:
	case US_NEG:
	case SS_ABS:
	  /* Not foldable on an integer constant.  */
	  return 0;

	default:
	  gcc_unreachable ();
	}

      return gen_int_mode (val, mode);
    }

  /* We can do some operations on integer CONST_DOUBLEs.  Also allow
     for a DImode operation on a CONST_INT.  The value is kept as a
     (low, high) pair of HOST_WIDE_INTs.  */
  else if (GET_MODE (op) == VOIDmode
	   && width <= HOST_BITS_PER_WIDE_INT * 2
	   && (GET_CODE (op) == CONST_DOUBLE
	       || CONST_INT_P (op)))
    {
      unsigned HOST_WIDE_INT l1, lv;
      HOST_WIDE_INT h1, hv;

      if (GET_CODE (op) == CONST_DOUBLE)
	l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
      else
	l1 = INTVAL (op), h1 = HWI_SIGN_EXTEND (l1);

      switch (code)
	{
	case NOT:
	  lv = ~ l1;
	  hv = ~ h1;
	  break;

	case NEG:
	  neg_double (l1, h1, &lv, &hv);
	  break;

	case ABS:
	  if (h1 < 0)
	    neg_double (l1, h1, &lv, &hv);
	  else
	    lv = l1, hv = h1;
	  break;

	case FFS:
	  hv = 0;
	  if (l1 != 0)
	    lv = ffs_hwi (l1);
	  else if (h1 != 0)
	    lv = HOST_BITS_PER_WIDE_INT + ffs_hwi (h1);
	  else
	    lv = 0;
	  break;

	case CLZ:
	  hv = 0;
	  if (h1 != 0)
	    lv = GET_MODE_BITSIZE (mode) - floor_log2 (h1) - 1
	      - HOST_BITS_PER_WIDE_INT;
	  else if (l1 != 0)
	    lv = GET_MODE_BITSIZE (mode) - floor_log2 (l1) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (mode, lv))
	    lv = GET_MODE_BITSIZE (mode);
	  break;

	case CTZ:
	  hv = 0;
	  if (l1 != 0)
	    lv = ctz_hwi (l1);
	  else if (h1 != 0)
	    lv = HOST_BITS_PER_WIDE_INT + ctz_hwi (h1);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (mode, lv))
	    lv = GET_MODE_BITSIZE (mode);
	  break;

	case POPCOUNT:
	  hv = 0;
	  lv = 0;
	  while (l1)
	    lv++, l1 &= l1 - 1;
	  while (h1)
	    lv++, h1 &= h1 - 1;
	  break;

	case PARITY:
	  hv = 0;
	  lv = 0;
	  while (l1)
	    lv++, l1 &= l1 - 1;
	  while (h1)
	    lv++, h1 &= h1 - 1;
	  lv &= 1;
	  break;

	case BSWAP:
	  {
	    unsigned int s;

	    hv = 0;
	    lv = 0;
	    for (s = 0; s < width; s += 8)
	      {
		unsigned int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;

		/* Source byte may live in either word ...  */
		if (s < HOST_BITS_PER_WIDE_INT)
		  byte = (l1 >> s) & 0xff;
		else
		  byte = (h1 >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		/* ... and so may the destination position.  */
		if (d < HOST_BITS_PER_WIDE_INT)
		  lv |= byte << d;
		else
		  hv |= byte << (d - HOST_BITS_PER_WIDE_INT);
	      }
	  }
	  break;

	case TRUNCATE:
	  /* This is just a change-of-mode, so do nothing.  */
	  lv = l1, hv = h1;
	  break;

	case ZERO_EXTEND:
	  gcc_assert (op_mode != VOIDmode);

	  if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
	    return 0;

	  hv = 0;
	  lv = l1 & GET_MODE_MASK (op_mode);
	  break;

	case SIGN_EXTEND:
	  if (op_mode == VOIDmode
	      || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
	    return 0;
	  else
	    {
	      lv = l1 & GET_MODE_MASK (op_mode);
	      if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
		  && (lv & ((unsigned HOST_WIDE_INT) 1
			    << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
		lv -= (unsigned HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);

	      hv = HWI_SIGN_EXTEND (lv);
	    }
	  break;

	case SQRT:
	  return 0;

	default:
	  return 0;
	}

      return immed_double_const (lv, hv, mode);
    }

  /* Floating-point constant with floating-point result mode.  */
  else if (GET_CODE (op) == CONST_DOUBLE
	   && SCALAR_FLOAT_MODE_P (mode))
    {
      REAL_VALUE_TYPE d, t;
      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      switch (code)
	{
	case SQRT:
	  /* Don't fold sqrt of a signaling NaN: it must trap at
	     run time.  */
	  if (HONOR_SNANS (mode) && real_isnan (&d))
	    return 0;
	  real_sqrt (&t, mode, &d);
	  d = t;
	  break;
	case ABS:
	  d = real_value_abs (&d);
	  break;
	case NEG:
	  d = real_value_negate (&d);
	  break;
	case FLOAT_TRUNCATE:
	  d = real_value_truncate (mode, d);
	  break;
	case FLOAT_EXTEND:
	  /* All this does is change the mode.  */
	  break;
	case FIX:
	  real_arithmetic (&d, FIX_TRUNC_EXPR, &d, NULL);
	  break;
	case NOT:
	  {
	    /* Bitwise NOT applied to the target representation of the
	       float, then converted back.  */
	    long tmp[4];
	    int i;

	    real_to_target (tmp, &d, GET_MODE (op));
	    for (i = 0; i < 4; i++)
	      tmp[i] = ~tmp[i];
	    real_from_target (&d, tmp, mode);
	    break;
	  }
	default:
	  gcc_unreachable ();
	}
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }

  /* Float constant converted to an integer mode (FIX / UNSIGNED_FIX):
     saturate out-of-range values to the integer mode's bounds and map
     NaN (and negative input for UNSIGNED_FIX) to zero.  */
  else if (GET_CODE (op) == CONST_DOUBLE
	   && SCALAR_FLOAT_MODE_P (GET_MODE (op))
	   && GET_MODE_CLASS (mode) == MODE_INT
	   && width <= 2*HOST_BITS_PER_WIDE_INT && width > 0)
    {
      /* Although the overflow semantics of RTL's FIX and UNSIGNED_FIX
	 operators are intentionally left unspecified (to ease implementation
	 by target backends), for consistency, this routine implements the
	 same semantics for constant folding as used by the middle-end.  */

      /* This was formerly used only for non-IEEE float.
	 eggert@twinsun.com says it is safe for IEEE also.  */
      HOST_WIDE_INT xh, xl, th, tl;
      REAL_VALUE_TYPE x, t;
      REAL_VALUE_FROM_CONST_DOUBLE (x, op);
      switch (code)
	{
	case FIX:
	  if (REAL_VALUE_ISNAN (x))
	    return const0_rtx;

	  /* Test against the signed upper bound.  */
	  if (width > HOST_BITS_PER_WIDE_INT)
	    {
	      th = ((unsigned HOST_WIDE_INT) 1
		    << (width - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	      tl = -1;
	    }
	  else
	    {
	      th = 0;
	      tl = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
	    }
	  real_from_integer (&t, VOIDmode, tl, th, 0);
	  if (REAL_VALUES_LESS (t, x))
	    {
	      xh = th;
	      xl = tl;
	      break;
	    }

	  /* Test against the signed lower bound.  */
	  if (width > HOST_BITS_PER_WIDE_INT)
	    {
	      th = (unsigned HOST_WIDE_INT) (-1)
		   << (width - HOST_BITS_PER_WIDE_INT - 1);
	      tl = 0;
	    }
	  else
	    {
	      th = -1;
	      tl = (unsigned HOST_WIDE_INT) (-1) << (width - 1);
	    }
	  real_from_integer (&t, VOIDmode, tl, th, 0);
	  if (REAL_VALUES_LESS (x, t))
	    {
	      xh = th;
	      xl = tl;
	      break;
	    }
	  REAL_VALUE_TO_INT (&xl, &xh, x);
	  break;

	case UNSIGNED_FIX:
	  if (REAL_VALUE_ISNAN (x) || REAL_VALUE_NEGATIVE (x))
	    return const0_rtx;

	  /* Test against the unsigned upper bound.  */
	  if (width == 2*HOST_BITS_PER_WIDE_INT)
	    {
	      th = -1;
	      tl = -1;
	    }
	  else if (width >= HOST_BITS_PER_WIDE_INT)
	    {
	      th = ((unsigned HOST_WIDE_INT) 1
		    << (width - HOST_BITS_PER_WIDE_INT)) - 1;
	      tl = -1;
	    }
	  else
	    {
	      th = 0;
	      tl = ((unsigned HOST_WIDE_INT) 1 << width) - 1;
	    }
	  real_from_integer (&t, VOIDmode, tl, th, 1);
	  if (REAL_VALUES_LESS (t, x))
	    {
	      xh = th;
	      xl = tl;
	      break;
	    }

	  REAL_VALUE_TO_INT (&xl, &xh, x);
	  break;

	default:
	  gcc_unreachable ();
	}
      return immed_double_const (xl, xh, mode);
    }

  return NULL_RTX;
}
1677\f
9ce79a7a
RS
1678/* Subroutine of simplify_binary_operation to simplify a commutative,
1679 associative binary operation CODE with result mode MODE, operating
1680 on OP0 and OP1. CODE is currently one of PLUS, MULT, AND, IOR, XOR,
1681 SMIN, SMAX, UMIN or UMAX. Return zero if no simplification or
1682 canonicalization is possible. */
dd61aa98 1683
dd61aa98
RS
1684static rtx
1685simplify_associative_operation (enum rtx_code code, enum machine_mode mode,
1686 rtx op0, rtx op1)
1687{
1688 rtx tem;
1689
9ce79a7a
RS
1690 /* Linearize the operator to the left. */
1691 if (GET_CODE (op1) == code)
dd61aa98 1692 {
9ce79a7a
RS
1693 /* "(a op b) op (c op d)" becomes "((a op b) op c) op d)". */
1694 if (GET_CODE (op0) == code)
1695 {
1696 tem = simplify_gen_binary (code, mode, op0, XEXP (op1, 0));
1697 return simplify_gen_binary (code, mode, tem, XEXP (op1, 1));
1698 }
dd61aa98 1699
9ce79a7a
RS
1700 /* "a op (b op c)" becomes "(b op c) op a". */
1701 if (! swap_commutative_operands_p (op1, op0))
1702 return simplify_gen_binary (code, mode, op1, op0);
dd61aa98 1703
9ce79a7a
RS
1704 tem = op0;
1705 op0 = op1;
1706 op1 = tem;
dd61aa98
RS
1707 }
1708
9ce79a7a 1709 if (GET_CODE (op0) == code)
dd61aa98 1710 {
9ce79a7a
RS
1711 /* Canonicalize "(x op c) op y" as "(x op y) op c". */
1712 if (swap_commutative_operands_p (XEXP (op0, 1), op1))
1713 {
1714 tem = simplify_gen_binary (code, mode, XEXP (op0, 0), op1);
1715 return simplify_gen_binary (code, mode, tem, XEXP (op0, 1));
1716 }
1717
1718 /* Attempt to simplify "(a op b) op c" as "a op (b op c)". */
7e0b4eae 1719 tem = simplify_binary_operation (code, mode, XEXP (op0, 1), op1);
9ce79a7a
RS
1720 if (tem != 0)
1721 return simplify_gen_binary (code, mode, XEXP (op0, 0), tem);
1722
1723 /* Attempt to simplify "(a op b) op c" as "(a op c) op b". */
7e0b4eae 1724 tem = simplify_binary_operation (code, mode, XEXP (op0, 0), op1);
9ce79a7a
RS
1725 if (tem != 0)
1726 return simplify_gen_binary (code, mode, tem, XEXP (op0, 1));
dd61aa98
RS
1727 }
1728
1729 return 0;
1730}
1731
0a67e02c 1732
0cedb36c
JL
1733/* Simplify a binary operation CODE with result mode MODE, operating on OP0
1734 and OP1. Return 0 if no simplification is possible.
1735
1736 Don't use this for relational operations such as EQ or LT.
1737 Use simplify_relational_operation instead. */
0cedb36c 1738rtx
46c5ad27
AJ
1739simplify_binary_operation (enum rtx_code code, enum machine_mode mode,
1740 rtx op0, rtx op1)
0cedb36c 1741{
9ce79a7a 1742 rtx trueop0, trueop1;
0cedb36c
JL
1743 rtx tem;
1744
1745 /* Relational operations don't work here. We must know the mode
1746 of the operands in order to do the comparison correctly.
1747 Assuming a full word can give incorrect results.
1748 Consider comparing 128 with -128 in QImode. */
41374e13
NS
1749 gcc_assert (GET_RTX_CLASS (code) != RTX_COMPARE);
1750 gcc_assert (GET_RTX_CLASS (code) != RTX_COMM_COMPARE);
0cedb36c 1751
4ba5f925 1752 /* Make sure the constant is second. */
ec8e098d 1753 if (GET_RTX_CLASS (code) == RTX_COMM_ARITH
9ce79a7a 1754 && swap_commutative_operands_p (op0, op1))
4ba5f925
JH
1755 {
1756 tem = op0, op0 = op1, op1 = tem;
4ba5f925
JH
1757 }
1758
9ce79a7a
RS
1759 trueop0 = avoid_constant_pool_reference (op0);
1760 trueop1 = avoid_constant_pool_reference (op1);
1761
0a67e02c
PB
1762 tem = simplify_const_binary_operation (code, mode, trueop0, trueop1);
1763 if (tem)
1764 return tem;
1765 return simplify_binary_operation_1 (code, mode, op0, op1, trueop0, trueop1);
1766}
1767
1753331b
RS
1768/* Subroutine of simplify_binary_operation. Simplify a binary operation
1769 CODE with result mode MODE, operating on OP0 and OP1. If OP0 and/or
1770 OP1 are constant pool references, TRUEOP0 and TRUEOP1 represent the
1771 actual constants. */
1772
0a67e02c
PB
1773static rtx
1774simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
1775 rtx op0, rtx op1, rtx trueop0, rtx trueop1)
1776{
bd1ef757 1777 rtx tem, reversed, opleft, opright;
0a67e02c
PB
1778 HOST_WIDE_INT val;
1779 unsigned int width = GET_MODE_BITSIZE (mode);
1780
1781 /* Even if we can't compute a constant result,
1782 there are some cases worth simplifying. */
1783
1784 switch (code)
852c8ba1 1785 {
0a67e02c
PB
1786 case PLUS:
1787 /* Maybe simplify x + 0 to x. The two expressions are equivalent
1788 when x is NaN, infinite, or finite and nonzero. They aren't
1789 when x is -0 and the rounding mode is not towards -infinity,
1790 since (-0) + 0 is then 0. */
1791 if (!HONOR_SIGNED_ZEROS (mode) && trueop1 == CONST0_RTX (mode))
1792 return op0;
1793
1794 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)). These
1795 transformations are safe even for IEEE. */
1796 if (GET_CODE (op0) == NEG)
1797 return simplify_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
1798 else if (GET_CODE (op1) == NEG)
1799 return simplify_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
1800
1801 /* (~a) + 1 -> -a */
1802 if (INTEGRAL_MODE_P (mode)
1803 && GET_CODE (op0) == NOT
1804 && trueop1 == const1_rtx)
1805 return simplify_gen_unary (NEG, mode, XEXP (op0, 0), mode);
1806
1807 /* Handle both-operands-constant cases. We can only add
1808 CONST_INTs to constants since the sum of relocatable symbols
1809 can't be handled by most assemblers. Don't add CONST_INT
1810 to CONST_INT since overflow won't be computed properly if wider
1811 than HOST_BITS_PER_WIDE_INT. */
1812
dd59ef13
RS
1813 if ((GET_CODE (op0) == CONST
1814 || GET_CODE (op0) == SYMBOL_REF
1815 || GET_CODE (op0) == LABEL_REF)
481683e1 1816 && CONST_INT_P (op1))
0a67e02c 1817 return plus_constant (op0, INTVAL (op1));
dd59ef13
RS
1818 else if ((GET_CODE (op1) == CONST
1819 || GET_CODE (op1) == SYMBOL_REF
1820 || GET_CODE (op1) == LABEL_REF)
481683e1 1821 && CONST_INT_P (op0))
0a67e02c
PB
1822 return plus_constant (op1, INTVAL (op0));
1823
1824 /* See if this is something like X * C - X or vice versa or
1825 if the multiplication is written as a shift. If so, we can
1826 distribute and make a new multiply, shift, or maybe just
1827 have X (if C is 2 in the example above). But don't make
1828 something more expensive than we had before. */
1829
6800ea5c 1830 if (SCALAR_INT_MODE_P (mode))
0a67e02c 1831 {
54fb1ae0 1832 double_int coeff0, coeff1;
0a67e02c
PB
1833 rtx lhs = op0, rhs = op1;
1834
54fb1ae0
AS
1835 coeff0 = double_int_one;
1836 coeff1 = double_int_one;
1837
0a67e02c 1838 if (GET_CODE (lhs) == NEG)
fab2f52c 1839 {
54fb1ae0 1840 coeff0 = double_int_minus_one;
fab2f52c
AO
1841 lhs = XEXP (lhs, 0);
1842 }
0a67e02c 1843 else if (GET_CODE (lhs) == MULT
481683e1 1844 && CONST_INT_P (XEXP (lhs, 1)))
fab2f52c 1845 {
54fb1ae0 1846 coeff0 = shwi_to_double_int (INTVAL (XEXP (lhs, 1)));
fab2f52c
AO
1847 lhs = XEXP (lhs, 0);
1848 }
0a67e02c 1849 else if (GET_CODE (lhs) == ASHIFT
481683e1 1850 && CONST_INT_P (XEXP (lhs, 1))
54fb1ae0 1851 && INTVAL (XEXP (lhs, 1)) >= 0
0a67e02c
PB
1852 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
1853 {
54fb1ae0
AS
1854 coeff0 = double_int_setbit (double_int_zero,
1855 INTVAL (XEXP (lhs, 1)));
0a67e02c
PB
1856 lhs = XEXP (lhs, 0);
1857 }
852c8ba1 1858
0a67e02c 1859 if (GET_CODE (rhs) == NEG)
fab2f52c 1860 {
54fb1ae0 1861 coeff1 = double_int_minus_one;
fab2f52c
AO
1862 rhs = XEXP (rhs, 0);
1863 }
0a67e02c 1864 else if (GET_CODE (rhs) == MULT
481683e1 1865 && CONST_INT_P (XEXP (rhs, 1)))
0a67e02c 1866 {
54fb1ae0 1867 coeff1 = shwi_to_double_int (INTVAL (XEXP (rhs, 1)));
fab2f52c 1868 rhs = XEXP (rhs, 0);
0a67e02c
PB
1869 }
1870 else if (GET_CODE (rhs) == ASHIFT
481683e1 1871 && CONST_INT_P (XEXP (rhs, 1))
0a67e02c
PB
1872 && INTVAL (XEXP (rhs, 1)) >= 0
1873 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
1874 {
54fb1ae0
AS
1875 coeff1 = double_int_setbit (double_int_zero,
1876 INTVAL (XEXP (rhs, 1)));
0a67e02c
PB
1877 rhs = XEXP (rhs, 0);
1878 }
1879
1880 if (rtx_equal_p (lhs, rhs))
1881 {
1882 rtx orig = gen_rtx_PLUS (mode, op0, op1);
fab2f52c 1883 rtx coeff;
54fb1ae0 1884 double_int val;
f40751dd 1885 bool speed = optimize_function_for_speed_p (cfun);
fab2f52c 1886
54fb1ae0
AS
1887 val = double_int_add (coeff0, coeff1);
1888 coeff = immed_double_int_const (val, mode);
fab2f52c
AO
1889
1890 tem = simplify_gen_binary (MULT, mode, lhs, coeff);
f40751dd 1891 return rtx_cost (tem, SET, speed) <= rtx_cost (orig, SET, speed)
0a67e02c
PB
1892 ? tem : 0;
1893 }
1894 }
1895
1896 /* (plus (xor X C1) C2) is (xor X (C1^C2)) if C2 is signbit. */
481683e1 1897 if ((CONST_INT_P (op1)
0a67e02c
PB
1898 || GET_CODE (op1) == CONST_DOUBLE)
1899 && GET_CODE (op0) == XOR
481683e1 1900 && (CONST_INT_P (XEXP (op0, 1))
0a67e02c
PB
1901 || GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE)
1902 && mode_signbit_p (mode, op1))
1903 return simplify_gen_binary (XOR, mode, XEXP (op0, 0),
1904 simplify_gen_binary (XOR, mode, op1,
1905 XEXP (op0, 1)));
1906
bd1ef757 1907 /* Canonicalize (plus (mult (neg B) C) A) to (minus A (mult B C)). */
4bf371ea
RG
1908 if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode)
1909 && GET_CODE (op0) == MULT
bd1ef757
PB
1910 && GET_CODE (XEXP (op0, 0)) == NEG)
1911 {
1912 rtx in1, in2;
1913
1914 in1 = XEXP (XEXP (op0, 0), 0);
1915 in2 = XEXP (op0, 1);
1916 return simplify_gen_binary (MINUS, mode, op1,
1917 simplify_gen_binary (MULT, mode,
1918 in1, in2));
1919 }
1920
1921 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
1922 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
1923 is 1. */
1924 if (COMPARISON_P (op0)
1925 && ((STORE_FLAG_VALUE == -1 && trueop1 == const1_rtx)
1926 || (STORE_FLAG_VALUE == 1 && trueop1 == constm1_rtx))
1927 && (reversed = reversed_comparison (op0, mode)))
1928 return
1929 simplify_gen_unary (NEG, mode, reversed, mode);
1930
0a67e02c
PB
1931 /* If one of the operands is a PLUS or a MINUS, see if we can
1932 simplify this by the associative law.
1933 Don't use the associative law for floating point.
1934 The inaccuracy makes it nonassociative,
1935 and subtle programs can break if operations are associated. */
1936
1937 if (INTEGRAL_MODE_P (mode)
1938 && (plus_minus_operand_p (op0)
1939 || plus_minus_operand_p (op1))
1941069a 1940 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
0a67e02c
PB
1941 return tem;
1942
1943 /* Reassociate floating point addition only when the user
a1a82611 1944 specifies associative math operations. */
0a67e02c 1945 if (FLOAT_MODE_P (mode)
a1a82611 1946 && flag_associative_math)
852c8ba1 1947 {
0a67e02c
PB
1948 tem = simplify_associative_operation (code, mode, op0, op1);
1949 if (tem)
1950 return tem;
852c8ba1 1951 }
0a67e02c 1952 break;
852c8ba1 1953
0a67e02c 1954 case COMPARE:
0a67e02c
PB
1955 /* Convert (compare (gt (flags) 0) (lt (flags) 0)) to (flags). */
1956 if (((GET_CODE (op0) == GT && GET_CODE (op1) == LT)
1957 || (GET_CODE (op0) == GTU && GET_CODE (op1) == LTU))
1958 && XEXP (op0, 1) == const0_rtx && XEXP (op1, 1) == const0_rtx)
3198b947 1959 {
0a67e02c
PB
1960 rtx xop00 = XEXP (op0, 0);
1961 rtx xop10 = XEXP (op1, 0);
3198b947 1962
0a67e02c
PB
1963#ifdef HAVE_cc0
1964 if (GET_CODE (xop00) == CC0 && GET_CODE (xop10) == CC0)
1965#else
1966 if (REG_P (xop00) && REG_P (xop10)
1967 && GET_MODE (xop00) == GET_MODE (xop10)
1968 && REGNO (xop00) == REGNO (xop10)
1969 && GET_MODE_CLASS (GET_MODE (xop00)) == MODE_CC
1970 && GET_MODE_CLASS (GET_MODE (xop10)) == MODE_CC)
1971#endif
1972 return xop00;
3198b947 1973 }
0a67e02c
PB
1974 break;
1975
1976 case MINUS:
1977 /* We can't assume x-x is 0 even with non-IEEE floating point,
1978 but since it is zero except in very strange circumstances, we
81d2fb02 1979 will treat it as zero with -ffinite-math-only. */
0a67e02c
PB
1980 if (rtx_equal_p (trueop0, trueop1)
1981 && ! side_effects_p (op0)
81d2fb02 1982 && (!FLOAT_MODE_P (mode) || !HONOR_NANS (mode)))
0a67e02c
PB
1983 return CONST0_RTX (mode);
1984
1985 /* Change subtraction from zero into negation. (0 - x) is the
1986 same as -x when x is NaN, infinite, or finite and nonzero.
1987 But if the mode has signed zeros, and does not round towards
1988 -infinity, then 0 - 0 is 0, not -0. */
1989 if (!HONOR_SIGNED_ZEROS (mode) && trueop0 == CONST0_RTX (mode))
1990 return simplify_gen_unary (NEG, mode, op1, mode);
1991
1992 /* (-1 - a) is ~a. */
1993 if (trueop0 == constm1_rtx)
1994 return simplify_gen_unary (NOT, mode, op1, mode);
1995
1996 /* Subtracting 0 has no effect unless the mode has signed zeros
1997 and supports rounding towards -infinity. In such a case,
1998 0 - 0 is -0. */
1999 if (!(HONOR_SIGNED_ZEROS (mode)
2000 && HONOR_SIGN_DEPENDENT_ROUNDING (mode))
2001 && trueop1 == CONST0_RTX (mode))
2002 return op0;
2003
2004 /* See if this is something like X * C - X or vice versa or
2005 if the multiplication is written as a shift. If so, we can
2006 distribute and make a new multiply, shift, or maybe just
2007 have X (if C is 2 in the example above). But don't make
2008 something more expensive than we had before. */
2009
6800ea5c 2010 if (SCALAR_INT_MODE_P (mode))
3198b947 2011 {
54fb1ae0 2012 double_int coeff0, negcoeff1;
0a67e02c 2013 rtx lhs = op0, rhs = op1;
3198b947 2014
54fb1ae0
AS
2015 coeff0 = double_int_one;
2016 negcoeff1 = double_int_minus_one;
2017
0a67e02c 2018 if (GET_CODE (lhs) == NEG)
fab2f52c 2019 {
54fb1ae0 2020 coeff0 = double_int_minus_one;
fab2f52c
AO
2021 lhs = XEXP (lhs, 0);
2022 }
0a67e02c 2023 else if (GET_CODE (lhs) == MULT
481683e1 2024 && CONST_INT_P (XEXP (lhs, 1)))
0a67e02c 2025 {
54fb1ae0 2026 coeff0 = shwi_to_double_int (INTVAL (XEXP (lhs, 1)));
fab2f52c 2027 lhs = XEXP (lhs, 0);
0a67e02c
PB
2028 }
2029 else if (GET_CODE (lhs) == ASHIFT
481683e1 2030 && CONST_INT_P (XEXP (lhs, 1))
0a67e02c
PB
2031 && INTVAL (XEXP (lhs, 1)) >= 0
2032 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
2033 {
54fb1ae0
AS
2034 coeff0 = double_int_setbit (double_int_zero,
2035 INTVAL (XEXP (lhs, 1)));
0a67e02c
PB
2036 lhs = XEXP (lhs, 0);
2037 }
3198b947 2038
0a67e02c 2039 if (GET_CODE (rhs) == NEG)
fab2f52c 2040 {
54fb1ae0 2041 negcoeff1 = double_int_one;
fab2f52c
AO
2042 rhs = XEXP (rhs, 0);
2043 }
0a67e02c 2044 else if (GET_CODE (rhs) == MULT
481683e1 2045 && CONST_INT_P (XEXP (rhs, 1)))
0a67e02c 2046 {
54fb1ae0 2047 negcoeff1 = shwi_to_double_int (-INTVAL (XEXP (rhs, 1)));
fab2f52c 2048 rhs = XEXP (rhs, 0);
0a67e02c
PB
2049 }
2050 else if (GET_CODE (rhs) == ASHIFT
481683e1 2051 && CONST_INT_P (XEXP (rhs, 1))
0a67e02c
PB
2052 && INTVAL (XEXP (rhs, 1)) >= 0
2053 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
2054 {
54fb1ae0
AS
2055 negcoeff1 = double_int_setbit (double_int_zero,
2056 INTVAL (XEXP (rhs, 1)));
2057 negcoeff1 = double_int_neg (negcoeff1);
0a67e02c
PB
2058 rhs = XEXP (rhs, 0);
2059 }
2060
2061 if (rtx_equal_p (lhs, rhs))
2062 {
2063 rtx orig = gen_rtx_MINUS (mode, op0, op1);
fab2f52c 2064 rtx coeff;
54fb1ae0 2065 double_int val;
f40751dd 2066 bool speed = optimize_function_for_speed_p (cfun);
fab2f52c 2067
54fb1ae0
AS
2068 val = double_int_add (coeff0, negcoeff1);
2069 coeff = immed_double_int_const (val, mode);
fab2f52c
AO
2070
2071 tem = simplify_gen_binary (MULT, mode, lhs, coeff);
f40751dd 2072 return rtx_cost (tem, SET, speed) <= rtx_cost (orig, SET, speed)
0a67e02c
PB
2073 ? tem : 0;
2074 }
3198b947
RH
2075 }
2076
0a67e02c
PB
2077 /* (a - (-b)) -> (a + b). True even for IEEE. */
2078 if (GET_CODE (op1) == NEG)
2079 return simplify_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
3198b947 2080
0a67e02c
PB
2081 /* (-x - c) may be simplified as (-c - x). */
2082 if (GET_CODE (op0) == NEG
481683e1 2083 && (CONST_INT_P (op1)
0a67e02c 2084 || GET_CODE (op1) == CONST_DOUBLE))
79ae63b1 2085 {
0a67e02c
PB
2086 tem = simplify_unary_operation (NEG, mode, op1, mode);
2087 if (tem)
2088 return simplify_gen_binary (MINUS, mode, tem, XEXP (op0, 0));
2089 }
79ae63b1 2090
0a67e02c 2091 /* Don't let a relocatable value get a negative coeff. */
481683e1 2092 if (CONST_INT_P (op1) && GET_MODE (op0) != VOIDmode)
0a67e02c
PB
2093 return simplify_gen_binary (PLUS, mode,
2094 op0,
2095 neg_const_int (mode, op1));
2096
2097 /* (x - (x & y)) -> (x & ~y) */
2098 if (GET_CODE (op1) == AND)
2099 {
2100 if (rtx_equal_p (op0, XEXP (op1, 0)))
79ae63b1 2101 {
0a67e02c
PB
2102 tem = simplify_gen_unary (NOT, mode, XEXP (op1, 1),
2103 GET_MODE (XEXP (op1, 1)));
2104 return simplify_gen_binary (AND, mode, op0, tem);
2105 }
2106 if (rtx_equal_p (op0, XEXP (op1, 1)))
2107 {
2108 tem = simplify_gen_unary (NOT, mode, XEXP (op1, 0),
2109 GET_MODE (XEXP (op1, 0)));
2110 return simplify_gen_binary (AND, mode, op0, tem);
79ae63b1 2111 }
79ae63b1 2112 }
1941069a 2113
bd1ef757
PB
2114 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
2115 by reversing the comparison code if valid. */
2116 if (STORE_FLAG_VALUE == 1
2117 && trueop0 == const1_rtx
2118 && COMPARISON_P (op1)
2119 && (reversed = reversed_comparison (op1, mode)))
2120 return reversed;
2121
2122 /* Canonicalize (minus A (mult (neg B) C)) to (plus (mult B C) A). */
4bf371ea
RG
2123 if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode)
2124 && GET_CODE (op1) == MULT
bd1ef757
PB
2125 && GET_CODE (XEXP (op1, 0)) == NEG)
2126 {
2127 rtx in1, in2;
2128
2129 in1 = XEXP (XEXP (op1, 0), 0);
2130 in2 = XEXP (op1, 1);
2131 return simplify_gen_binary (PLUS, mode,
2132 simplify_gen_binary (MULT, mode,
2133 in1, in2),
2134 op0);
2135 }
2136
2137 /* Canonicalize (minus (neg A) (mult B C)) to
2138 (minus (mult (neg B) C) A). */
4bf371ea
RG
2139 if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode)
2140 && GET_CODE (op1) == MULT
bd1ef757
PB
2141 && GET_CODE (op0) == NEG)
2142 {
2143 rtx in1, in2;
2144
2145 in1 = simplify_gen_unary (NEG, mode, XEXP (op1, 0), mode);
2146 in2 = XEXP (op1, 1);
2147 return simplify_gen_binary (MINUS, mode,
2148 simplify_gen_binary (MULT, mode,
2149 in1, in2),
2150 XEXP (op0, 0));
2151 }
2152
1941069a
PB
2153 /* If one of the operands is a PLUS or a MINUS, see if we can
2154 simplify this by the associative law. This will, for example,
2155 canonicalize (minus A (plus B C)) to (minus (minus A B) C).
2156 Don't use the associative law for floating point.
2157 The inaccuracy makes it nonassociative,
2158 and subtle programs can break if operations are associated. */
2159
2160 if (INTEGRAL_MODE_P (mode)
2161 && (plus_minus_operand_p (op0)
2162 || plus_minus_operand_p (op1))
2163 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
2164 return tem;
0a67e02c 2165 break;
15e5ad76 2166
0a67e02c
PB
2167 case MULT:
2168 if (trueop1 == constm1_rtx)
2169 return simplify_gen_unary (NEG, mode, op0, mode);
2170
29b40d79
BS
2171 if (GET_CODE (op0) == NEG)
2172 {
2173 rtx temp = simplify_unary_operation (NEG, mode, op1, mode);
2174 if (temp)
2175 return simplify_gen_binary (MULT, mode, XEXP (op0, 0), temp);
2176 }
2177 if (GET_CODE (op1) == NEG)
2178 {
2179 rtx temp = simplify_unary_operation (NEG, mode, op0, mode);
2180 if (temp)
2181 return simplify_gen_binary (MULT, mode, temp, XEXP (op1, 0));
2182 }
2183
0a67e02c
PB
2184 /* Maybe simplify x * 0 to 0. The reduction is not valid if
2185 x is NaN, since x * 0 is then also NaN. Nor is it valid
2186 when the mode has signed zeros, since multiplying a negative
2187 number by 0 will give -0, not 0. */
2188 if (!HONOR_NANS (mode)
2189 && !HONOR_SIGNED_ZEROS (mode)
2190 && trueop1 == CONST0_RTX (mode)
2191 && ! side_effects_p (op0))
2192 return op1;
2193
2194 /* In IEEE floating point, x*1 is not equivalent to x for
2195 signalling NaNs. */
2196 if (!HONOR_SNANS (mode)
2197 && trueop1 == CONST1_RTX (mode))
2198 return op0;
2199
2200 /* Convert multiply by constant power of two into shift unless
2201 we are still generating RTL. This test is a kludge. */
481683e1 2202 if (CONST_INT_P (trueop1)
43c36287 2203 && (val = exact_log2 (UINTVAL (trueop1))) >= 0
0a67e02c
PB
2204 /* If the mode is larger than the host word size, and the
2205 uppermost bit is set, then this isn't a power of two due
2206 to implicit sign extension. */
2207 && (width <= HOST_BITS_PER_WIDE_INT
2208 || val != HOST_BITS_PER_WIDE_INT - 1))
2209 return simplify_gen_binary (ASHIFT, mode, op0, GEN_INT (val));
2210
fab2f52c 2211 /* Likewise for multipliers wider than a word. */
1753331b
RS
2212 if (GET_CODE (trueop1) == CONST_DOUBLE
2213 && (GET_MODE (trueop1) == VOIDmode
2214 || GET_MODE_CLASS (GET_MODE (trueop1)) == MODE_INT)
2215 && GET_MODE (op0) == mode
2216 && CONST_DOUBLE_LOW (trueop1) == 0
2217 && (val = exact_log2 (CONST_DOUBLE_HIGH (trueop1))) >= 0)
fab2f52c
AO
2218 return simplify_gen_binary (ASHIFT, mode, op0,
2219 GEN_INT (val + HOST_BITS_PER_WIDE_INT));
2220
0a67e02c
PB
2221 /* x*2 is x+x and x*(-1) is -x */
2222 if (GET_CODE (trueop1) == CONST_DOUBLE
3d8bf70f 2223 && SCALAR_FLOAT_MODE_P (GET_MODE (trueop1))
50cd60be 2224 && !DECIMAL_FLOAT_MODE_P (GET_MODE (trueop1))
0a67e02c
PB
2225 && GET_MODE (op0) == mode)
2226 {
2227 REAL_VALUE_TYPE d;
2228 REAL_VALUE_FROM_CONST_DOUBLE (d, trueop1);
15e5ad76 2229
0a67e02c
PB
2230 if (REAL_VALUES_EQUAL (d, dconst2))
2231 return simplify_gen_binary (PLUS, mode, op0, copy_rtx (op0));
3e4093b6 2232
1753331b
RS
2233 if (!HONOR_SNANS (mode)
2234 && REAL_VALUES_EQUAL (d, dconstm1))
0a67e02c
PB
2235 return simplify_gen_unary (NEG, mode, op0, mode);
2236 }
15e5ad76 2237
1753331b
RS
2238 /* Optimize -x * -x as x * x. */
2239 if (FLOAT_MODE_P (mode)
2240 && GET_CODE (op0) == NEG
2241 && GET_CODE (op1) == NEG
2242 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
2243 && !side_effects_p (XEXP (op0, 0)))
2244 return simplify_gen_binary (MULT, mode, XEXP (op0, 0), XEXP (op1, 0));
2245
2246 /* Likewise, optimize abs(x) * abs(x) as x * x. */
2247 if (SCALAR_FLOAT_MODE_P (mode)
2248 && GET_CODE (op0) == ABS
2249 && GET_CODE (op1) == ABS
2250 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
2251 && !side_effects_p (XEXP (op0, 0)))
2252 return simplify_gen_binary (MULT, mode, XEXP (op0, 0), XEXP (op1, 0));
2253
0a67e02c
PB
2254 /* Reassociate multiplication, but for floating point MULTs
2255 only when the user specifies unsafe math optimizations. */
2256 if (! FLOAT_MODE_P (mode)
2257 || flag_unsafe_math_optimizations)
2258 {
2259 tem = simplify_associative_operation (code, mode, op0, op1);
2260 if (tem)
2261 return tem;
2262 }
2263 break;
6355b2d5 2264
0a67e02c 2265 case IOR:
a82e045d 2266 if (trueop1 == CONST0_RTX (mode))
0a67e02c 2267 return op0;
481683e1 2268 if (CONST_INT_P (trueop1)
43c36287 2269 && ((UINTVAL (trueop1) & GET_MODE_MASK (mode))
0a67e02c
PB
2270 == GET_MODE_MASK (mode)))
2271 return op1;
2272 if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
2273 return op0;
2274 /* A | (~A) -> -1 */
2275 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
2276 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
2277 && ! side_effects_p (op0)
3f2960d5 2278 && SCALAR_INT_MODE_P (mode))
0a67e02c 2279 return constm1_rtx;
bd1ef757
PB
2280
2281 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
481683e1 2282 if (CONST_INT_P (op1)
bd1ef757 2283 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
43c36287 2284 && (nonzero_bits (op0, mode) & ~UINTVAL (op1)) == 0)
bd1ef757 2285 return op1;
b8698a0f 2286
49e7a9d4
RS
2287 /* Canonicalize (X & C1) | C2. */
2288 if (GET_CODE (op0) == AND
481683e1
SZ
2289 && CONST_INT_P (trueop1)
2290 && CONST_INT_P (XEXP (op0, 1)))
49e7a9d4
RS
2291 {
2292 HOST_WIDE_INT mask = GET_MODE_MASK (mode);
2293 HOST_WIDE_INT c1 = INTVAL (XEXP (op0, 1));
2294 HOST_WIDE_INT c2 = INTVAL (trueop1);
2295
2296 /* If (C1&C2) == C1, then (X&C1)|C2 becomes X. */
2297 if ((c1 & c2) == c1
2298 && !side_effects_p (XEXP (op0, 0)))
2299 return trueop1;
2300
2301 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
2302 if (((c1|c2) & mask) == mask)
2303 return simplify_gen_binary (IOR, mode, XEXP (op0, 0), op1);
2304
2305 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
2306 if (((c1 & ~c2) & mask) != (c1 & mask))
2307 {
2308 tem = simplify_gen_binary (AND, mode, XEXP (op0, 0),
2309 gen_int_mode (c1 & ~c2, mode));
2310 return simplify_gen_binary (IOR, mode, tem, op1);
2311 }
2312 }
2313
bd1ef757
PB
2314 /* Convert (A & B) | A to A. */
2315 if (GET_CODE (op0) == AND
2316 && (rtx_equal_p (XEXP (op0, 0), op1)
2317 || rtx_equal_p (XEXP (op0, 1), op1))
2318 && ! side_effects_p (XEXP (op0, 0))
2319 && ! side_effects_p (XEXP (op0, 1)))
2320 return op1;
2321
2322 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
2323 mode size to (rotate A CX). */
2324
2325 if (GET_CODE (op1) == ASHIFT
2326 || GET_CODE (op1) == SUBREG)
2327 {
2328 opleft = op1;
2329 opright = op0;
2330 }
2331 else
2332 {
2333 opright = op1;
2334 opleft = op0;
2335 }
2336
2337 if (GET_CODE (opleft) == ASHIFT && GET_CODE (opright) == LSHIFTRT
2338 && rtx_equal_p (XEXP (opleft, 0), XEXP (opright, 0))
481683e1
SZ
2339 && CONST_INT_P (XEXP (opleft, 1))
2340 && CONST_INT_P (XEXP (opright, 1))
bd1ef757
PB
2341 && (INTVAL (XEXP (opleft, 1)) + INTVAL (XEXP (opright, 1))
2342 == GET_MODE_BITSIZE (mode)))
2343 return gen_rtx_ROTATE (mode, XEXP (opright, 0), XEXP (opleft, 1));
2344
2345 /* Same, but for ashift that has been "simplified" to a wider mode
2346 by simplify_shift_const. */
2347
2348 if (GET_CODE (opleft) == SUBREG
2349 && GET_CODE (SUBREG_REG (opleft)) == ASHIFT
2350 && GET_CODE (opright) == LSHIFTRT
2351 && GET_CODE (XEXP (opright, 0)) == SUBREG
2352 && GET_MODE (opleft) == GET_MODE (XEXP (opright, 0))
2353 && SUBREG_BYTE (opleft) == SUBREG_BYTE (XEXP (opright, 0))
2354 && (GET_MODE_SIZE (GET_MODE (opleft))
2355 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (opleft))))
2356 && rtx_equal_p (XEXP (SUBREG_REG (opleft), 0),
2357 SUBREG_REG (XEXP (opright, 0)))
481683e1
SZ
2358 && CONST_INT_P (XEXP (SUBREG_REG (opleft), 1))
2359 && CONST_INT_P (XEXP (opright, 1))
bd1ef757
PB
2360 && (INTVAL (XEXP (SUBREG_REG (opleft), 1)) + INTVAL (XEXP (opright, 1))
2361 == GET_MODE_BITSIZE (mode)))
2362 return gen_rtx_ROTATE (mode, XEXP (opright, 0),
01578564 2363 XEXP (SUBREG_REG (opleft), 1));
bd1ef757
PB
2364
2365 /* If we have (ior (and (X C1) C2)), simplify this by making
2366 C1 as small as possible if C1 actually changes. */
481683e1 2367 if (CONST_INT_P (op1)
bd1ef757
PB
2368 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2369 || INTVAL (op1) > 0)
2370 && GET_CODE (op0) == AND
481683e1
SZ
2371 && CONST_INT_P (XEXP (op0, 1))
2372 && CONST_INT_P (op1)
43c36287 2373 && (UINTVAL (XEXP (op0, 1)) & UINTVAL (op1)) != 0)
bd1ef757
PB
2374 return simplify_gen_binary (IOR, mode,
2375 simplify_gen_binary
2376 (AND, mode, XEXP (op0, 0),
43c36287
EB
2377 GEN_INT (UINTVAL (XEXP (op0, 1))
2378 & ~UINTVAL (op1))),
bd1ef757
PB
2379 op1);
2380
2381 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
2382 a (sign_extend (plus ...)). Then check if OP1 is a CONST_INT and
2383 the PLUS does not affect any of the bits in OP1: then we can do
2384 the IOR as a PLUS and we can associate. This is valid if OP1
2385 can be safely shifted left C bits. */
481683e1 2386 if (CONST_INT_P (trueop1) && GET_CODE (op0) == ASHIFTRT
bd1ef757 2387 && GET_CODE (XEXP (op0, 0)) == PLUS
481683e1
SZ
2388 && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
2389 && CONST_INT_P (XEXP (op0, 1))
bd1ef757
PB
2390 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
2391 {
2392 int count = INTVAL (XEXP (op0, 1));
2393 HOST_WIDE_INT mask = INTVAL (trueop1) << count;
2394
2395 if (mask >> count == INTVAL (trueop1)
2396 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
2397 return simplify_gen_binary (ASHIFTRT, mode,
2398 plus_constant (XEXP (op0, 0), mask),
2399 XEXP (op0, 1));
2400 }
2401
0a67e02c
PB
2402 tem = simplify_associative_operation (code, mode, op0, op1);
2403 if (tem)
2404 return tem;
2405 break;
2406
2407 case XOR:
a82e045d 2408 if (trueop1 == CONST0_RTX (mode))
0a67e02c 2409 return op0;
481683e1 2410 if (CONST_INT_P (trueop1)
43c36287 2411 && ((UINTVAL (trueop1) & GET_MODE_MASK (mode))
0a67e02c
PB
2412 == GET_MODE_MASK (mode)))
2413 return simplify_gen_unary (NOT, mode, op0, mode);
f5d1572a 2414 if (rtx_equal_p (trueop0, trueop1)
0a67e02c
PB
2415 && ! side_effects_p (op0)
2416 && GET_MODE_CLASS (mode) != MODE_CC)
6bd13540 2417 return CONST0_RTX (mode);
0a67e02c
PB
2418
2419 /* Canonicalize XOR of the most significant bit to PLUS. */
481683e1 2420 if ((CONST_INT_P (op1)
0a67e02c
PB
2421 || GET_CODE (op1) == CONST_DOUBLE)
2422 && mode_signbit_p (mode, op1))
2423 return simplify_gen_binary (PLUS, mode, op0, op1);
2424 /* (xor (plus X C1) C2) is (xor X (C1^C2)) if C1 is signbit. */
481683e1 2425 if ((CONST_INT_P (op1)
0a67e02c
PB
2426 || GET_CODE (op1) == CONST_DOUBLE)
2427 && GET_CODE (op0) == PLUS
481683e1 2428 && (CONST_INT_P (XEXP (op0, 1))
0a67e02c
PB
2429 || GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE)
2430 && mode_signbit_p (mode, XEXP (op0, 1)))
2431 return simplify_gen_binary (XOR, mode, XEXP (op0, 0),
2432 simplify_gen_binary (XOR, mode, op1,
2433 XEXP (op0, 1)));
bd1ef757
PB
2434
2435 /* If we are XORing two things that have no bits in common,
2436 convert them into an IOR. This helps to detect rotation encoded
2437 using those methods and possibly other simplifications. */
2438
2439 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2440 && (nonzero_bits (op0, mode)
2441 & nonzero_bits (op1, mode)) == 0)
2442 return (simplify_gen_binary (IOR, mode, op0, op1));
2443
2444 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
2445 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
2446 (NOT y). */
2447 {
2448 int num_negated = 0;
2449
2450 if (GET_CODE (op0) == NOT)
2451 num_negated++, op0 = XEXP (op0, 0);
2452 if (GET_CODE (op1) == NOT)
2453 num_negated++, op1 = XEXP (op1, 0);
2454
2455 if (num_negated == 2)
2456 return simplify_gen_binary (XOR, mode, op0, op1);
2457 else if (num_negated == 1)
2458 return simplify_gen_unary (NOT, mode,
2459 simplify_gen_binary (XOR, mode, op0, op1),
2460 mode);
2461 }
2462
2463 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
2464 correspond to a machine insn or result in further simplifications
2465 if B is a constant. */
2466
2467 if (GET_CODE (op0) == AND
2468 && rtx_equal_p (XEXP (op0, 1), op1)
2469 && ! side_effects_p (op1))
2470 return simplify_gen_binary (AND, mode,
2471 simplify_gen_unary (NOT, mode,
2472 XEXP (op0, 0), mode),
2473 op1);
2474
2475 else if (GET_CODE (op0) == AND
2476 && rtx_equal_p (XEXP (op0, 0), op1)
2477 && ! side_effects_p (op1))
2478 return simplify_gen_binary (AND, mode,
2479 simplify_gen_unary (NOT, mode,
2480 XEXP (op0, 1), mode),
2481 op1);
2482
2483 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
2484 comparison if STORE_FLAG_VALUE is 1. */
2485 if (STORE_FLAG_VALUE == 1
2486 && trueop1 == const1_rtx
2487 && COMPARISON_P (op0)
2488 && (reversed = reversed_comparison (op0, mode)))
2489 return reversed;
2490
2491 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
2492 is (lt foo (const_int 0)), so we can perform the above
2493 simplification if STORE_FLAG_VALUE is 1. */
2494
2495 if (STORE_FLAG_VALUE == 1
2496 && trueop1 == const1_rtx
2497 && GET_CODE (op0) == LSHIFTRT
481683e1 2498 && CONST_INT_P (XEXP (op0, 1))
bd1ef757
PB
2499 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
2500 return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
2501
2502 /* (xor (comparison foo bar) (const_int sign-bit))
2503 when STORE_FLAG_VALUE is the sign bit. */
2504 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2505 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
2506 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
2507 && trueop1 == const_true_rtx
2508 && COMPARISON_P (op0)
2509 && (reversed = reversed_comparison (op0, mode)))
2510 return reversed;
2511
0a67e02c
PB
2512 tem = simplify_associative_operation (code, mode, op0, op1);
2513 if (tem)
2514 return tem;
2515 break;
2516
2517 case AND:
3f2960d5
RH
2518 if (trueop1 == CONST0_RTX (mode) && ! side_effects_p (op0))
2519 return trueop1;
f5a17c43 2520 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
dc5b3407
ZD
2521 {
2522 HOST_WIDE_INT nzop0 = nonzero_bits (trueop0, mode);
f5a17c43 2523 HOST_WIDE_INT nzop1;
481683e1 2524 if (CONST_INT_P (trueop1))
f5a17c43
BS
2525 {
2526 HOST_WIDE_INT val1 = INTVAL (trueop1);
2527 /* If we are turning off bits already known off in OP0, we need
2528 not do an AND. */
2529 if ((nzop0 & ~val1) == 0)
2530 return op0;
2531 }
2532 nzop1 = nonzero_bits (trueop1, mode);
dc5b3407 2533 /* If we are clearing all the nonzero bits, the result is zero. */
f5a17c43
BS
2534 if ((nzop1 & nzop0) == 0
2535 && !side_effects_p (op0) && !side_effects_p (op1))
dc5b3407
ZD
2536 return CONST0_RTX (mode);
2537 }
f5d1572a 2538 if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0)
0a67e02c
PB
2539 && GET_MODE_CLASS (mode) != MODE_CC)
2540 return op0;
2541 /* A & (~A) -> 0 */
2542 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
2543 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
2544 && ! side_effects_p (op0)
2545 && GET_MODE_CLASS (mode) != MODE_CC)
3f2960d5 2546 return CONST0_RTX (mode);
0a67e02c
PB
2547
2548 /* Transform (and (extend X) C) into (zero_extend (and X C)) if
2549 there are no nonzero bits of C outside of X's mode. */
2550 if ((GET_CODE (op0) == SIGN_EXTEND
2551 || GET_CODE (op0) == ZERO_EXTEND)
481683e1 2552 && CONST_INT_P (trueop1)
0a67e02c
PB
2553 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2554 && (~GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))
43c36287 2555 & UINTVAL (trueop1)) == 0)
0a67e02c
PB
2556 {
2557 enum machine_mode imode = GET_MODE (XEXP (op0, 0));
2558 tem = simplify_gen_binary (AND, imode, XEXP (op0, 0),
2559 gen_int_mode (INTVAL (trueop1),
2560 imode));
2561 return simplify_gen_unary (ZERO_EXTEND, mode, tem, imode);
2562 }
2563
fcaf7e12
AN
2564 /* Transform (and (truncate X) C) into (truncate (and X C)). This way
2565 we might be able to further simplify the AND with X and potentially
2566 remove the truncation altogether. */
2567 if (GET_CODE (op0) == TRUNCATE && CONST_INT_P (trueop1))
2568 {
2569 rtx x = XEXP (op0, 0);
2570 enum machine_mode xmode = GET_MODE (x);
2571 tem = simplify_gen_binary (AND, xmode, x,
2572 gen_int_mode (INTVAL (trueop1), xmode));
2573 return simplify_gen_unary (TRUNCATE, mode, tem, xmode);
2574 }
2575
49e7a9d4
RS
2576 /* Canonicalize (A | C1) & C2 as (A & C2) | (C1 & C2). */
2577 if (GET_CODE (op0) == IOR
481683e1
SZ
2578 && CONST_INT_P (trueop1)
2579 && CONST_INT_P (XEXP (op0, 1)))
49e7a9d4
RS
2580 {
2581 HOST_WIDE_INT tmp = INTVAL (trueop1) & INTVAL (XEXP (op0, 1));
2582 return simplify_gen_binary (IOR, mode,
2583 simplify_gen_binary (AND, mode,
2584 XEXP (op0, 0), op1),
2585 gen_int_mode (tmp, mode));
2586 }
2587
bd1ef757
PB
2588 /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
2589 insn (and may simplify more). */
2590 if (GET_CODE (op0) == XOR
2591 && rtx_equal_p (XEXP (op0, 0), op1)
2592 && ! side_effects_p (op1))
2593 return simplify_gen_binary (AND, mode,
2594 simplify_gen_unary (NOT, mode,
2595 XEXP (op0, 1), mode),
2596 op1);
2597
2598 if (GET_CODE (op0) == XOR
2599 && rtx_equal_p (XEXP (op0, 1), op1)
2600 && ! side_effects_p (op1))
2601 return simplify_gen_binary (AND, mode,
2602 simplify_gen_unary (NOT, mode,
2603 XEXP (op0, 0), mode),
2604 op1);
2605
2606 /* Similarly for (~(A ^ B)) & A. */
2607 if (GET_CODE (op0) == NOT
2608 && GET_CODE (XEXP (op0, 0)) == XOR
2609 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
2610 && ! side_effects_p (op1))
2611 return simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
2612
2613 if (GET_CODE (op0) == NOT
2614 && GET_CODE (XEXP (op0, 0)) == XOR
2615 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
2616 && ! side_effects_p (op1))
2617 return simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
2618
2619 /* Convert (A | B) & A to A. */
2620 if (GET_CODE (op0) == IOR
2621 && (rtx_equal_p (XEXP (op0, 0), op1)
2622 || rtx_equal_p (XEXP (op0, 1), op1))
2623 && ! side_effects_p (XEXP (op0, 0))
2624 && ! side_effects_p (XEXP (op0, 1)))
2625 return op1;
2626
0a67e02c
PB
2627 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
2628 ((A & N) + B) & M -> (A + B) & M
2629 Similarly if (N & M) == 0,
2630 ((A | N) + B) & M -> (A + B) & M
dc5b3407
ZD
2631 and for - instead of + and/or ^ instead of |.
2632 Also, if (N & M) == 0, then
2633 (A +- N) & M -> A & M. */
481683e1 2634 if (CONST_INT_P (trueop1)
0a67e02c 2635 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
43c36287
EB
2636 && ~UINTVAL (trueop1)
2637 && (UINTVAL (trueop1) & (UINTVAL (trueop1) + 1)) == 0
0a67e02c
PB
2638 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS))
2639 {
2640 rtx pmop[2];
2641 int which;
2642
2643 pmop[0] = XEXP (op0, 0);
2644 pmop[1] = XEXP (op0, 1);
2645
481683e1 2646 if (CONST_INT_P (pmop[1])
43c36287 2647 && (UINTVAL (pmop[1]) & UINTVAL (trueop1)) == 0)
dc5b3407
ZD
2648 return simplify_gen_binary (AND, mode, pmop[0], op1);
2649
0a67e02c
PB
2650 for (which = 0; which < 2; which++)
2651 {
2652 tem = pmop[which];
2653 switch (GET_CODE (tem))
6355b2d5 2654 {
0a67e02c 2655 case AND:
481683e1 2656 if (CONST_INT_P (XEXP (tem, 1))
43c36287
EB
2657 && (UINTVAL (XEXP (tem, 1)) & UINTVAL (trueop1))
2658 == UINTVAL (trueop1))
0a67e02c 2659 pmop[which] = XEXP (tem, 0);
6355b2d5 2660 break;
0a67e02c
PB
2661 case IOR:
2662 case XOR:
481683e1 2663 if (CONST_INT_P (XEXP (tem, 1))
43c36287 2664 && (UINTVAL (XEXP (tem, 1)) & UINTVAL (trueop1)) == 0)
0a67e02c 2665 pmop[which] = XEXP (tem, 0);
6355b2d5 2666 break;
6355b2d5
JJ
2667 default:
2668 break;
2669 }
2670 }
2671
0a67e02c
PB
2672 if (pmop[0] != XEXP (op0, 0) || pmop[1] != XEXP (op0, 1))
2673 {
2674 tem = simplify_gen_binary (GET_CODE (op0), mode,
2675 pmop[0], pmop[1]);
2676 return simplify_gen_binary (code, mode, tem, op1);
2677 }
2678 }
f79db4f6
AP
2679
2680 /* (and X (ior (not X) Y) -> (and X Y) */
2681 if (GET_CODE (op1) == IOR
2682 && GET_CODE (XEXP (op1, 0)) == NOT
2683 && op0 == XEXP (XEXP (op1, 0), 0))
2684 return simplify_gen_binary (AND, mode, op0, XEXP (op1, 1));
2685
2686 /* (and (ior (not X) Y) X) -> (and X Y) */
2687 if (GET_CODE (op0) == IOR
2688 && GET_CODE (XEXP (op0, 0)) == NOT
2689 && op1 == XEXP (XEXP (op0, 0), 0))
2690 return simplify_gen_binary (AND, mode, op1, XEXP (op0, 1));
2691
0a67e02c
PB
2692 tem = simplify_associative_operation (code, mode, op0, op1);
2693 if (tem)
2694 return tem;
2695 break;
762297d9 2696
0a67e02c
PB
2697 case UDIV:
2698 /* 0/x is 0 (or x&0 if x has side-effects). */
3f2960d5
RH
2699 if (trueop0 == CONST0_RTX (mode))
2700 {
2701 if (side_effects_p (op1))
2702 return simplify_gen_binary (AND, mode, op1, trueop0);
2703 return trueop0;
2704 }
2705 /* x/1 is x. */
2706 if (trueop1 == CONST1_RTX (mode))
2707 return rtl_hooks.gen_lowpart_no_emit (mode, op0);
2708 /* Convert divide by power of two into shift. */
481683e1 2709 if (CONST_INT_P (trueop1)
43c36287 2710 && (val = exact_log2 (UINTVAL (trueop1))) > 0)
3f2960d5
RH
2711 return simplify_gen_binary (LSHIFTRT, mode, op0, GEN_INT (val));
2712 break;
d284eb28 2713
0a67e02c
PB
2714 case DIV:
2715 /* Handle floating point and integers separately. */
3d8bf70f 2716 if (SCALAR_FLOAT_MODE_P (mode))
0a67e02c
PB
2717 {
2718 /* Maybe change 0.0 / x to 0.0. This transformation isn't
2719 safe for modes with NaNs, since 0.0 / 0.0 will then be
2720 NaN rather than 0.0. Nor is it safe for modes with signed
2721 zeros, since dividing 0 by a negative number gives -0.0 */
2722 if (trueop0 == CONST0_RTX (mode)
2723 && !HONOR_NANS (mode)
2724 && !HONOR_SIGNED_ZEROS (mode)
2725 && ! side_effects_p (op1))
2726 return op0;
2727 /* x/1.0 is x. */
2728 if (trueop1 == CONST1_RTX (mode)
2729 && !HONOR_SNANS (mode))
2730 return op0;
0cedb36c 2731
0a67e02c
PB
2732 if (GET_CODE (trueop1) == CONST_DOUBLE
2733 && trueop1 != CONST0_RTX (mode))
2734 {
2735 REAL_VALUE_TYPE d;
2736 REAL_VALUE_FROM_CONST_DOUBLE (d, trueop1);
0cedb36c 2737
0a67e02c
PB
2738 /* x/-1.0 is -x. */
2739 if (REAL_VALUES_EQUAL (d, dconstm1)
2740 && !HONOR_SNANS (mode))
2741 return simplify_gen_unary (NEG, mode, op0, mode);
0cedb36c 2742
0a67e02c 2743 /* Change FP division by a constant into multiplication.
a1a82611
RE
2744 Only do this with -freciprocal-math. */
2745 if (flag_reciprocal_math
0a67e02c
PB
2746 && !REAL_VALUES_EQUAL (d, dconst0))
2747 {
2748 REAL_ARITHMETIC (d, RDIV_EXPR, dconst1, d);
2749 tem = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2750 return simplify_gen_binary (MULT, mode, op0, tem);
2751 }
2752 }
2753 }
0cedb36c 2754 else
0cedb36c 2755 {
0a67e02c 2756 /* 0/x is 0 (or x&0 if x has side-effects). */
0e1b8b10
ILT
2757 if (trueop0 == CONST0_RTX (mode)
2758 && !cfun->can_throw_non_call_exceptions)
3f2960d5
RH
2759 {
2760 if (side_effects_p (op1))
2761 return simplify_gen_binary (AND, mode, op1, trueop0);
2762 return trueop0;
2763 }
0a67e02c 2764 /* x/1 is x. */
3f2960d5 2765 if (trueop1 == CONST1_RTX (mode))
9ce921ab 2766 return rtl_hooks.gen_lowpart_no_emit (mode, op0);
0a67e02c
PB
2767 /* x/-1 is -x. */
2768 if (trueop1 == constm1_rtx)
2769 {
9ce921ab 2770 rtx x = rtl_hooks.gen_lowpart_no_emit (mode, op0);
0a67e02c
PB
2771 return simplify_gen_unary (NEG, mode, x, mode);
2772 }
2773 }
2774 break;
0cedb36c 2775
0a67e02c
PB
2776 case UMOD:
2777 /* 0%x is 0 (or x&0 if x has side-effects). */
3f2960d5
RH
2778 if (trueop0 == CONST0_RTX (mode))
2779 {
2780 if (side_effects_p (op1))
2781 return simplify_gen_binary (AND, mode, op1, trueop0);
2782 return trueop0;
2783 }
2784 /* x%1 is 0 (of x&0 if x has side-effects). */
2785 if (trueop1 == CONST1_RTX (mode))
2786 {
2787 if (side_effects_p (op0))
2788 return simplify_gen_binary (AND, mode, op0, CONST0_RTX (mode));
2789 return CONST0_RTX (mode);
2790 }
2791 /* Implement modulus by power of two as AND. */
481683e1 2792 if (CONST_INT_P (trueop1)
43c36287 2793 && exact_log2 (UINTVAL (trueop1)) > 0)
3f2960d5
RH
2794 return simplify_gen_binary (AND, mode, op0,
2795 GEN_INT (INTVAL (op1) - 1));
2796 break;
0cedb36c 2797
0a67e02c
PB
2798 case MOD:
2799 /* 0%x is 0 (or x&0 if x has side-effects). */
3f2960d5
RH
2800 if (trueop0 == CONST0_RTX (mode))
2801 {
2802 if (side_effects_p (op1))
2803 return simplify_gen_binary (AND, mode, op1, trueop0);
2804 return trueop0;
2805 }
2806 /* x%1 and x%-1 is 0 (or x&0 if x has side-effects). */
2807 if (trueop1 == CONST1_RTX (mode) || trueop1 == constm1_rtx)
2808 {
2809 if (side_effects_p (op0))
2810 return simplify_gen_binary (AND, mode, op0, CONST0_RTX (mode));
2811 return CONST0_RTX (mode);
2812 }
2813 break;
0cedb36c 2814
0a67e02c
PB
2815 case ROTATERT:
2816 case ROTATE:
2817 case ASHIFTRT:
70233f37
RS
2818 if (trueop1 == CONST0_RTX (mode))
2819 return op0;
2820 if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1))
2821 return op0;
0a67e02c 2822 /* Rotating ~0 always results in ~0. */
481683e1 2823 if (CONST_INT_P (trueop0) && width <= HOST_BITS_PER_WIDE_INT
43c36287 2824 && UINTVAL (trueop0) == GET_MODE_MASK (mode)
0a67e02c
PB
2825 && ! side_effects_p (op1))
2826 return op0;
cbc9503d 2827 canonicalize_shift:
481683e1 2828 if (SHIFT_COUNT_TRUNCATED && CONST_INT_P (op1))
cbc9503d
RS
2829 {
2830 val = INTVAL (op1) & (GET_MODE_BITSIZE (mode) - 1);
2831 if (val != INTVAL (op1))
2832 return simplify_gen_binary (code, mode, op0, GEN_INT (val));
2833 }
70233f37 2834 break;
9d317251 2835
0a67e02c 2836 case ASHIFT:
e551ad26 2837 case SS_ASHIFT:
14c931f1 2838 case US_ASHIFT:
70233f37
RS
2839 if (trueop1 == CONST0_RTX (mode))
2840 return op0;
2841 if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1))
2842 return op0;
cbc9503d 2843 goto canonicalize_shift;
70233f37 2844
0a67e02c 2845 case LSHIFTRT:
3f2960d5 2846 if (trueop1 == CONST0_RTX (mode))
0a67e02c 2847 return op0;
3f2960d5 2848 if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1))
0a67e02c 2849 return op0;
70233f37
RS
2850 /* Optimize (lshiftrt (clz X) C) as (eq X 0). */
2851 if (GET_CODE (op0) == CLZ
481683e1 2852 && CONST_INT_P (trueop1)
70233f37 2853 && STORE_FLAG_VALUE == 1
e40122f0 2854 && INTVAL (trueop1) < (HOST_WIDE_INT)width)
70233f37
RS
2855 {
2856 enum machine_mode imode = GET_MODE (XEXP (op0, 0));
2857 unsigned HOST_WIDE_INT zero_val = 0;
2858
2859 if (CLZ_DEFINED_VALUE_AT_ZERO (imode, zero_val)
2860 && zero_val == GET_MODE_BITSIZE (imode)
2861 && INTVAL (trueop1) == exact_log2 (zero_val))
2862 return simplify_gen_relational (EQ, mode, imode,
2863 XEXP (op0, 0), const0_rtx);
2864 }
cbc9503d 2865 goto canonicalize_shift;
9d317251 2866
0a67e02c
PB
2867 case SMIN:
2868 if (width <= HOST_BITS_PER_WIDE_INT
481683e1 2869 && CONST_INT_P (trueop1)
43c36287 2870 && UINTVAL (trueop1) == (unsigned HOST_WIDE_INT) 1 << (width -1)
0a67e02c
PB
2871 && ! side_effects_p (op0))
2872 return op1;
2873 if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
2874 return op0;
2875 tem = simplify_associative_operation (code, mode, op0, op1);
2876 if (tem)
2877 return tem;
2878 break;
0cedb36c 2879
0a67e02c
PB
2880 case SMAX:
2881 if (width <= HOST_BITS_PER_WIDE_INT
481683e1 2882 && CONST_INT_P (trueop1)
43c36287 2883 && (UINTVAL (trueop1) == GET_MODE_MASK (mode) >> 1)
0a67e02c
PB
2884 && ! side_effects_p (op0))
2885 return op1;
2886 if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
2887 return op0;
2888 tem = simplify_associative_operation (code, mode, op0, op1);
2889 if (tem)
2890 return tem;
2891 break;
0cedb36c 2892
0a67e02c 2893 case UMIN:
3f2960d5 2894 if (trueop1 == CONST0_RTX (mode) && ! side_effects_p (op0))
0a67e02c
PB
2895 return op1;
2896 if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
2897 return op0;
2898 tem = simplify_associative_operation (code, mode, op0, op1);
2899 if (tem)
2900 return tem;
2901 break;
0cedb36c 2902
0a67e02c
PB
2903 case UMAX:
2904 if (trueop1 == constm1_rtx && ! side_effects_p (op0))
2905 return op1;
2906 if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
2907 return op0;
2908 tem = simplify_associative_operation (code, mode, op0, op1);
2909 if (tem)
2910 return tem;
2911 break;
0cedb36c 2912
0a67e02c
PB
2913 case SS_PLUS:
2914 case US_PLUS:
2915 case SS_MINUS:
2916 case US_MINUS:
14c931f1
CF
2917 case SS_MULT:
2918 case US_MULT:
2919 case SS_DIV:
2920 case US_DIV:
0a67e02c
PB
2921 /* ??? There are simplifications that can be done. */
2922 return 0;
0cedb36c 2923
0a67e02c
PB
2924 case VEC_SELECT:
2925 if (!VECTOR_MODE_P (mode))
2926 {
2927 gcc_assert (VECTOR_MODE_P (GET_MODE (trueop0)));
2928 gcc_assert (mode == GET_MODE_INNER (GET_MODE (trueop0)));
2929 gcc_assert (GET_CODE (trueop1) == PARALLEL);
2930 gcc_assert (XVECLEN (trueop1, 0) == 1);
481683e1 2931 gcc_assert (CONST_INT_P (XVECEXP (trueop1, 0, 0)));
0a67e02c
PB
2932
2933 if (GET_CODE (trueop0) == CONST_VECTOR)
2934 return CONST_VECTOR_ELT (trueop0, INTVAL (XVECEXP
2935 (trueop1, 0, 0)));
7f97f938
UB
2936
2937 /* Extract a scalar element from a nested VEC_SELECT expression
2938 (with optional nested VEC_CONCAT expression). Some targets
2939 (i386) extract scalar element from a vector using chain of
2940 nested VEC_SELECT expressions. When input operand is a memory
2941 operand, this operation can be simplified to a simple scalar
2942 load from an offseted memory address. */
2943 if (GET_CODE (trueop0) == VEC_SELECT)
2944 {
2945 rtx op0 = XEXP (trueop0, 0);
2946 rtx op1 = XEXP (trueop0, 1);
2947
2948 enum machine_mode opmode = GET_MODE (op0);
2949 int elt_size = GET_MODE_SIZE (GET_MODE_INNER (opmode));
2950 int n_elts = GET_MODE_SIZE (opmode) / elt_size;
2951
2952 int i = INTVAL (XVECEXP (trueop1, 0, 0));
2953 int elem;
2954
2955 rtvec vec;
2956 rtx tmp_op, tmp;
2957
2958 gcc_assert (GET_CODE (op1) == PARALLEL);
2959 gcc_assert (i < n_elts);
2960
2961 /* Select element, pointed by nested selector. */
3743c639 2962 elem = INTVAL (XVECEXP (op1, 0, i));
7f97f938
UB
2963
2964 /* Handle the case when nested VEC_SELECT wraps VEC_CONCAT. */
2965 if (GET_CODE (op0) == VEC_CONCAT)
2966 {
2967 rtx op00 = XEXP (op0, 0);
2968 rtx op01 = XEXP (op0, 1);
2969
2970 enum machine_mode mode00, mode01;
2971 int n_elts00, n_elts01;
2972
2973 mode00 = GET_MODE (op00);
2974 mode01 = GET_MODE (op01);
2975
2976 /* Find out number of elements of each operand. */
2977 if (VECTOR_MODE_P (mode00))
2978 {
2979 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode00));
2980 n_elts00 = GET_MODE_SIZE (mode00) / elt_size;
2981 }
2982 else
2983 n_elts00 = 1;
2984
2985 if (VECTOR_MODE_P (mode01))
2986 {
2987 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode01));
2988 n_elts01 = GET_MODE_SIZE (mode01) / elt_size;
2989 }
2990 else
2991 n_elts01 = 1;
2992
2993 gcc_assert (n_elts == n_elts00 + n_elts01);
2994
2995 /* Select correct operand of VEC_CONCAT
2996 and adjust selector. */
2997 if (elem < n_elts01)
2998 tmp_op = op00;
2999 else
3000 {
3001 tmp_op = op01;
3002 elem -= n_elts00;
3003 }
3004 }
3005 else
3006 tmp_op = op0;
3007
3008 vec = rtvec_alloc (1);
3009 RTVEC_ELT (vec, 0) = GEN_INT (elem);
3010
3011 tmp = gen_rtx_fmt_ee (code, mode,
3012 tmp_op, gen_rtx_PARALLEL (VOIDmode, vec));
3013 return tmp;
3014 }
0e159e0f
AP
3015 if (GET_CODE (trueop0) == VEC_DUPLICATE
3016 && GET_MODE (XEXP (trueop0, 0)) == mode)
3017 return XEXP (trueop0, 0);
0a67e02c
PB
3018 }
3019 else
3020 {
3021 gcc_assert (VECTOR_MODE_P (GET_MODE (trueop0)));
3022 gcc_assert (GET_MODE_INNER (mode)
3023 == GET_MODE_INNER (GET_MODE (trueop0)));
3024 gcc_assert (GET_CODE (trueop1) == PARALLEL);
0cedb36c 3025
0a67e02c
PB
3026 if (GET_CODE (trueop0) == CONST_VECTOR)
3027 {
3028 int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
3029 unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
3030 rtvec v = rtvec_alloc (n_elts);
3031 unsigned int i;
0cedb36c 3032
0a67e02c
PB
3033 gcc_assert (XVECLEN (trueop1, 0) == (int) n_elts);
3034 for (i = 0; i < n_elts; i++)
3035 {
3036 rtx x = XVECEXP (trueop1, 0, i);
0cedb36c 3037
481683e1 3038 gcc_assert (CONST_INT_P (x));
0a67e02c
PB
3039 RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop0,
3040 INTVAL (x));
0cedb36c
JL
3041 }
3042
0a67e02c 3043 return gen_rtx_CONST_VECTOR (mode, v);
dd61aa98 3044 }
0a67e02c 3045 }
bd1ef757
PB
3046
3047 if (XVECLEN (trueop1, 0) == 1
481683e1 3048 && CONST_INT_P (XVECEXP (trueop1, 0, 0))
bd1ef757
PB
3049 && GET_CODE (trueop0) == VEC_CONCAT)
3050 {
3051 rtx vec = trueop0;
3052 int offset = INTVAL (XVECEXP (trueop1, 0, 0)) * GET_MODE_SIZE (mode);
3053
3054 /* Try to find the element in the VEC_CONCAT. */
3055 while (GET_MODE (vec) != mode
3056 && GET_CODE (vec) == VEC_CONCAT)
3057 {
3058 HOST_WIDE_INT vec_size = GET_MODE_SIZE (GET_MODE (XEXP (vec, 0)));
3059 if (offset < vec_size)
3060 vec = XEXP (vec, 0);
3061 else
3062 {
3063 offset -= vec_size;
3064 vec = XEXP (vec, 1);
3065 }
3066 vec = avoid_constant_pool_reference (vec);
3067 }
3068
3069 if (GET_MODE (vec) == mode)
3070 return vec;
3071 }
3072
0a67e02c
PB
3073 return 0;
3074 case VEC_CONCAT:
3075 {
3076 enum machine_mode op0_mode = (GET_MODE (trueop0) != VOIDmode
3077 ? GET_MODE (trueop0)
3078 : GET_MODE_INNER (mode));
3079 enum machine_mode op1_mode = (GET_MODE (trueop1) != VOIDmode
3080 ? GET_MODE (trueop1)
3081 : GET_MODE_INNER (mode));
3082
3083 gcc_assert (VECTOR_MODE_P (mode));
3084 gcc_assert (GET_MODE_SIZE (op0_mode) + GET_MODE_SIZE (op1_mode)
3085 == GET_MODE_SIZE (mode));
3086
3087 if (VECTOR_MODE_P (op0_mode))
3088 gcc_assert (GET_MODE_INNER (mode)
3089 == GET_MODE_INNER (op0_mode));
3090 else
3091 gcc_assert (GET_MODE_INNER (mode) == op0_mode);
0cedb36c 3092
0a67e02c
PB
3093 if (VECTOR_MODE_P (op1_mode))
3094 gcc_assert (GET_MODE_INNER (mode)
3095 == GET_MODE_INNER (op1_mode));
3096 else
3097 gcc_assert (GET_MODE_INNER (mode) == op1_mode);
3098
3099 if ((GET_CODE (trueop0) == CONST_VECTOR
481683e1 3100 || CONST_INT_P (trueop0)
0a67e02c
PB
3101 || GET_CODE (trueop0) == CONST_DOUBLE)
3102 && (GET_CODE (trueop1) == CONST_VECTOR
481683e1 3103 || CONST_INT_P (trueop1)
0a67e02c
PB
3104 || GET_CODE (trueop1) == CONST_DOUBLE))
3105 {
3106 int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
3107 unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
3108 rtvec v = rtvec_alloc (n_elts);
3109 unsigned int i;
3110 unsigned in_n_elts = 1;
c877353c 3111
0a67e02c
PB
3112 if (VECTOR_MODE_P (op0_mode))
3113 in_n_elts = (GET_MODE_SIZE (op0_mode) / elt_size);
3114 for (i = 0; i < n_elts; i++)
3115 {
3116 if (i < in_n_elts)
3117 {
3118 if (!VECTOR_MODE_P (op0_mode))
3119 RTVEC_ELT (v, i) = trueop0;
3120 else
3121 RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop0, i);
3122 }
3123 else
3124 {
3125 if (!VECTOR_MODE_P (op1_mode))
3126 RTVEC_ELT (v, i) = trueop1;
3127 else
3128 RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop1,
3129 i - in_n_elts);
3130 }
3131 }
0cedb36c 3132
0a67e02c
PB
3133 return gen_rtx_CONST_VECTOR (mode, v);
3134 }
3135 }
3136 return 0;
0cedb36c 3137
0a67e02c
PB
3138 default:
3139 gcc_unreachable ();
3140 }
0cedb36c 3141
0a67e02c
PB
3142 return 0;
3143}
0cedb36c 3144
0a67e02c
PB
/* Try to compute the result of applying binary operation CODE to the
   constant operands OP0 and OP1 in MODE.  Return the folded constant
   rtx on success, or 0/NULL_RTX when the operands are not constants of
   a handled kind or when folding would be unsafe (e.g. a trapping
   division by zero, or a float operation whose result depends on the
   run-time rounding mode).  */
rtx
simplify_const_binary_operation (enum rtx_code code, enum machine_mode mode,
				 rtx op0, rtx op1)
{
  HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
  HOST_WIDE_INT val;
  unsigned int width = GET_MODE_BITSIZE (mode);

  /* Element-wise folding of two constant vectors: fold each pair of
     elements in the inner mode; give up if any element fails.  */
  if (VECTOR_MODE_P (mode)
      && code != VEC_CONCAT
      && GET_CODE (op0) == CONST_VECTOR
      && GET_CODE (op1) == CONST_VECTOR)
    {
      unsigned n_elts = GET_MODE_NUNITS (mode);
      enum machine_mode op0mode = GET_MODE (op0);
      unsigned op0_n_elts = GET_MODE_NUNITS (op0mode);
      enum machine_mode op1mode = GET_MODE (op1);
      unsigned op1_n_elts = GET_MODE_NUNITS (op1mode);
      rtvec v = rtvec_alloc (n_elts);
      unsigned int i;

      gcc_assert (op0_n_elts == n_elts);
      gcc_assert (op1_n_elts == n_elts);
      for (i = 0; i < n_elts; i++)
	{
	  rtx x = simplify_binary_operation (code, GET_MODE_INNER (mode),
					     CONST_VECTOR_ELT (op0, i),
					     CONST_VECTOR_ELT (op1, i));
	  if (!x)
	    return 0;
	  RTVEC_ELT (v, i) = x;
	}

      return gen_rtx_CONST_VECTOR (mode, v);
    }

  /* VEC_CONCAT of constants folds to a CONST_VECTOR whose elements are
     the concatenation of the operands' elements (or the scalar operands
     themselves when the result has exactly two elements).  */
  if (VECTOR_MODE_P (mode)
      && code == VEC_CONCAT
      && (CONST_INT_P (op0)
	  || GET_CODE (op0) == CONST_DOUBLE
	  || GET_CODE (op0) == CONST_FIXED)
      && (CONST_INT_P (op1)
	  || GET_CODE (op1) == CONST_DOUBLE
	  || GET_CODE (op1) == CONST_FIXED))
    {
      unsigned n_elts = GET_MODE_NUNITS (mode);
      rtvec v = rtvec_alloc (n_elts);

      gcc_assert (n_elts >= 2);
      if (n_elts == 2)
	{
	  /* Two scalar operands become the two vector elements.  */
	  gcc_assert (GET_CODE (op0) != CONST_VECTOR);
	  gcc_assert (GET_CODE (op1) != CONST_VECTOR);

	  RTVEC_ELT (v, 0) = op0;
	  RTVEC_ELT (v, 1) = op1;
	}
      else
	{
	  /* Both operands are constant vectors; splice their elements.  */
	  unsigned op0_n_elts = GET_MODE_NUNITS (GET_MODE (op0));
	  unsigned op1_n_elts = GET_MODE_NUNITS (GET_MODE (op1));
	  unsigned i;

	  gcc_assert (GET_CODE (op0) == CONST_VECTOR);
	  gcc_assert (GET_CODE (op1) == CONST_VECTOR);
	  gcc_assert (op0_n_elts + op1_n_elts == n_elts);

	  for (i = 0; i < op0_n_elts; ++i)
	    RTVEC_ELT (v, i) = XVECEXP (op0, 0, i);
	  for (i = 0; i < op1_n_elts; ++i)
	    RTVEC_ELT (v, op0_n_elts+i) = XVECEXP (op1, 0, i);
	}

      return gen_rtx_CONST_VECTOR (mode, v);
    }

  /* Scalar floating-point folding.  */
  if (SCALAR_FLOAT_MODE_P (mode)
      && GET_CODE (op0) == CONST_DOUBLE
      && GET_CODE (op1) == CONST_DOUBLE
      && mode == GET_MODE (op0) && mode == GET_MODE (op1))
    {
      if (code == AND
	  || code == IOR
	  || code == XOR)
	{
	  /* Logical operations on floats act on the target bit
	     representation: convert to target format, combine the words,
	     and convert back.  */
	  long tmp0[4];
	  long tmp1[4];
	  REAL_VALUE_TYPE r;
	  int i;

	  real_to_target (tmp0, CONST_DOUBLE_REAL_VALUE (op0),
			  GET_MODE (op0));
	  real_to_target (tmp1, CONST_DOUBLE_REAL_VALUE (op1),
			  GET_MODE (op1));
	  for (i = 0; i < 4; i++)
	    {
	      switch (code)
	      {
	      case AND:
		tmp0[i] &= tmp1[i];
		break;
	      case IOR:
		tmp0[i] |= tmp1[i];
		break;
	      case XOR:
		tmp0[i] ^= tmp1[i];
		break;
	      default:
		gcc_unreachable ();
	      }
	    }
	   real_from_target (&r, tmp0, mode);
	   return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
	}
      else
	{
	  REAL_VALUE_TYPE f0, f1, value, result;
	  bool inexact;

	  REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
	  REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
	  real_convert (&f0, mode, &f0);
	  real_convert (&f1, mode, &f1);

	  /* Signalling NaNs must be left alone so the trap survives.  */
	  if (HONOR_SNANS (mode)
	      && (REAL_VALUE_ISNAN (f0) || REAL_VALUE_ISNAN (f1)))
	    return 0;

	  /* x / 0.0 may trap (or has no infinity to fold to).  */
	  if (code == DIV
	      && REAL_VALUES_EQUAL (f1, dconst0)
	      && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	    return 0;

	  /* Operations combining two infinities can raise invalid-operation
	     exceptions; when trapping math is honored, leave them alone.  */
	  if (MODE_HAS_INFINITIES (mode) && HONOR_NANS (mode)
	      && flag_trapping_math
	      && REAL_VALUE_ISINF (f0) && REAL_VALUE_ISINF (f1))
	    {
	      int s0 = REAL_VALUE_NEGATIVE (f0);
	      int s1 = REAL_VALUE_NEGATIVE (f1);

	      switch (code)
		{
		case PLUS:
		  /* Inf + -Inf = NaN plus exception.  */
		  if (s0 != s1)
		    return 0;
		  break;
		case MINUS:
		  /* Inf - Inf = NaN plus exception.  */
		  if (s0 == s1)
		    return 0;
		  break;
		case DIV:
		  /* Inf / Inf = NaN plus exception.  */
		  return 0;
		default:
		  break;
		}
	    }

	  if (code == MULT && MODE_HAS_INFINITIES (mode) && HONOR_NANS (mode)
	      && flag_trapping_math
	      && ((REAL_VALUE_ISINF (f0) && REAL_VALUES_EQUAL (f1, dconst0))
		  || (REAL_VALUE_ISINF (f1)
		      && REAL_VALUES_EQUAL (f0, dconst0))))
	    /* Inf * 0 = NaN plus exception.  */
	    return 0;

	  inexact = real_arithmetic (&value, rtx_to_tree_code (code),
				     &f0, &f1);
	  real_convert (&result, mode, &value);

	  /* Don't constant fold this floating point operation if
	     the result has overflowed and flag_trapping_math.  */

	  if (flag_trapping_math
	      && MODE_HAS_INFINITIES (mode)
	      && REAL_VALUE_ISINF (result)
	      && !REAL_VALUE_ISINF (f0)
	      && !REAL_VALUE_ISINF (f1))
	    /* Overflow plus exception.  */
	    return 0;

	  /* Don't constant fold this floating point operation if the
	     result may depend upon the run-time rounding mode and
	     flag_rounding_math is set, or if GCC's software emulation
	     is unable to accurately represent the result.  */

	  if ((flag_rounding_math
	       || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	      && (inexact || !real_identical (&result, &value)))
	    return NULL_RTX;

	  return CONST_DOUBLE_FROM_REAL_VALUE (result, mode);
	}
    }

  /* We can fold some multi-word operations.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && width == HOST_BITS_PER_DOUBLE_INT
      && (CONST_DOUBLE_P (op0) || CONST_INT_P (op0))
      && (CONST_DOUBLE_P (op1) || CONST_INT_P (op1)))
    {
      double_int o0, o1, res, tmp;

      o0 = rtx_to_double_int (op0);
      o1 = rtx_to_double_int (op1);

      switch (code)
	{
	case MINUS:
	  /* A - B == A + (-B).  */
	  o1 = double_int_neg (o1);

	  /* Fall through....  */

	case PLUS:
	  res = double_int_add (o0, o1);
	  break;

	case MULT:
	  res = double_int_mul (o0, o1);
	  break;

	case DIV:
	  /* div_and_round_double returns nonzero on overflow; in that
	     case the operation cannot be folded.  */
	  if (div_and_round_double (TRUNC_DIV_EXPR, 0,
				    o0.low, o0.high, o1.low, o1.high,
				    &res.low, &res.high,
				    &tmp.low, &tmp.high))
	    return 0;
	  break;

	case MOD:
	  /* The remainder is the second pair of outputs.  */
	  if (div_and_round_double (TRUNC_DIV_EXPR, 0,
				    o0.low, o0.high, o1.low, o1.high,
				    &tmp.low, &tmp.high,
				    &res.low, &res.high))
	    return 0;
	  break;

	case UDIV:
	  if (div_and_round_double (TRUNC_DIV_EXPR, 1,
				    o0.low, o0.high, o1.low, o1.high,
				    &res.low, &res.high,
				    &tmp.low, &tmp.high))
	    return 0;
	  break;

	case UMOD:
	  if (div_and_round_double (TRUNC_DIV_EXPR, 1,
				    o0.low, o0.high, o1.low, o1.high,
				    &tmp.low, &tmp.high,
				    &res.low, &res.high))
	    return 0;
	  break;

	case AND:
	  res = double_int_and (o0, o1);
	  break;

	case IOR:
	  res = double_int_ior (o0, o1);
	  break;

	case XOR:
	  res = double_int_xor (o0, o1);
	  break;

	case SMIN:
	  res = double_int_smin (o0, o1);
	  break;

	case SMAX:
	  res = double_int_smax (o0, o1);
	  break;

	case UMIN:
	  res = double_int_umin (o0, o1);
	  break;

	case UMAX:
	  res = double_int_umax (o0, o1);
	  break;

	case LSHIFTRT: case ASHIFTRT:
	case ASHIFT:
	case ROTATE: case ROTATERT:
	  {
	    unsigned HOST_WIDE_INT cnt;

	    /* When the target truncates shift counts, reduce the count
	       modulo the bitsize first; otherwise an out-of-range count
	       makes the fold invalid.  */
	    if (SHIFT_COUNT_TRUNCATED)
	      o1 = double_int_zext (o1, GET_MODE_BITSIZE (mode));

	    if (!double_int_fits_in_uhwi_p (o1)
		|| double_int_to_uhwi (o1) >= GET_MODE_BITSIZE (mode))
	      return 0;

	    cnt = double_int_to_uhwi (o1);

	    if (code == LSHIFTRT || code == ASHIFTRT)
	      res = double_int_rshift (o0, cnt, GET_MODE_BITSIZE (mode),
				       code == ASHIFTRT);
	    else if (code == ASHIFT)
	      res = double_int_lshift (o0, cnt, GET_MODE_BITSIZE (mode),
				       true);
	    else if (code == ROTATE)
	      res = double_int_lrotate (o0, cnt, GET_MODE_BITSIZE (mode));
	    else /* code == ROTATERT */
	      res = double_int_rrotate (o0, cnt, GET_MODE_BITSIZE (mode));
	  }
	  break;

	default:
	  return 0;
	}

      return immed_double_int_const (res, mode);
    }

  /* Single-word integer folding in host arithmetic.  */
  if (CONST_INT_P (op0) && CONST_INT_P (op1)
      && width <= HOST_BITS_PER_WIDE_INT && width != 0)
    {
      /* Get the integer argument values in two forms:
	 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S.  */

      arg0 = INTVAL (op0);
      arg1 = INTVAL (op1);

      if (width < HOST_BITS_PER_WIDE_INT)
	{
	  arg0 &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;
	  arg1 &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;

	  arg0s = arg0;
	  if (arg0s & ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
	    arg0s |= ((unsigned HOST_WIDE_INT) (-1) << width);

	  arg1s = arg1;
	  if (arg1s & ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
	    arg1s |= ((unsigned HOST_WIDE_INT) (-1) << width);
	}
      else
	{
	  arg0s = arg0;
	  arg1s = arg1;
	}

      /* Compute the value of the arithmetic.  */

      switch (code)
	{
	case PLUS:
	  val = arg0s + arg1s;
	  break;

	case MINUS:
	  val = arg0s - arg1s;
	  break;

	case MULT:
	  val = arg0s * arg1s;
	  break;

	case DIV:
	  /* Don't fold division by zero or the overflowing
	     most-negative-value / -1 case.  */
	  if (arg1s == 0
	      || ((unsigned HOST_WIDE_INT) arg0s
		  == (unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
		  && arg1s == -1))
	    return 0;
	  val = arg0s / arg1s;
	  break;

	case MOD:
	  if (arg1s == 0
	      || ((unsigned HOST_WIDE_INT) arg0s
		  == (unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
		  && arg1s == -1))
	    return 0;
	  val = arg0s % arg1s;
	  break;

	case UDIV:
	  if (arg1 == 0
	      || ((unsigned HOST_WIDE_INT) arg0s
		  == (unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
		  && arg1s == -1))
	    return 0;
	  val = (unsigned HOST_WIDE_INT) arg0 / arg1;
	  break;

	case UMOD:
	  if (arg1 == 0
	      || ((unsigned HOST_WIDE_INT) arg0s
		  == (unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
		  && arg1s == -1))
	    return 0;
	  val = (unsigned HOST_WIDE_INT) arg0 % arg1;
	  break;

	case AND:
	  val = arg0 & arg1;
	  break;

	case IOR:
	  val = arg0 | arg1;
	  break;

	case XOR:
	  val = arg0 ^ arg1;
	  break;

	case LSHIFTRT:
	case ASHIFT:
	case ASHIFTRT:
	  /* Truncate the shift if SHIFT_COUNT_TRUNCATED, otherwise make sure
	     the value is in range.  We can't return any old value for
	     out-of-range arguments because either the middle-end (via
	     shift_truncation_mask) or the back-end might be relying on
	     target-specific knowledge.  Nor can we rely on
	     shift_truncation_mask, since the shift might not be part of an
	     ashlM3, lshrM3 or ashrM3 instruction.  */
	  if (SHIFT_COUNT_TRUNCATED)
	    arg1 = (unsigned HOST_WIDE_INT) arg1 % width;
	  else if (arg1 < 0 || arg1 >= GET_MODE_BITSIZE (mode))
	    return 0;

	  val = (code == ASHIFT
		 ? ((unsigned HOST_WIDE_INT) arg0) << arg1
		 : ((unsigned HOST_WIDE_INT) arg0) >> arg1);

	  /* Sign-extend the result for arithmetic right shifts.  */
	  if (code == ASHIFTRT && arg0s < 0 && arg1 > 0)
	    val |= ((unsigned HOST_WIDE_INT) (-1)) << (width - arg1);
	  break;

	case ROTATERT:
	  if (arg1 < 0)
	    return 0;

	  arg1 %= width;
	  val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
		 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
	  break;

	case ROTATE:
	  if (arg1 < 0)
	    return 0;

	  arg1 %= width;
	  val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
		 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
	  break;

	case COMPARE:
	  /* Do nothing here.  */
	  return 0;

	case SMIN:
	  val = arg0s <= arg1s ? arg0s : arg1s;
	  break;

	case UMIN:
	  val = ((unsigned HOST_WIDE_INT) arg0
		 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
	  break;

	case SMAX:
	  val = arg0s > arg1s ? arg0s : arg1s;
	  break;

	case UMAX:
	  val = ((unsigned HOST_WIDE_INT) arg0
		 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
	  break;

	case SS_PLUS:
	case US_PLUS:
	case SS_MINUS:
	case US_MINUS:
	case SS_MULT:
	case US_MULT:
	case SS_DIV:
	case US_DIV:
	case SS_ASHIFT:
	case US_ASHIFT:
	  /* ??? There are simplifications that can be done.  */
	  return 0;

	default:
	  gcc_unreachable ();
	}

      return gen_int_mode (val, mode);
    }

  return NULL_RTX;
}
0a67e02c
PB
3642
3643
0cedb36c
JL
3644\f
3645/* Simplify a PLUS or MINUS, at least one of whose operands may be another
3646 PLUS or MINUS.
3647
3648 Rather than test for specific case, we do this by a brute-force method
3649 and do all possible simplifications until no more changes occur. Then
1941069a 3650 we rebuild the operation. */
0cedb36c 3651
9b3bd424
RH
/* One entry in the operand array that simplify_plus_minus builds while
   flattening a nested PLUS/MINUS expression.  */
struct simplify_plus_minus_op_data
{
  rtx op;	/* The operand itself.  */
  short neg;	/* Nonzero when the operand is subtracted (negated).  */
};
3657
7e0b4eae
PB
3658static bool
3659simplify_plus_minus_op_data_cmp (rtx x, rtx y)
9b3bd424 3660{
f805670f 3661 int result;
9b3bd424 3662
7e0b4eae
PB
3663 result = (commutative_operand_precedence (y)
3664 - commutative_operand_precedence (x));
f805670f 3665 if (result)
7e0b4eae 3666 return result > 0;
d26cef13
PB
3667
3668 /* Group together equal REGs to do more simplification. */
7e0b4eae
PB
3669 if (REG_P (x) && REG_P (y))
3670 return REGNO (x) > REGNO (y);
d26cef13 3671 else
7e0b4eae 3672 return false;
9b3bd424
RH
3673}
3674
0cedb36c 3675static rtx
46c5ad27 3676simplify_plus_minus (enum rtx_code code, enum machine_mode mode, rtx op0,
1941069a 3677 rtx op1)
0cedb36c 3678{
9b3bd424 3679 struct simplify_plus_minus_op_data ops[8];
0cedb36c 3680 rtx result, tem;
36686ad6 3681 int n_ops = 2, input_ops = 2;
d26cef13 3682 int changed, n_constants = 0, canonicalized = 0;
0cedb36c
JL
3683 int i, j;
3684
703ad42b 3685 memset (ops, 0, sizeof ops);
786de7eb 3686
0cedb36c
JL
3687 /* Set up the two operands and then expand them until nothing has been
3688 changed. If we run out of room in our array, give up; this should
3689 almost never happen. */
3690
9b3bd424
RH
3691 ops[0].op = op0;
3692 ops[0].neg = 0;
3693 ops[1].op = op1;
3694 ops[1].neg = (code == MINUS);
0cedb36c 3695
9b3bd424 3696 do
0cedb36c
JL
3697 {
3698 changed = 0;
3699
3700 for (i = 0; i < n_ops; i++)
9b3bd424
RH
3701 {
3702 rtx this_op = ops[i].op;
3703 int this_neg = ops[i].neg;
3704 enum rtx_code this_code = GET_CODE (this_op);
0cedb36c 3705
9b3bd424
RH
3706 switch (this_code)
3707 {
3708 case PLUS:
3709 case MINUS:
3710 if (n_ops == 7)
e16e3291 3711 return NULL_RTX;
0cedb36c 3712
9b3bd424
RH
3713 ops[n_ops].op = XEXP (this_op, 1);
3714 ops[n_ops].neg = (this_code == MINUS) ^ this_neg;
3715 n_ops++;
3716
3717 ops[i].op = XEXP (this_op, 0);
3718 input_ops++;
3719 changed = 1;
1941069a 3720 canonicalized |= this_neg;
9b3bd424
RH
3721 break;
3722
3723 case NEG:
3724 ops[i].op = XEXP (this_op, 0);
3725 ops[i].neg = ! this_neg;
3726 changed = 1;
1941069a 3727 canonicalized = 1;
9b3bd424
RH
3728 break;
3729
3730 case CONST:
e3c8ea67
RH
3731 if (n_ops < 7
3732 && GET_CODE (XEXP (this_op, 0)) == PLUS
3733 && CONSTANT_P (XEXP (XEXP (this_op, 0), 0))
3734 && CONSTANT_P (XEXP (XEXP (this_op, 0), 1)))
3735 {
3736 ops[i].op = XEXP (XEXP (this_op, 0), 0);
3737 ops[n_ops].op = XEXP (XEXP (this_op, 0), 1);
3738 ops[n_ops].neg = this_neg;
3739 n_ops++;
e3c8ea67 3740 changed = 1;
1941069a 3741 canonicalized = 1;
e3c8ea67 3742 }
9b3bd424
RH
3743 break;
3744
3745 case NOT:
3746 /* ~a -> (-a - 1) */
3747 if (n_ops != 7)
3748 {
3749 ops[n_ops].op = constm1_rtx;
2e951384 3750 ops[n_ops++].neg = this_neg;
9b3bd424
RH
3751 ops[i].op = XEXP (this_op, 0);
3752 ops[i].neg = !this_neg;
3753 changed = 1;
1941069a 3754 canonicalized = 1;
9b3bd424
RH
3755 }
3756 break;
0cedb36c 3757
9b3bd424 3758 case CONST_INT:
d26cef13 3759 n_constants++;
9b3bd424
RH
3760 if (this_neg)
3761 {
aff8a8d5 3762 ops[i].op = neg_const_int (mode, this_op);
9b3bd424
RH
3763 ops[i].neg = 0;
3764 changed = 1;
1941069a 3765 canonicalized = 1;
9b3bd424
RH
3766 }
3767 break;
0cedb36c 3768
9b3bd424
RH
3769 default:
3770 break;
3771 }
3772 }
0cedb36c 3773 }
9b3bd424 3774 while (changed);
0cedb36c 3775
d26cef13
PB
3776 if (n_constants > 1)
3777 canonicalized = 1;
36686ad6 3778
d26cef13 3779 gcc_assert (n_ops >= 2);
0cedb36c 3780
1941069a
PB
3781 /* If we only have two operands, we can avoid the loops. */
3782 if (n_ops == 2)
3783 {
3784 enum rtx_code code = ops[0].neg || ops[1].neg ? MINUS : PLUS;
3785 rtx lhs, rhs;
3786
3787 /* Get the two operands. Be careful with the order, especially for
3788 the cases where code == MINUS. */
3789 if (ops[0].neg && ops[1].neg)
3790 {
3791 lhs = gen_rtx_NEG (mode, ops[0].op);
3792 rhs = ops[1].op;
3793 }
3794 else if (ops[0].neg)
3795 {
3796 lhs = ops[1].op;
3797 rhs = ops[0].op;
3798 }
3799 else
3800 {
3801 lhs = ops[0].op;
3802 rhs = ops[1].op;
3803 }
3804
3805 return simplify_const_binary_operation (code, mode, lhs, rhs);
3806 }
3807
d26cef13 3808 /* Now simplify each pair of operands until nothing changes. */
9b3bd424 3809 do
0cedb36c 3810 {
d26cef13
PB
3811 /* Insertion sort is good enough for an eight-element array. */
3812 for (i = 1; i < n_ops; i++)
3813 {
3814 struct simplify_plus_minus_op_data save;
3815 j = i - 1;
7e0b4eae 3816 if (!simplify_plus_minus_op_data_cmp (ops[j].op, ops[i].op))
d26cef13
PB
3817 continue;
3818
3819 canonicalized = 1;
3820 save = ops[i];
3821 do
3822 ops[j + 1] = ops[j];
7e0b4eae 3823 while (j-- && simplify_plus_minus_op_data_cmp (ops[j].op, save.op));
d26cef13
PB
3824 ops[j + 1] = save;
3825 }
0cedb36c 3826
d26cef13
PB
3827 changed = 0;
3828 for (i = n_ops - 1; i > 0; i--)
3829 for (j = i - 1; j >= 0; j--)
9b3bd424 3830 {
d26cef13
PB
3831 rtx lhs = ops[j].op, rhs = ops[i].op;
3832 int lneg = ops[j].neg, rneg = ops[i].neg;
0cedb36c 3833
d26cef13 3834 if (lhs != 0 && rhs != 0)
9b3bd424
RH
3835 {
3836 enum rtx_code ncode = PLUS;
3837
3838 if (lneg != rneg)
3839 {
3840 ncode = MINUS;
3841 if (lneg)
3842 tem = lhs, lhs = rhs, rhs = tem;
3843 }
3844 else if (swap_commutative_operands_p (lhs, rhs))
3845 tem = lhs, lhs = rhs, rhs = tem;
3846
481683e1
SZ
3847 if ((GET_CODE (lhs) == CONST || CONST_INT_P (lhs))
3848 && (GET_CODE (rhs) == CONST || CONST_INT_P (rhs)))
349f4ea1
AK
3849 {
3850 rtx tem_lhs, tem_rhs;
3851
3852 tem_lhs = GET_CODE (lhs) == CONST ? XEXP (lhs, 0) : lhs;
3853 tem_rhs = GET_CODE (rhs) == CONST ? XEXP (rhs, 0) : rhs;
3854 tem = simplify_binary_operation (ncode, mode, tem_lhs, tem_rhs);
9b3bd424 3855
349f4ea1
AK
3856 if (tem && !CONSTANT_P (tem))
3857 tem = gen_rtx_CONST (GET_MODE (tem), tem);
3858 }
3859 else
3860 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
b8698a0f 3861
786de7eb 3862 /* Reject "simplifications" that just wrap the two
9b3bd424
RH
3863 arguments in a CONST. Failure to do so can result
3864 in infinite recursion with simplify_binary_operation
3865 when it calls us to simplify CONST operations. */
3866 if (tem
3867 && ! (GET_CODE (tem) == CONST
3868 && GET_CODE (XEXP (tem, 0)) == ncode
3869 && XEXP (XEXP (tem, 0), 0) == lhs
d26cef13 3870 && XEXP (XEXP (tem, 0), 1) == rhs))
9b3bd424
RH
3871 {
3872 lneg &= rneg;
3873 if (GET_CODE (tem) == NEG)
3874 tem = XEXP (tem, 0), lneg = !lneg;
481683e1 3875 if (CONST_INT_P (tem) && lneg)
aff8a8d5 3876 tem = neg_const_int (mode, tem), lneg = 0;
9b3bd424
RH
3877
3878 ops[i].op = tem;
3879 ops[i].neg = lneg;
3880 ops[j].op = NULL_RTX;
3881 changed = 1;
dc5b3407 3882 canonicalized = 1;
9b3bd424
RH
3883 }
3884 }
3885 }
0cedb36c 3886
dc5b3407
ZD
3887 /* If nothing changed, fail. */
3888 if (!canonicalized)
3889 return NULL_RTX;
3890
d26cef13
PB
3891 /* Pack all the operands to the lower-numbered entries. */
3892 for (i = 0, j = 0; j < n_ops; j++)
3893 if (ops[j].op)
3894 {
3895 ops[i] = ops[j];
3896 i++;
3897 }
3898 n_ops = i;
0cedb36c 3899 }
9b3bd424 3900 while (changed);
0cedb36c 3901
c877353c
RS
3902 /* Create (minus -C X) instead of (neg (const (plus X C))). */
3903 if (n_ops == 2
481683e1 3904 && CONST_INT_P (ops[1].op)
c877353c
RS
3905 && CONSTANT_P (ops[0].op)
3906 && ops[0].neg)
3907 return gen_rtx_fmt_ee (MINUS, mode, ops[1].op, ops[0].op);
b8698a0f 3908
9b3bd424
RH
3909 /* We suppressed creation of trivial CONST expressions in the
3910 combination loop to avoid recursion. Create one manually now.
3911 The combination loop should have ensured that there is exactly
3912 one CONST_INT, and the sort will have ensured that it is last
3913 in the array and that any other constant will be next-to-last. */
0cedb36c 3914
9b3bd424 3915 if (n_ops > 1
481683e1 3916 && CONST_INT_P (ops[n_ops - 1].op)
9b3bd424
RH
3917 && CONSTANT_P (ops[n_ops - 2].op))
3918 {
aff8a8d5 3919 rtx value = ops[n_ops - 1].op;
4768dbdd 3920 if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
aff8a8d5
CM
3921 value = neg_const_int (mode, value);
3922 ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value));
9b3bd424
RH
3923 n_ops--;
3924 }
3925
0786ca87 3926 /* Put a non-negated operand first, if possible. */
0cedb36c 3927
9b3bd424
RH
3928 for (i = 0; i < n_ops && ops[i].neg; i++)
3929 continue;
0cedb36c 3930 if (i == n_ops)
0786ca87 3931 ops[0].op = gen_rtx_NEG (mode, ops[0].op);
0cedb36c
JL
3932 else if (i != 0)
3933 {
9b3bd424
RH
3934 tem = ops[0].op;
3935 ops[0] = ops[i];
3936 ops[i].op = tem;
3937 ops[i].neg = 1;
0cedb36c
JL
3938 }
3939
3940 /* Now make the result by performing the requested operations. */
9b3bd424 3941 result = ops[0].op;
0cedb36c 3942 for (i = 1; i < n_ops; i++)
9b3bd424
RH
3943 result = gen_rtx_fmt_ee (ops[i].neg ? MINUS : PLUS,
3944 mode, result, ops[i].op);
0cedb36c 3945
0786ca87 3946 return result;
0cedb36c
JL
3947}
3948
5ac20c1a
RS
3949/* Check whether an operand is suitable for calling simplify_plus_minus. */
3950static bool
f7d504c2 3951plus_minus_operand_p (const_rtx x)
5ac20c1a
RS
3952{
3953 return GET_CODE (x) == PLUS
3954 || GET_CODE (x) == MINUS
3955 || (GET_CODE (x) == CONST
3956 && GET_CODE (XEXP (x, 0)) == PLUS
3957 && CONSTANT_P (XEXP (XEXP (x, 0), 0))
3958 && CONSTANT_P (XEXP (XEXP (x, 0), 1)));
3959}
3960
/* Like simplify_binary_operation except used for relational operators.
   MODE is the mode of the result. If MODE is VOIDmode, both operands must
   not also be VOIDmode.

   CMP_MODE specifies in which mode the comparison is done in, so it is
   the mode of the operands.  If CMP_MODE is VOIDmode, it is taken from
   the operands or, if both are VOIDmode, the operands are compared in
   "infinite precision".

   Returns a simplified rtx or NULL_RTX if no simplification was found.  */
rtx
simplify_relational_operation (enum rtx_code code, enum machine_mode mode,
			       enum machine_mode cmp_mode, rtx op0, rtx op1)
{
  rtx tem, trueop0, trueop1;

  /* Infer the comparison mode from the operands when not supplied.  */
  if (cmp_mode == VOIDmode)
    cmp_mode = GET_MODE (op0);
  if (cmp_mode == VOIDmode)
    cmp_mode = GET_MODE (op1);

  /* First try to fold the comparison to a constant outright.  */
  tem = simplify_const_relational_operation (code, cmp_mode, op0, op1);
  if (tem)
    {
      /* The folded result is const0_rtx/const_true_rtx; materialize it
	 in the requested result mode for float and vector results.  */
      if (SCALAR_FLOAT_MODE_P (mode))
	{
	  if (tem == const0_rtx)
	    return CONST0_RTX (mode);
#ifdef FLOAT_STORE_FLAG_VALUE
	  {
	    REAL_VALUE_TYPE val;
	    val = FLOAT_STORE_FLAG_VALUE (mode);
	    return CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
	  }
#else
	  /* No target-defined "true" value for float results.  */
	  return NULL_RTX;
#endif
	}
      if (VECTOR_MODE_P (mode))
	{
	  if (tem == const0_rtx)
	    return CONST0_RTX (mode);
#ifdef VECTOR_STORE_FLAG_VALUE
	  {
	    int i, units;
	    rtvec v;

	    rtx val = VECTOR_STORE_FLAG_VALUE (mode);
	    if (val == NULL_RTX)
	      return NULL_RTX;
	    if (val == const1_rtx)
	      return CONST1_RTX (mode);

	    /* Broadcast the per-element "true" value into a CONST_VECTOR.  */
	    units = GET_MODE_NUNITS (mode);
	    v = rtvec_alloc (units);
	    for (i = 0; i < units; i++)
	      RTVEC_ELT (v, i) = val;
	    return gen_rtx_raw_CONST_VECTOR (mode, v);
	  }
#else
	  return NULL_RTX;
#endif
	}

      return tem;
    }

  /* For the following tests, ensure const0_rtx is op1.  */
  if (swap_commutative_operands_p (op0, op1)
      || (op0 == const0_rtx && op1 != const0_rtx))
    tem = op0, op0 = op1, op1 = tem, code = swap_condition (code);

  /* If op0 is a compare, extract the comparison arguments from it.  */
  if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
    return simplify_gen_relational (code, mode, VOIDmode,
				    XEXP (op0, 0), XEXP (op0, 1));

  /* The rules in simplify_relational_operation_1 only apply to real
     comparisons, not to condition-code registers or cc0.  */
  if (GET_MODE_CLASS (cmp_mode) == MODE_CC
      || CC0_P (op0))
    return NULL_RTX;

  /* Look through constant-pool references before the detailed rules.  */
  trueop0 = avoid_constant_pool_reference (op0);
  trueop1 = avoid_constant_pool_reference (op1);
  return simplify_relational_operation_1 (code, mode, cmp_mode,
					  trueop0, trueop1);
}
4045
/* This part of simplify_relational_operation is only used when CMP_MODE
   is not in class MODE_CC (i.e. it is a real comparison).

   MODE is the mode of the result, while CMP_MODE specifies in which
   mode the comparison is done in, so it is the mode of the operands.

   Applies an ordered list of canonicalization rules; returns the
   simplified rtx, or NULL_RTX if none of the rules match.  */

static rtx
simplify_relational_operation_1 (enum rtx_code code, enum machine_mode mode,
				 enum machine_mode cmp_mode, rtx op0, rtx op1)
{
  enum rtx_code op0code = GET_CODE (op0);

  if (op1 == const0_rtx && COMPARISON_P (op0))
    {
      /* If op0 is a comparison, extract the comparison arguments
         from it.  */
      if (code == NE)
	{
	  /* (ne (cmp a b) 0) is just (cmp a b), possibly re-moded.  */
	  if (GET_MODE (op0) == mode)
	    return simplify_rtx (op0);
	  else
	    return simplify_gen_relational (GET_CODE (op0), mode, VOIDmode,
					    XEXP (op0, 0), XEXP (op0, 1));
	}
      else if (code == EQ)
	{
	  /* (eq (cmp a b) 0) is the reversed comparison, when the
	     reversal is known to be safe.  */
	  enum rtx_code new_code = reversed_comparison_code (op0, NULL_RTX);
	  if (new_code != UNKNOWN)
	    return simplify_gen_relational (new_code, mode, VOIDmode,
					    XEXP (op0, 0), XEXP (op0, 1));
	}
    }

  /* (LTU/GEU (PLUS a C) C), where C is constant, can be simplified to
     (GEU/LTU a -C).  Likewise for (LTU/GEU (PLUS a C) a).  */
  if ((code == LTU || code == GEU)
      && GET_CODE (op0) == PLUS
      && CONST_INT_P (XEXP (op0, 1))
      && (rtx_equal_p (op1, XEXP (op0, 0))
	  || rtx_equal_p (op1, XEXP (op0, 1))))
    {
      rtx new_cmp
	= simplify_gen_unary (NEG, cmp_mode, XEXP (op0, 1), cmp_mode);
      return simplify_gen_relational ((code == LTU ? GEU : LTU), mode,
				      cmp_mode, XEXP (op0, 0), new_cmp);
    }

  /* Canonicalize (LTU/GEU (PLUS a b) b) as (LTU/GEU (PLUS a b) a).  */
  if ((code == LTU || code == GEU)
      && GET_CODE (op0) == PLUS
      && rtx_equal_p (op1, XEXP (op0, 1))
      /* Don't recurse "infinitely" for (LTU/GEU (PLUS b b) b).  */
      && !rtx_equal_p (op1, XEXP (op0, 0)))
    return simplify_gen_relational (code, mode, cmp_mode, op0,
				    copy_rtx (XEXP (op0, 0)));

  if (op1 == const0_rtx)
    {
      /* Canonicalize (GTU x 0) as (NE x 0).  */
      if (code == GTU)
        return simplify_gen_relational (NE, mode, cmp_mode, op0, op1);
      /* Canonicalize (LEU x 0) as (EQ x 0).  */
      if (code == LEU)
        return simplify_gen_relational (EQ, mode, cmp_mode, op0, op1);
    }
  else if (op1 == const1_rtx)
    {
      switch (code)
        {
        case GE:
	  /* Canonicalize (GE x 1) as (GT x 0).  */
	  return simplify_gen_relational (GT, mode, cmp_mode,
					  op0, const0_rtx);
	case GEU:
	  /* Canonicalize (GEU x 1) as (NE x 0).  */
	  return simplify_gen_relational (NE, mode, cmp_mode,
					  op0, const0_rtx);
	case LT:
	  /* Canonicalize (LT x 1) as (LE x 0).  */
	  return simplify_gen_relational (LE, mode, cmp_mode,
					  op0, const0_rtx);
	case LTU:
	  /* Canonicalize (LTU x 1) as (EQ x 0).  */
	  return simplify_gen_relational (EQ, mode, cmp_mode,
					  op0, const0_rtx);
	default:
	  break;
	}
    }
  else if (op1 == constm1_rtx)
    {
      /* Canonicalize (LE x -1) as (LT x 0).  */
      if (code == LE)
	return simplify_gen_relational (LT, mode, cmp_mode, op0, const0_rtx);
      /* Canonicalize (GT x -1) as (GE x 0).  */
      if (code == GT)
	return simplify_gen_relational (GE, mode, cmp_mode, op0, const0_rtx);
    }

  /* (eq/ne (plus x cst1) cst2) simplifies to (eq/ne x (cst2 - cst1))  */
  if ((code == EQ || code == NE)
      && (op0code == PLUS || op0code == MINUS)
      && CONSTANT_P (op1)
      && CONSTANT_P (XEXP (op0, 1))
      /* For floats this is only safe under -funsafe-math-optimizations,
	 since the subtraction may not be exact.  */
      && (INTEGRAL_MODE_P (cmp_mode) || flag_unsafe_math_optimizations))
    {
      rtx x = XEXP (op0, 0);
      rtx c = XEXP (op0, 1);

      c = simplify_gen_binary (op0code == PLUS ? MINUS : PLUS,
			       cmp_mode, op1, c);
      return simplify_gen_relational (code, mode, cmp_mode, x, c);
    }

  /* (ne:SI (zero_extract:SI FOO (const_int 1) BAR) (const_int 0))) is
     the same as (zero_extract:SI FOO (const_int 1) BAR).  */
  if (code == NE
      && op1 == const0_rtx
      && GET_MODE_CLASS (mode) == MODE_INT
      && cmp_mode != VOIDmode
      /* ??? Work-around BImode bugs in the ia64 backend.  */
      && mode != BImode
      && cmp_mode != BImode
      /* Only valid when op0 is already known to be 0 or 1.  */
      && nonzero_bits (op0, cmp_mode) == 1
      && STORE_FLAG_VALUE == 1)
    return GET_MODE_SIZE (mode) > GET_MODE_SIZE (cmp_mode)
	   ? simplify_gen_unary (ZERO_EXTEND, mode, op0, cmp_mode)
	   : lowpart_subreg (mode, op0, cmp_mode);

  /* (eq/ne (xor x y) 0) simplifies to (eq/ne x y).  */
  if ((code == EQ || code == NE)
      && op1 == const0_rtx
      && op0code == XOR)
    return simplify_gen_relational (code, mode, cmp_mode,
				    XEXP (op0, 0), XEXP (op0, 1));

  /* (eq/ne (xor x y) x) simplifies to (eq/ne y 0).  */
  if ((code == EQ || code == NE)
      && op0code == XOR
      && rtx_equal_p (XEXP (op0, 0), op1)
      && !side_effects_p (XEXP (op0, 0)))
    return simplify_gen_relational (code, mode, cmp_mode,
				    XEXP (op0, 1), const0_rtx);

  /* Likewise (eq/ne (xor x y) y) simplifies to (eq/ne x 0).  */
  if ((code == EQ || code == NE)
      && op0code == XOR
      && rtx_equal_p (XEXP (op0, 1), op1)
      && !side_effects_p (XEXP (op0, 1)))
    return simplify_gen_relational (code, mode, cmp_mode,
				    XEXP (op0, 0), const0_rtx);

  /* (eq/ne (xor x C1) C2) simplifies to (eq/ne x (C1^C2)).  */
  if ((code == EQ || code == NE)
      && op0code == XOR
      && (CONST_INT_P (op1)
	  || GET_CODE (op1) == CONST_DOUBLE)
      && (CONST_INT_P (XEXP (op0, 1))
	  || GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE))
    return simplify_gen_relational (code, mode, cmp_mode, XEXP (op0, 0),
				    simplify_gen_binary (XOR, cmp_mode,
							 XEXP (op0, 1), op1));

  /* Comparing a popcount against zero collapses to comparing the
     popcount's operand against zero (popcount is never negative).  */
  if (op0code == POPCOUNT && op1 == const0_rtx)
    switch (code)
      {
      case EQ:
      case LE:
      case LEU:
	/* (eq (popcount x) (const_int 0)) -> (eq x (const_int 0)).  */
	return simplify_gen_relational (EQ, mode, GET_MODE (XEXP (op0, 0)),
					XEXP (op0, 0), const0_rtx);

      case NE:
      case GT:
      case GTU:
	/* (ne (popcount x) (const_int 0)) -> (ne x (const_int 0)).  */
	return simplify_gen_relational (NE, mode, GET_MODE (XEXP (op0, 0)),
					XEXP (op0, 0), const0_rtx);

      default:
	break;
      }

  return NULL_RTX;
}
4232
/* Bit flags describing what is known about the ordering of two compared
   operands.  Used by comparison_result and
   simplify_const_relational_operation; a sensible value is either CMP_EQ
   alone, or one of (CMP_LT, CMP_GT) ORed with one of (CMP_LTU, CMP_GTU).  */
enum
{
  CMP_EQ = 1,	/* Operands known equal.  */
  CMP_LT = 2,	/* Signed less-than.  */
  CMP_GT = 4,	/* Signed greater-than.  */
  CMP_LTU = 8,	/* Unsigned less-than.  */
  CMP_GTU = 16	/* Unsigned greater-than.  */
};
4241
4242
4243/* Convert the known results for EQ, LT, GT, LTU, GTU contained in
4244 KNOWN_RESULT to a CONST_INT, based on the requested comparison CODE
b8698a0f 4245 For KNOWN_RESULT to make sense it should be either CMP_EQ, or the
a567207e 4246 logical OR of one of (CMP_LT, CMP_GT) and one of (CMP_LTU, CMP_GTU).
39641489
PB
4247 For floating-point comparisons, assume that the operands were ordered. */
4248
4249static rtx
4250comparison_result (enum rtx_code code, int known_results)
4251{
39641489
PB
4252 switch (code)
4253 {
4254 case EQ:
4255 case UNEQ:
a567207e 4256 return (known_results & CMP_EQ) ? const_true_rtx : const0_rtx;
39641489
PB
4257 case NE:
4258 case LTGT:
a567207e 4259 return (known_results & CMP_EQ) ? const0_rtx : const_true_rtx;
39641489
PB
4260
4261 case LT:
4262 case UNLT:
a567207e 4263 return (known_results & CMP_LT) ? const_true_rtx : const0_rtx;
39641489
PB
4264 case GE:
4265 case UNGE:
a567207e 4266 return (known_results & CMP_LT) ? const0_rtx : const_true_rtx;
39641489
PB
4267
4268 case GT:
4269 case UNGT:
a567207e 4270 return (known_results & CMP_GT) ? const_true_rtx : const0_rtx;
39641489
PB
4271 case LE:
4272 case UNLE:
a567207e 4273 return (known_results & CMP_GT) ? const0_rtx : const_true_rtx;
39641489
PB
4274
4275 case LTU:
a567207e 4276 return (known_results & CMP_LTU) ? const_true_rtx : const0_rtx;
39641489 4277 case GEU:
a567207e 4278 return (known_results & CMP_LTU) ? const0_rtx : const_true_rtx;
39641489
PB
4279
4280 case GTU:
a567207e 4281 return (known_results & CMP_GTU) ? const_true_rtx : const0_rtx;
39641489 4282 case LEU:
a567207e 4283 return (known_results & CMP_GTU) ? const0_rtx : const_true_rtx;
39641489
PB
4284
4285 case ORDERED:
4286 return const_true_rtx;
4287 case UNORDERED:
4288 return const0_rtx;
4289 default:
4290 gcc_unreachable ();
4291 }
4292}
4293
/* Check if the given comparison (done in the given MODE) is actually a
   tautology or a contradiction.
   If no simplification is possible, this function returns zero.
   Otherwise, it returns either const_true_rtx or const0_rtx.  */

rtx
simplify_const_relational_operation (enum rtx_code code,
				     enum machine_mode mode,
				     rtx op0, rtx op1)
{
  rtx tem;
  rtx trueop0;
  rtx trueop1;

  /* VOIDmode is only acceptable when both operands are mode-less.  */
  gcc_assert (mode != VOIDmode
	      || (GET_MODE (op0) == VOIDmode
		  && GET_MODE (op1) == VOIDmode));

  /* If op0 is a compare, extract the comparison arguments from it.  */
  if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
    {
      op1 = XEXP (op0, 1);
      op0 = XEXP (op0, 0);

      /* Recover the comparison mode from whichever operand has one.  */
      if (GET_MODE (op0) != VOIDmode)
	mode = GET_MODE (op0);
      else if (GET_MODE (op1) != VOIDmode)
	mode = GET_MODE (op1);
      else
	return 0;
    }

  /* We can't simplify MODE_CC values since we don't know what the
     actual comparison is.  */
  if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC || CC0_P (op0))
    return 0;

  /* Make sure the constant is second.  */
  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_condition (code);
    }

  /* Look through constant-pool references to the constants they hold.  */
  trueop0 = avoid_constant_pool_reference (op0);
  trueop1 = avoid_constant_pool_reference (op1);

  /* For integer comparisons of A and B maybe we can simplify A - B and can
     then simplify a comparison of that with zero.  If A and B are both either
     a register or a CONST_INT, this can't help; testing for these cases will
     prevent infinite recursion here and speed things up.

     We can only do this for EQ and NE comparisons as otherwise we may
     lose or introduce overflow which we cannot disregard as undefined as
     we do not know the signedness of the operation on either the left or
     the right hand side of the comparison.  */

  if (INTEGRAL_MODE_P (mode) && trueop1 != const0_rtx
      && (code == EQ || code == NE)
      && ! ((REG_P (op0) || CONST_INT_P (trueop0))
	    && (REG_P (op1) || CONST_INT_P (trueop1)))
      && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
      /* We cannot do this if tem is a nonzero address.  */
      && ! nonzero_address_p (tem))
    return simplify_const_relational_operation (signed_condition (code),
						mode, tem, const0_rtx);

  /* Without NaNs, ORDERED is always true and UNORDERED always false.  */
  if (! HONOR_NANS (mode) && code == ORDERED)
    return const_true_rtx;

  if (! HONOR_NANS (mode) && code == UNORDERED)
    return const0_rtx;

  /* For modes without NaNs, if the two operands are equal, we know the
     result except if they have side-effects.  Even with NaNs we know
     the result of unordered comparisons and, if signaling NaNs are
     irrelevant, also the result of LT/GT/LTGT.  */
  if ((! HONOR_NANS (GET_MODE (trueop0))
       || code == UNEQ || code == UNLE || code == UNGE
       || ((code == LT || code == GT || code == LTGT)
	   && ! HONOR_SNANS (GET_MODE (trueop0))))
      && rtx_equal_p (trueop0, trueop1)
      && ! side_effects_p (trueop0))
    return comparison_result (code, CMP_EQ);

  /* If the operands are floating-point constants, see if we can fold
     the result.  */
  if (GET_CODE (trueop0) == CONST_DOUBLE
      && GET_CODE (trueop1) == CONST_DOUBLE
      && SCALAR_FLOAT_MODE_P (GET_MODE (trueop0)))
    {
      REAL_VALUE_TYPE d0, d1;

      REAL_VALUE_FROM_CONST_DOUBLE (d0, trueop0);
      REAL_VALUE_FROM_CONST_DOUBLE (d1, trueop1);

      /* Comparisons are unordered iff at least one of the values is NaN.  */
      if (REAL_VALUE_ISNAN (d0) || REAL_VALUE_ISNAN (d1))
	switch (code)
	  {
	  case UNEQ:
	  case UNLT:
	  case UNGT:
	  case UNLE:
	  case UNGE:
	  case NE:
	  case UNORDERED:
	    return const_true_rtx;
	  case EQ:
	  case LT:
	  case GT:
	  case LE:
	  case GE:
	  case LTGT:
	  case ORDERED:
	    return const0_rtx;
	  default:
	    return 0;
	  }

      return comparison_result (code,
				(REAL_VALUES_EQUAL (d0, d1) ? CMP_EQ :
				 REAL_VALUES_LESS (d0, d1) ? CMP_LT : CMP_GT));
    }

  /* Otherwise, see if the operands are both integers.  */
  if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
      && (GET_CODE (trueop0) == CONST_DOUBLE
	  || CONST_INT_P (trueop0))
      && (GET_CODE (trueop1) == CONST_DOUBLE
	  || CONST_INT_P (trueop1)))
    {
      int width = GET_MODE_BITSIZE (mode);
      /* Each constant is held as a (low, high) pair of host wide ints;
	 the s/u variants hold the same bits viewed signed/unsigned.  */
      HOST_WIDE_INT l0s, h0s, l1s, h1s;
      unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;

      /* Get the two words comprising each integer constant.  */
      if (GET_CODE (trueop0) == CONST_DOUBLE)
	{
	  l0u = l0s = CONST_DOUBLE_LOW (trueop0);
	  h0u = h0s = CONST_DOUBLE_HIGH (trueop0);
	}
      else
	{
	  l0u = l0s = INTVAL (trueop0);
	  h0u = h0s = HWI_SIGN_EXTEND (l0s);
	}

      if (GET_CODE (trueop1) == CONST_DOUBLE)
	{
	  l1u = l1s = CONST_DOUBLE_LOW (trueop1);
	  h1u = h1s = CONST_DOUBLE_HIGH (trueop1);
	}
      else
	{
	  l1u = l1s = INTVAL (trueop1);
	  h1u = h1s = HWI_SIGN_EXTEND (l1s);
	}

      /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
	 we have to sign or zero-extend the values.  */
      if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
	{
	  l0u &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;
	  l1u &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;

	  if (l0s & ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
	    l0s |= ((unsigned HOST_WIDE_INT) (-1) << width);

	  if (l1s & ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
	    l1s |= ((unsigned HOST_WIDE_INT) (-1) << width);
	}
      /* A value that fits in one word has no meaningful high part.  */
      if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
	h0u = h1u = 0, h0s = HWI_SIGN_EXTEND (l0s), h1s = HWI_SIGN_EXTEND (l1s);

      if (h0u == h1u && l0u == l1u)
	return comparison_result (code, CMP_EQ);
      else
	{
	  /* Record both the signed and the unsigned ordering; the
	     requested CODE picks which one matters.  */
	  int cr;
	  cr = (h0s < h1s || (h0s == h1s && l0u < l1u)) ? CMP_LT : CMP_GT;
	  cr |= (h0u < h1u || (h0u == h1u && l0u < l1u)) ? CMP_LTU : CMP_GTU;
	  return comparison_result (code, cr);
	}
    }

  /* Optimize comparisons with upper and lower bounds.  */
  if (SCALAR_INT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
      && CONST_INT_P (trueop1))
    {
      int sign;
      unsigned HOST_WIDE_INT nonzero = nonzero_bits (trueop0, mode);
      HOST_WIDE_INT val = INTVAL (trueop1);
      HOST_WIDE_INT mmin, mmax;

      /* Unsigned codes use the unsigned range of op0, signed codes the
	 signed range.  */
      if (code == GEU
	  || code == LEU
	  || code == GTU
	  || code == LTU)
	sign = 0;
      else
	sign = 1;

      /* Get a reduced range if the sign bit is zero.  */
      if (nonzero <= (GET_MODE_MASK (mode) >> 1))
	{
	  mmin = 0;
	  mmax = nonzero;
	}
      else
	{
	  rtx mmin_rtx, mmax_rtx;
	  get_mode_bounds (mode, sign, mode, &mmin_rtx, &mmax_rtx);

	  mmin = INTVAL (mmin_rtx);
	  mmax = INTVAL (mmax_rtx);
	  if (sign)
	    {
	      /* Known sign-bit copies shrink the signed range.  */
	      unsigned int sign_copies = num_sign_bit_copies (trueop0, mode);

	      mmin >>= (sign_copies - 1);
	      mmax >>= (sign_copies - 1);
	    }
	}

      switch (code)
	{
	/* x >= y is always true for y <= mmin, always false for y > mmax.  */
	case GEU:
	  if ((unsigned HOST_WIDE_INT) val <= (unsigned HOST_WIDE_INT) mmin)
	    return const_true_rtx;
	  if ((unsigned HOST_WIDE_INT) val > (unsigned HOST_WIDE_INT) mmax)
	    return const0_rtx;
	  break;
	case GE:
	  if (val <= mmin)
	    return const_true_rtx;
	  if (val > mmax)
	    return const0_rtx;
	  break;

	/* x <= y is always true for y >= mmax, always false for y < mmin.  */
	case LEU:
	  if ((unsigned HOST_WIDE_INT) val >= (unsigned HOST_WIDE_INT) mmax)
	    return const_true_rtx;
	  if ((unsigned HOST_WIDE_INT) val < (unsigned HOST_WIDE_INT) mmin)
	    return const0_rtx;
	  break;
	case LE:
	  if (val >= mmax)
	    return const_true_rtx;
	  if (val < mmin)
	    return const0_rtx;
	  break;

	case EQ:
	  /* x == y is always false for y out of range.  */
	  if (val < mmin || val > mmax)
	    return const0_rtx;
	  break;

	/* x > y is always false for y >= mmax, always true for y < mmin.  */
	case GTU:
	  if ((unsigned HOST_WIDE_INT) val >= (unsigned HOST_WIDE_INT) mmax)
	    return const0_rtx;
	  if ((unsigned HOST_WIDE_INT) val < (unsigned HOST_WIDE_INT) mmin)
	    return const_true_rtx;
	  break;
	case GT:
	  if (val >= mmax)
	    return const0_rtx;
	  if (val < mmin)
	    return const_true_rtx;
	  break;

	/* x < y is always false for y <= mmin, always true for y > mmax.  */
	case LTU:
	  if ((unsigned HOST_WIDE_INT) val <= (unsigned HOST_WIDE_INT) mmin)
	    return const0_rtx;
	  if ((unsigned HOST_WIDE_INT) val > (unsigned HOST_WIDE_INT) mmax)
	    return const_true_rtx;
	  break;
	case LT:
	  if (val <= mmin)
	    return const0_rtx;
	  if (val > mmax)
	    return const_true_rtx;
	  break;

	case NE:
	  /* x != y is always true for y out of range.  */
	  if (val < mmin || val > mmax)
	    return const_true_rtx;
	  break;

	default:
	  break;
	}
    }

  /* Optimize integer comparisons with zero.  */
  if (trueop1 == const0_rtx)
    {
      /* Some addresses are known to be nonzero.  We don't know
	 their sign, but equality comparisons are known.  */
      if (nonzero_address_p (trueop0))
	{
	  if (code == EQ || code == LEU)
	    return const0_rtx;
	  if (code == NE || code == GTU)
	    return const_true_rtx;
	}

      /* See if the first operand is an IOR with a constant.  If so, we
	 may be able to determine the result of this comparison.  */
      if (GET_CODE (op0) == IOR)
	{
	  rtx inner_const = avoid_constant_pool_reference (XEXP (op0, 1));
	  if (CONST_INT_P (inner_const) && inner_const != const0_rtx)
	    {
	      /* An IOR with a nonzero constant is itself nonzero; if the
		 constant also sets the sign bit, the sign is known too.  */
	      int sign_bitnum = GET_MODE_BITSIZE (mode) - 1;
	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
			      && (UINTVAL (inner_const)
				  & ((unsigned HOST_WIDE_INT) 1
				     << sign_bitnum)));

	      switch (code)
		{
		case EQ:
		case LEU:
		  return const0_rtx;
		case NE:
		case GTU:
		  return const_true_rtx;
		case LT:
		case LE:
		  if (has_sign)
		    return const_true_rtx;
		  break;
		case GT:
		case GE:
		  if (has_sign)
		    return const0_rtx;
		  break;
		default:
		  break;
		}
	    }
	}
    }

  /* Optimize comparison of ABS with zero.  */
  if (trueop1 == CONST0_RTX (mode)
      && (GET_CODE (trueop0) == ABS
	  || (GET_CODE (trueop0) == FLOAT_EXTEND
	      && GET_CODE (XEXP (trueop0, 0)) == ABS)))
    {
      switch (code)
	{
	case LT:
	  /* Optimize abs(x) < 0.0.  */
	  if (!HONOR_SNANS (mode)
	      && (!INTEGRAL_MODE_P (mode)
		  || (!flag_wrapv && !flag_trapv && flag_strict_overflow)))
	    {
	      if (INTEGRAL_MODE_P (mode)
		  && (issue_strict_overflow_warning
		      (WARN_STRICT_OVERFLOW_CONDITIONAL)))
		warning (OPT_Wstrict_overflow,
			 ("assuming signed overflow does not occur when "
			  "assuming abs (x) < 0 is false"));
	      return const0_rtx;
	    }
	  break;

	case GE:
	  /* Optimize abs(x) >= 0.0.  */
	  if (!HONOR_NANS (mode)
	      && (!INTEGRAL_MODE_P (mode)
		  || (!flag_wrapv && !flag_trapv && flag_strict_overflow)))
	    {
	      if (INTEGRAL_MODE_P (mode)
		  && (issue_strict_overflow_warning
		      (WARN_STRICT_OVERFLOW_CONDITIONAL)))
		warning (OPT_Wstrict_overflow,
			 ("assuming signed overflow does not occur when "
			  "assuming abs (x) >= 0 is true"));
	      return const_true_rtx;
	    }
	  break;

	case UNGE:
	  /* Optimize ! (abs(x) < 0.0).  */
	  return const_true_rtx;

	default:
	  break;
	}
    }

  return 0;
}
4697\f
4698/* Simplify CODE, an operation with result mode MODE and three operands,
4699 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4700 a constant. Return 0 if no simplifications is possible. */
4701
4702rtx
46c5ad27
AJ
4703simplify_ternary_operation (enum rtx_code code, enum machine_mode mode,
4704 enum machine_mode op0_mode, rtx op0, rtx op1,
4705 rtx op2)
0cedb36c 4706{
749a2da1 4707 unsigned int width = GET_MODE_BITSIZE (mode);
5c822194
RH
4708 bool any_change = false;
4709 rtx tem;
0cedb36c
JL
4710
4711 /* VOIDmode means "infinite" precision. */
4712 if (width == 0)
4713 width = HOST_BITS_PER_WIDE_INT;
4714
4715 switch (code)
4716 {
1b1562a5 4717 case FMA:
5c822194
RH
4718 /* Simplify negations around the multiplication. */
4719 /* -a * -b + c => a * b + c. */
4720 if (GET_CODE (op0) == NEG)
4721 {
4722 tem = simplify_unary_operation (NEG, mode, op1, mode);
4723 if (tem)
4724 op1 = tem, op0 = XEXP (op0, 0), any_change = true;
4725 }
4726 else if (GET_CODE (op1) == NEG)
4727 {
4728 tem = simplify_unary_operation (NEG, mode, op0, mode);
4729 if (tem)
4730 op0 = tem, op1 = XEXP (op1, 0), any_change = true;
4731 }
4732
4733 /* Canonicalize the two multiplication operands. */
4734 /* a * -b + c => -b * a + c. */
4735 if (swap_commutative_operands_p (op0, op1))
4736 tem = op0, op0 = op1, op1 = tem, any_change = true;
4737
4738 if (any_change)
4739 return gen_rtx_FMA (mode, op0, op1, op2);
1b1562a5
MM
4740 return NULL_RTX;
4741
0cedb36c
JL
4742 case SIGN_EXTRACT:
4743 case ZERO_EXTRACT:
481683e1
SZ
4744 if (CONST_INT_P (op0)
4745 && CONST_INT_P (op1)
4746 && CONST_INT_P (op2)
d882fe51 4747 && ((unsigned) INTVAL (op1) + (unsigned) INTVAL (op2) <= width)
f9e158c3 4748 && width <= (unsigned) HOST_BITS_PER_WIDE_INT)
0cedb36c
JL
4749 {
4750 /* Extracting a bit-field from a constant */
43c36287 4751 unsigned HOST_WIDE_INT val = UINTVAL (op0);
0cedb36c
JL
4752
4753 if (BITS_BIG_ENDIAN)
43c36287 4754 val >>= GET_MODE_BITSIZE (op0_mode) - INTVAL (op2) - INTVAL (op1);
0cedb36c
JL
4755 else
4756 val >>= INTVAL (op2);
4757
4758 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4759 {
4760 /* First zero-extend. */
43c36287 4761 val &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
0cedb36c
JL
4762 /* If desired, propagate sign bit. */
4763 if (code == SIGN_EXTRACT
43c36287
EB
4764 && (val & ((unsigned HOST_WIDE_INT) 1 << (INTVAL (op1) - 1)))
4765 != 0)
4766 val |= ~ (((unsigned HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
0cedb36c
JL
4767 }
4768
4769 /* Clear the bits that don't belong in our mode,
4770 unless they and our sign bit are all one.
4771 So we get either a reasonable negative value or a reasonable
4772 unsigned value for this mode. */
4773 if (width < HOST_BITS_PER_WIDE_INT
43c36287
EB
4774 && ((val & ((unsigned HOST_WIDE_INT) (-1) << (width - 1)))
4775 != ((unsigned HOST_WIDE_INT) (-1) << (width - 1))))
4776 val &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;
0cedb36c 4777
449ecb09 4778 return gen_int_mode (val, mode);
0cedb36c
JL
4779 }
4780 break;
4781
4782 case IF_THEN_ELSE:
481683e1 4783 if (CONST_INT_P (op0))
0cedb36c
JL
4784 return op0 != const0_rtx ? op1 : op2;
4785
31f0f571
RS
4786 /* Convert c ? a : a into "a". */
4787 if (rtx_equal_p (op1, op2) && ! side_effects_p (op0))
0cedb36c 4788 return op1;
31f0f571
RS
4789
4790 /* Convert a != b ? a : b into "a". */
4791 if (GET_CODE (op0) == NE
4792 && ! side_effects_p (op0)
4793 && ! HONOR_NANS (mode)
4794 && ! HONOR_SIGNED_ZEROS (mode)
4795 && ((rtx_equal_p (XEXP (op0, 0), op1)
4796 && rtx_equal_p (XEXP (op0, 1), op2))
4797 || (rtx_equal_p (XEXP (op0, 0), op2)
4798 && rtx_equal_p (XEXP (op0, 1), op1))))
4799 return op1;
4800
4801 /* Convert a == b ? a : b into "b". */
4802 if (GET_CODE (op0) == EQ
4803 && ! side_effects_p (op0)
4804 && ! HONOR_NANS (mode)
4805 && ! HONOR_SIGNED_ZEROS (mode)
4806 && ((rtx_equal_p (XEXP (op0, 0), op1)
4807 && rtx_equal_p (XEXP (op0, 1), op2))
4808 || (rtx_equal_p (XEXP (op0, 0), op2)
4809 && rtx_equal_p (XEXP (op0, 1), op1))))
0cedb36c 4810 return op2;
31f0f571 4811
ec8e098d 4812 if (COMPARISON_P (op0) && ! side_effects_p (op0))
0cedb36c 4813 {
47b1e19b
JH
4814 enum machine_mode cmp_mode = (GET_MODE (XEXP (op0, 0)) == VOIDmode
4815 ? GET_MODE (XEXP (op0, 1))
4816 : GET_MODE (XEXP (op0, 0)));
3e882897 4817 rtx temp;
a774e06e
RH
4818
4819 /* Look for happy constants in op1 and op2. */
481683e1 4820 if (CONST_INT_P (op1) && CONST_INT_P (op2))
a774e06e
RH
4821 {
4822 HOST_WIDE_INT t = INTVAL (op1);
4823 HOST_WIDE_INT f = INTVAL (op2);
786de7eb 4824
a774e06e
RH
4825 if (t == STORE_FLAG_VALUE && f == 0)
4826 code = GET_CODE (op0);
261efdef
JH
4827 else if (t == 0 && f == STORE_FLAG_VALUE)
4828 {
4829 enum rtx_code tmp;
4830 tmp = reversed_comparison_code (op0, NULL_RTX);
4831 if (tmp == UNKNOWN)
4832 break;
4833 code = tmp;
4834 }
a774e06e
RH
4835 else
4836 break;
4837
77306e3e 4838 return simplify_gen_relational (code, mode, cmp_mode,
c6fb08ad
PB
4839 XEXP (op0, 0), XEXP (op0, 1));
4840 }
4841
4842 if (cmp_mode == VOIDmode)
4843 cmp_mode = op0_mode;
4844 temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
4845 cmp_mode, XEXP (op0, 0),
4846 XEXP (op0, 1));
4847
4848 /* See if any simplifications were possible. */
4849 if (temp)
4850 {
481683e1 4851 if (CONST_INT_P (temp))
c6fb08ad
PB
4852 return temp == const0_rtx ? op2 : op1;
4853 else if (temp)
4854 return gen_rtx_IF_THEN_ELSE (mode, temp, op1, op2);
a774e06e 4855 }
0cedb36c
JL
4856 }
4857 break;
31f0f571 4858
d9deed68 4859 case VEC_MERGE:
41374e13
NS
4860 gcc_assert (GET_MODE (op0) == mode);
4861 gcc_assert (GET_MODE (op1) == mode);
4862 gcc_assert (VECTOR_MODE_P (mode));
d9deed68 4863 op2 = avoid_constant_pool_reference (op2);
481683e1 4864 if (CONST_INT_P (op2))
d9deed68
JH
4865 {
4866 int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4867 unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
5be86fec 4868 int mask = (1 << n_elts) - 1;
d9deed68 4869
852c8ba1
JH
4870 if (!(INTVAL (op2) & mask))
4871 return op1;
4872 if ((INTVAL (op2) & mask) == mask)
4873 return op0;
4874
4875 op0 = avoid_constant_pool_reference (op0);
4876 op1 = avoid_constant_pool_reference (op1);
4877 if (GET_CODE (op0) == CONST_VECTOR
4878 && GET_CODE (op1) == CONST_VECTOR)
4879 {
4880 rtvec v = rtvec_alloc (n_elts);
4881 unsigned int i;
4882
4883 for (i = 0; i < n_elts; i++)
4884 RTVEC_ELT (v, i) = (INTVAL (op2) & (1 << i)
4885 ? CONST_VECTOR_ELT (op0, i)
4886 : CONST_VECTOR_ELT (op1, i));
4887 return gen_rtx_CONST_VECTOR (mode, v);
4888 }
d9deed68
JH
4889 }
4890 break;
0cedb36c
JL
4891
4892 default:
41374e13 4893 gcc_unreachable ();
0cedb36c
JL
4894 }
4895
4896 return 0;
4897}
4898
/* Evaluate a SUBREG of a CONST_INT or CONST_DOUBLE or CONST_FIXED
   or CONST_VECTOR, returning another CONST_INT or CONST_DOUBLE or
   CONST_FIXED or CONST_VECTOR.

   Works by unpacking OP into a collection of 8-bit values
   represented as a little-endian array of 'unsigned char', selecting by BYTE,
   and then repacking them again for OUTERMODE.

   OUTERMODE is the mode the SUBREG yields, INNERMODE the mode of OP,
   and BYTE the SUBREG_BYTE offset.  Returns NULL_RTX when the result
   cannot be represented (e.g. complex modes, or integers wider than
   two HOST_WIDE_INTs).  */

static rtx
simplify_immed_subreg (enum machine_mode outermode, rtx op,
		       enum machine_mode innermode, unsigned int byte)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  enum {
    max_bitsize = 512,
    value_bit = 8,
    value_mask = (1 << value_bit) - 1
  };
  /* Scratch buffer holding OP as little-endian VALUE_BIT-sized chunks.  */
  unsigned char value[max_bitsize / value_bit];
  int value_start;
  int i;
  int elem;

  int num_elem;
  rtx * elems;
  int elem_bitsize;
  rtx result_s;
  rtvec result_v = NULL;
  enum mode_class outer_class;
  enum machine_mode outer_submode;

  /* Some ports misuse CCmode.  */
  if (GET_MODE_CLASS (outermode) == MODE_CC && CONST_INT_P (op))
    return op;

  /* We have no way to represent a complex constant at the rtl level.  */
  if (COMPLEX_MODE_P (outermode))
    return NULL_RTX;

  /* Unpack the value.  */

  if (GET_CODE (op) == CONST_VECTOR)
    {
      num_elem = CONST_VECTOR_NUNITS (op);
      elems = &CONST_VECTOR_ELT (op, 0);
      elem_bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (innermode));
    }
  else
    {
      /* A scalar constant is treated as one element filling the
	 whole buffer.  */
      num_elem = 1;
      elems = &op;
      elem_bitsize = max_bitsize;
    }
  /* If this asserts, it is too complicated; reducing value_bit may help.  */
  gcc_assert (BITS_PER_UNIT % value_bit == 0);
  /* I don't know how to handle endianness of sub-units.  */
  gcc_assert (elem_bitsize % BITS_PER_UNIT == 0);

  for (elem = 0; elem < num_elem; elem++)
    {
      unsigned char * vp;
      rtx el = elems[elem];

      /* Vectors are kept in target memory order.  (This is probably
	 a mistake.)  */
      {
	/* NOTE: this inner BYTE intentionally shadows the parameter;
	   it is the element's offset within the vector, adjusted for
	   word and byte endianness.  */
	unsigned byte = (elem * elem_bitsize) / BITS_PER_UNIT;
	unsigned ibyte = (((num_elem - 1 - elem) * elem_bitsize)
			  / BITS_PER_UNIT);
	unsigned word_byte = WORDS_BIG_ENDIAN ? ibyte : byte;
	unsigned subword_byte = BYTES_BIG_ENDIAN ? ibyte : byte;
	unsigned bytele = (subword_byte % UNITS_PER_WORD
			   + (word_byte / UNITS_PER_WORD) * UNITS_PER_WORD);
	vp = value + (bytele * BITS_PER_UNIT) / value_bit;
      }

      switch (GET_CODE (el))
	{
	case CONST_INT:
	  for (i = 0;
	       i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize;
	       i += value_bit)
	    *vp++ = INTVAL (el) >> i;
	  /* CONST_INTs are always logically sign-extended.  */
	  for (; i < elem_bitsize; i += value_bit)
	    *vp++ = INTVAL (el) < 0 ? -1 : 0;
	  break;

	case CONST_DOUBLE:
	  if (GET_MODE (el) == VOIDmode)
	    {
	      /* VOIDmode CONST_DOUBLE is a two-HWI integer constant.
		 If this triggers, someone should have generated a
		 CONST_INT instead.  */
	      gcc_assert (elem_bitsize > HOST_BITS_PER_WIDE_INT);

	      for (i = 0; i < HOST_BITS_PER_WIDE_INT; i += value_bit)
		*vp++ = CONST_DOUBLE_LOW (el) >> i;
	      while (i < HOST_BITS_PER_WIDE_INT * 2 && i < elem_bitsize)
		{
		  *vp++
		    = CONST_DOUBLE_HIGH (el) >> (i - HOST_BITS_PER_WIDE_INT);
		  i += value_bit;
		}
	      /* It shouldn't matter what's done here, so fill it with
		 zero.  */
	      for (; i < elem_bitsize; i += value_bit)
		*vp++ = 0;
	    }
	  else
	    {
	      /* A floating-point CONST_DOUBLE: serialize via
		 real_to_target into 32-bit words.  */
	      long tmp[max_bitsize / 32];
	      int bitsize = GET_MODE_BITSIZE (GET_MODE (el));

	      gcc_assert (SCALAR_FLOAT_MODE_P (GET_MODE (el)));
	      gcc_assert (bitsize <= elem_bitsize);
	      gcc_assert (bitsize % value_bit == 0);

	      real_to_target (tmp, CONST_DOUBLE_REAL_VALUE (el),
			      GET_MODE (el));

	      /* real_to_target produces its result in words affected by
		 FLOAT_WORDS_BIG_ENDIAN.  However, we ignore this,
		 and use WORDS_BIG_ENDIAN instead; see the documentation
		 of SUBREG in rtl.texi.  */
	      for (i = 0; i < bitsize; i += value_bit)
		{
		  int ibase;
		  if (WORDS_BIG_ENDIAN)
		    ibase = bitsize - 1 - i;
		  else
		    ibase = i;
		  *vp++ = tmp[ibase / 32] >> i % 32;
		}

	      /* It shouldn't matter what's done here, so fill it with
		 zero.  */
	      for (; i < elem_bitsize; i += value_bit)
		*vp++ = 0;
	    }
	  break;

	case CONST_FIXED:
	  /* Fixed-point constants carry their bits in one or two
	     HOST_WIDE_INTs, like integer CONST_DOUBLEs.  */
	  if (elem_bitsize <= HOST_BITS_PER_WIDE_INT)
	    {
	      for (i = 0; i < elem_bitsize; i += value_bit)
		*vp++ = CONST_FIXED_VALUE_LOW (el) >> i;
	    }
	  else
	    {
	      for (i = 0; i < HOST_BITS_PER_WIDE_INT; i += value_bit)
		*vp++ = CONST_FIXED_VALUE_LOW (el) >> i;
	      for (; i < 2 * HOST_BITS_PER_WIDE_INT && i < elem_bitsize;
		   i += value_bit)
		*vp++ = CONST_FIXED_VALUE_HIGH (el)
			>> (i - HOST_BITS_PER_WIDE_INT);
	      for (; i < elem_bitsize; i += value_bit)
		*vp++ = 0;
	    }
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Now, pick the right byte to start with.  */
  /* Renumber BYTE so that the least-significant byte is byte 0.  A special
     case is paradoxical SUBREGs, which shouldn't be adjusted since they
     will already have offset 0.  */
  if (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode))
    {
      unsigned ibyte = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode)
			- byte);
      unsigned word_byte = WORDS_BIG_ENDIAN ? ibyte : byte;
      unsigned subword_byte = BYTES_BIG_ENDIAN ? ibyte : byte;
      byte = (subword_byte % UNITS_PER_WORD
	      + (word_byte / UNITS_PER_WORD) * UNITS_PER_WORD);
    }

  /* BYTE should still be inside OP.  (Note that BYTE is unsigned,
     so if it's become negative it will instead be very large.)  */
  gcc_assert (byte < GET_MODE_SIZE (innermode));

  /* Convert from bytes to chunks of size value_bit.  */
  value_start = byte * (BITS_PER_UNIT / value_bit);

  /* Re-pack the value.  */

  if (VECTOR_MODE_P (outermode))
    {
      num_elem = GET_MODE_NUNITS (outermode);
      result_v = rtvec_alloc (num_elem);
      elems = &RTVEC_ELT (result_v, 0);
      outer_submode = GET_MODE_INNER (outermode);
    }
  else
    {
      num_elem = 1;
      elems = &result_s;
      outer_submode = outermode;
    }

  outer_class = GET_MODE_CLASS (outer_submode);
  elem_bitsize = GET_MODE_BITSIZE (outer_submode);

  gcc_assert (elem_bitsize % value_bit == 0);
  gcc_assert (elem_bitsize + value_start * value_bit <= max_bitsize);

  for (elem = 0; elem < num_elem; elem++)
    {
      unsigned char *vp;

      /* Vectors are stored in target memory order.  (This is probably
	 a mistake.)  */
      {
	/* Same endianness-adjusted element addressing as the unpack
	   loop above, but offset by VALUE_START.  */
	unsigned byte = (elem * elem_bitsize) / BITS_PER_UNIT;
	unsigned ibyte = (((num_elem - 1 - elem) * elem_bitsize)
			  / BITS_PER_UNIT);
	unsigned word_byte = WORDS_BIG_ENDIAN ? ibyte : byte;
	unsigned subword_byte = BYTES_BIG_ENDIAN ? ibyte : byte;
	unsigned bytele = (subword_byte % UNITS_PER_WORD
			   + (word_byte / UNITS_PER_WORD) * UNITS_PER_WORD);
	vp = value + value_start + (bytele * BITS_PER_UNIT) / value_bit;
      }

      switch (outer_class)
	{
	case MODE_INT:
	case MODE_PARTIAL_INT:
	  {
	    unsigned HOST_WIDE_INT hi = 0, lo = 0;

	    for (i = 0;
		 i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize;
		 i += value_bit)
	      lo |= (unsigned HOST_WIDE_INT)(*vp++ & value_mask) << i;
	    for (; i < elem_bitsize; i += value_bit)
	      hi |= (unsigned HOST_WIDE_INT)(*vp++ & value_mask)
		    << (i - HOST_BITS_PER_WIDE_INT);

	    /* immed_double_const doesn't call trunc_int_for_mode.  I don't
	       know why.  */
	    if (elem_bitsize <= HOST_BITS_PER_WIDE_INT)
	      elems[elem] = gen_int_mode (lo, outer_submode);
	    else if (elem_bitsize <= 2 * HOST_BITS_PER_WIDE_INT)
	      elems[elem] = immed_double_const (lo, hi, outer_submode);
	    else
	      /* Wider than two HWIs cannot be represented.  */
	      return NULL_RTX;
	  }
	  break;

	case MODE_FLOAT:
	case MODE_DECIMAL_FLOAT:
	  {
	    REAL_VALUE_TYPE r;
	    long tmp[max_bitsize / 32];

	    /* real_from_target wants its input in words affected by
	       FLOAT_WORDS_BIG_ENDIAN.  However, we ignore this,
	       and use WORDS_BIG_ENDIAN instead; see the documentation
	       of SUBREG in rtl.texi.  */
	    for (i = 0; i < max_bitsize / 32; i++)
	      tmp[i] = 0;
	    for (i = 0; i < elem_bitsize; i += value_bit)
	      {
		int ibase;
		if (WORDS_BIG_ENDIAN)
		  ibase = elem_bitsize - 1 - i;
		else
		  ibase = i;
		tmp[ibase / 32] |= (*vp++ & value_mask) << i % 32;
	      }

	    real_from_target (&r, tmp, outer_submode);
	    elems[elem] = CONST_DOUBLE_FROM_REAL_VALUE (r, outer_submode);
	  }
	  break;

	case MODE_FRACT:
	case MODE_UFRACT:
	case MODE_ACCUM:
	case MODE_UACCUM:
	  {
	    /* Fixed-point result: rebuild the two-HWI payload.  */
	    FIXED_VALUE_TYPE f;
	    f.data.low = 0;
	    f.data.high = 0;
	    f.mode = outer_submode;

	    for (i = 0;
		 i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize;
		 i += value_bit)
	      f.data.low |= (unsigned HOST_WIDE_INT)(*vp++ & value_mask) << i;
	    for (; i < elem_bitsize; i += value_bit)
	      f.data.high |= ((unsigned HOST_WIDE_INT)(*vp++ & value_mask)
			      << (i - HOST_BITS_PER_WIDE_INT));

	    elems[elem] = CONST_FIXED_FROM_FIXED_VALUE (f, outer_submode);
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  if (VECTOR_MODE_P (outermode))
    return gen_rtx_CONST_VECTOR (outermode, result_v);
  else
    return result_s;
}
/* Simplify SUBREG:OUTERMODE(OP:INNERMODE, BYTE)
   Return 0 if no simplifications are possible.

   OUTERMODE is the mode of the whole SUBREG, INNERMODE the mode of OP,
   and BYTE the SUBREG_BYTE offset (0 for a lowpart on little-endian).
   The cases handled below, in order: constants, nested SUBREGs,
   TRUNCATE, hard registers, MEMs, CONCAT (complex values), extensions,
   and shifts that behave like truncations or word extractions.  */
rtx
simplify_subreg (enum machine_mode outermode, rtx op,
		 enum machine_mode innermode, unsigned int byte)
{
  /* Little bit of sanity checking.  */
  gcc_assert (innermode != VOIDmode);
  gcc_assert (outermode != VOIDmode);
  gcc_assert (innermode != BLKmode);
  gcc_assert (outermode != BLKmode);

  gcc_assert (GET_MODE (op) == innermode
	      || GET_MODE (op) == VOIDmode);

  gcc_assert ((byte % GET_MODE_SIZE (outermode)) == 0);
  gcc_assert (byte < GET_MODE_SIZE (innermode));

  /* A no-op SUBREG folds to the operand itself.  */
  if (outermode == innermode && !byte)
    return op;

  /* Constants are folded bitwise by simplify_immed_subreg.  */
  if (CONST_INT_P (op)
      || GET_CODE (op) == CONST_DOUBLE
      || GET_CODE (op) == CONST_FIXED
      || GET_CODE (op) == CONST_VECTOR)
    return simplify_immed_subreg (outermode, op, innermode, byte);

  /* Changing mode twice with SUBREG => just change it once,
     or not at all if changing back op starting mode.  */
  if (GET_CODE (op) == SUBREG)
    {
      enum machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
      int final_offset = byte + SUBREG_BYTE (op);
      rtx newx;

      if (outermode == innermostmode
	  && byte == 0 && SUBREG_BYTE (op) == 0)
	return SUBREG_REG (op);

      /* The SUBREG_BYTE represents offset, as if the value were stored
	 in memory.  Irritating exception is paradoxical subreg, where
	 we define SUBREG_BYTE to be 0.  On big endian machines, this
	 value should be negative.  For a moment, undo this exception.  */
      if (byte == 0 && GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
	{
	  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
	  if (WORDS_BIG_ENDIAN)
	    final_offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    final_offset += difference % UNITS_PER_WORD;
	}
      /* Same correction for the inner SUBREG if it is paradoxical.  */
      if (SUBREG_BYTE (op) == 0
	  && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
	{
	  int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
	  if (WORDS_BIG_ENDIAN)
	    final_offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    final_offset += difference % UNITS_PER_WORD;
	}

      /* See whether resulting subreg will be paradoxical.  */
      if (GET_MODE_SIZE (innermostmode) > GET_MODE_SIZE (outermode))
	{
	  /* In nonparadoxical subregs we can't handle negative offsets.  */
	  if (final_offset < 0)
	    return NULL_RTX;
	  /* Bail out in case resulting subreg would be incorrect.  */
	  if (final_offset % GET_MODE_SIZE (outermode)
	      || (unsigned) final_offset >= GET_MODE_SIZE (innermostmode))
	    return NULL_RTX;
	}
      else
	{
	  int offset = 0;
	  int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (outermode));

	  /* In paradoxical subreg, see if we are still looking on lower part.
	     If so, our SUBREG_BYTE will be 0.  */
	  if (WORDS_BIG_ENDIAN)
	    offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += difference % UNITS_PER_WORD;
	  if (offset == final_offset)
	    final_offset = 0;
	  else
	    return NULL_RTX;
	}

      /* Recurse for further possible simplifications.  */
      newx = simplify_subreg (outermode, SUBREG_REG (op), innermostmode,
			      final_offset);
      if (newx)
	return newx;
      if (validate_subreg (outermode, innermostmode,
			   SUBREG_REG (op), final_offset))
	{
	  newx = gen_rtx_SUBREG (outermode, SUBREG_REG (op), final_offset);
	  /* Preserve promotion info when the new subreg is still a
	     lowpart of the (promoted) inner value.  */
	  if (SUBREG_PROMOTED_VAR_P (op)
	      && SUBREG_PROMOTED_UNSIGNED_P (op) >= 0
	      && GET_MODE_CLASS (outermode) == MODE_INT
	      && IN_RANGE (GET_MODE_SIZE (outermode),
			   GET_MODE_SIZE (innermode),
			   GET_MODE_SIZE (innermostmode))
	      && subreg_lowpart_p (newx))
	    {
	      SUBREG_PROMOTED_VAR_P (newx) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET
		(newx, SUBREG_PROMOTED_UNSIGNED_P (op));
	    }
	  return newx;
	}
      return NULL_RTX;
    }

  /* Merge implicit and explicit truncations.  */

  if (GET_CODE (op) == TRUNCATE
      && GET_MODE_SIZE (outermode) < GET_MODE_SIZE (innermode)
      && subreg_lowpart_offset (outermode, innermode) == byte)
    return simplify_gen_unary (TRUNCATE, outermode, XEXP (op, 0),
			       GET_MODE (XEXP (op, 0)));

  /* SUBREG of a hard register => just change the register number
     and/or mode.  If the hard register is not valid in that mode,
     suppress this simplification.  If the hard register is the stack,
     frame, or argument pointer, leave this as a SUBREG.  */

  if (REG_P (op) && HARD_REGISTER_P (op))
    {
      unsigned int regno, final_regno;

      regno = REGNO (op);
      final_regno = simplify_subreg_regno (regno, innermode, byte, outermode);
      if (HARD_REGISTER_NUM_P (final_regno))
	{
	  rtx x;
	  int final_offset = byte;

	  /* Adjust offset for paradoxical subregs.  */
	  if (byte == 0
	      && GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
	    {
	      int difference = (GET_MODE_SIZE (innermode)
				- GET_MODE_SIZE (outermode));
	      if (WORDS_BIG_ENDIAN)
		final_offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
	      if (BYTES_BIG_ENDIAN)
		final_offset += difference % UNITS_PER_WORD;
	    }

	  x = gen_rtx_REG_offset (op, outermode, final_regno, final_offset);

	  /* Propagate original regno.  We don't have any way to specify
	     the offset inside original regno, so do so only for lowpart.
	     The information is used only by alias analysis that can not
	     grog partial register anyway.  */

	  if (subreg_lowpart_offset (outermode, innermode) == byte)
	    ORIGINAL_REGNO (x) = ORIGINAL_REGNO (op);
	  return x;
	}
    }

  /* If we have a SUBREG of a register that we are replacing and we are
     replacing it with a MEM, make a new MEM and try replacing the
     SUBREG with it.  Don't do this if the MEM has a mode-dependent address
     or if we would be widening it.  */

  if (MEM_P (op)
      && ! mode_dependent_address_p (XEXP (op, 0))
      /* Allow splitting of volatile memory references in case we don't
	 have instruction to move the whole thing.  */
      && (! MEM_VOLATILE_P (op)
	  || ! have_insn_for (SET, innermode))
      && GET_MODE_SIZE (outermode) <= GET_MODE_SIZE (GET_MODE (op)))
    return adjust_address_nv (op, outermode, byte);

  /* Handle complex values represented as CONCAT
     of real and imaginary part.  */
  if (GET_CODE (op) == CONCAT)
    {
      unsigned int part_size, final_offset;
      rtx part, res;

      /* Pick the half of the CONCAT that BYTE falls into.  */
      part_size = GET_MODE_UNIT_SIZE (GET_MODE (XEXP (op, 0)));
      if (byte < part_size)
	{
	  part = XEXP (op, 0);
	  final_offset = byte;
	}
      else
	{
	  part = XEXP (op, 1);
	  final_offset = byte - part_size;
	}

      /* Give up if the requested piece straddles both halves.  */
      if (final_offset + GET_MODE_SIZE (outermode) > part_size)
	return NULL_RTX;

      res = simplify_subreg (outermode, part, GET_MODE (part), final_offset);
      if (res)
	return res;
      if (validate_subreg (outermode, GET_MODE (part), part, final_offset))
	return gen_rtx_SUBREG (outermode, part, final_offset);
      return NULL_RTX;
    }

  /* Optimize SUBREG truncations of zero and sign extended values.  */
  if ((GET_CODE (op) == ZERO_EXTEND
       || GET_CODE (op) == SIGN_EXTEND)
      && GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode))
    {
      unsigned int bitpos = subreg_lsb_1 (outermode, innermode, byte);

      /* If we're requesting the lowpart of a zero or sign extension,
	 there are three possibilities.  If the outermode is the same
	 as the origmode, we can omit both the extension and the subreg.
	 If the outermode is not larger than the origmode, we can apply
	 the truncation without the extension.  Finally, if the outermode
	 is larger than the origmode, but both are integer modes, we
	 can just extend to the appropriate mode.  */
      if (bitpos == 0)
	{
	  enum machine_mode origmode = GET_MODE (XEXP (op, 0));
	  if (outermode == origmode)
	    return XEXP (op, 0);
	  if (GET_MODE_BITSIZE (outermode) <= GET_MODE_BITSIZE (origmode))
	    return simplify_gen_subreg (outermode, XEXP (op, 0), origmode,
					subreg_lowpart_offset (outermode,
							       origmode));
	  if (SCALAR_INT_MODE_P (outermode))
	    return simplify_gen_unary (GET_CODE (op), outermode,
				       XEXP (op, 0), origmode);
	}

      /* A SUBREG resulting from a zero extension may fold to zero if
	 it extracts higher bits that the ZERO_EXTEND's source bits.  */
      if (GET_CODE (op) == ZERO_EXTEND
	  && bitpos >= GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0))))
	return CONST0_RTX (outermode);
    }

  /* Simplify (subreg:QI (lshiftrt:SI (sign_extend:SI (x:QI)) C), 0) into
     to (ashiftrt:QI (x:QI) C), where C is a suitable small constant and
     the outer subreg is effectively a truncation to the original mode.  */
  if ((GET_CODE (op) == LSHIFTRT
       || GET_CODE (op) == ASHIFTRT)
      && SCALAR_INT_MODE_P (outermode)
      /* Ensure that OUTERMODE is at least twice as wide as the INNERMODE
	 to avoid the possibility that an outer LSHIFTRT shifts by more
	 than the sign extension's sign_bit_copies and introduces zeros
	 into the high bits of the result.  */
      && (2 * GET_MODE_BITSIZE (outermode)) <= GET_MODE_BITSIZE (innermode)
      && CONST_INT_P (XEXP (op, 1))
      && GET_CODE (XEXP (op, 0)) == SIGN_EXTEND
      && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
      && subreg_lsb_1 (outermode, innermode, byte) == 0)
    return simplify_gen_binary (ASHIFTRT, outermode,
				XEXP (XEXP (op, 0), 0), XEXP (op, 1));

  /* Likewise (subreg:QI (lshiftrt:SI (zero_extend:SI (x:QI)) C), 0) into
     to (lshiftrt:QI (x:QI) C), where C is a suitable small constant and
     the outer subreg is effectively a truncation to the original mode.  */
  if ((GET_CODE (op) == LSHIFTRT
       || GET_CODE (op) == ASHIFTRT)
      && SCALAR_INT_MODE_P (outermode)
      && GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode)
      && CONST_INT_P (XEXP (op, 1))
      && GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
      && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
      && subreg_lsb_1 (outermode, innermode, byte) == 0)
    return simplify_gen_binary (LSHIFTRT, outermode,
				XEXP (XEXP (op, 0), 0), XEXP (op, 1));

  /* Likewise (subreg:QI (ashift:SI (zero_extend:SI (x:QI)) C), 0) into
     to (ashift:QI (x:QI) C), where C is a suitable small constant and
     the outer subreg is effectively a truncation to the original mode.  */
  if (GET_CODE (op) == ASHIFT
      && SCALAR_INT_MODE_P (outermode)
      && GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode)
      && CONST_INT_P (XEXP (op, 1))
      && (GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
	  || GET_CODE (XEXP (op, 0)) == SIGN_EXTEND)
      && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
      && subreg_lsb_1 (outermode, innermode, byte) == 0)
    return simplify_gen_binary (ASHIFT, outermode,
				XEXP (XEXP (op, 0), 0), XEXP (op, 1));

  /* Recognize a word extraction from a multi-word subreg.  */
  if ((GET_CODE (op) == LSHIFTRT
       || GET_CODE (op) == ASHIFTRT)
      && SCALAR_INT_MODE_P (outermode)
      && GET_MODE_BITSIZE (outermode) >= BITS_PER_WORD
      && GET_MODE_BITSIZE (innermode) >= (2 * GET_MODE_BITSIZE (outermode))
      && CONST_INT_P (XEXP (op, 1))
      && (INTVAL (XEXP (op, 1)) & (GET_MODE_BITSIZE (outermode) - 1)) == 0
      && INTVAL (XEXP (op, 1)) >= 0
      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (innermode)
      && byte == subreg_lowpart_offset (outermode, innermode))
    {
      int shifted_bytes = INTVAL (XEXP (op, 1)) / BITS_PER_UNIT;
      return simplify_gen_subreg (outermode, XEXP (op, 0), innermode,
				  (WORDS_BIG_ENDIAN
				   ? byte - shifted_bytes
				   : byte + shifted_bytes));
    }

  /* If we have a lowpart SUBREG of a right shift of MEM, make a new MEM
     and try replacing the SUBREG and shift with it.  Don't do this if
     the MEM has a mode-dependent address or if we would be widening it.  */

  if ((GET_CODE (op) == LSHIFTRT
       || GET_CODE (op) == ASHIFTRT)
      && MEM_P (XEXP (op, 0))
      && CONST_INT_P (XEXP (op, 1))
      && GET_MODE_SIZE (outermode) < GET_MODE_SIZE (GET_MODE (op))
      && (INTVAL (XEXP (op, 1)) % GET_MODE_BITSIZE (outermode)) == 0
      && INTVAL (XEXP (op, 1)) > 0
      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (innermode)
      && ! mode_dependent_address_p (XEXP (XEXP (op, 0), 0))
      && ! MEM_VOLATILE_P (XEXP (op, 0))
      && byte == subreg_lowpart_offset (outermode, innermode)
      && (GET_MODE_SIZE (outermode) >= UNITS_PER_WORD
	  || WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN))
    {
      int shifted_bytes = INTVAL (XEXP (op, 1)) / BITS_PER_UNIT;
      return adjust_address_nv (XEXP (op, 0), outermode,
				(WORDS_BIG_ENDIAN
				 ? byte - shifted_bytes
				 : byte + shifted_bytes));
    }

  return NULL_RTX;
}
550d1387 5547
949c5d62
JH
5548/* Make a SUBREG operation or equivalent if it folds. */
5549
5550rtx
46c5ad27
AJ
5551simplify_gen_subreg (enum machine_mode outermode, rtx op,
5552 enum machine_mode innermode, unsigned int byte)
949c5d62 5553{
53ed1a12 5554 rtx newx;
949c5d62 5555
53ed1a12
BI
5556 newx = simplify_subreg (outermode, op, innermode, byte);
5557 if (newx)
5558 return newx;
949c5d62 5559
4f1da2e9
RS
5560 if (GET_CODE (op) == SUBREG
5561 || GET_CODE (op) == CONCAT
5562 || GET_MODE (op) == VOIDmode)
949c5d62
JH
5563 return NULL_RTX;
5564
beb72684
RH
5565 if (validate_subreg (outermode, innermode, op, byte))
5566 return gen_rtx_SUBREG (outermode, op, byte);
5567
5568 return NULL_RTX;
949c5d62 5569}
beb72684 5570
0cedb36c
JL
5571/* Simplify X, an rtx expression.
5572
5573 Return the simplified expression or NULL if no simplifications
5574 were possible.
5575
5576 This is the preferred entry point into the simplification routines;
5577 however, we still allow passes to call the more specific routines.
5578
14b493d6 5579 Right now GCC has three (yes, three) major bodies of RTL simplification
0cedb36c
JL
5580 code that need to be unified.
5581
5582 1. fold_rtx in cse.c. This code uses various CSE specific
5583 information to aid in RTL simplification.
5584
5585 2. simplify_rtx in combine.c. Similar to fold_rtx, except that
5586 it uses combine specific information to aid in RTL
5587 simplification.
5588
5589 3. The routines in this file.
5590
5591
5592 Long term we want to only have one body of simplification code; to
5593 get to that state I recommend the following steps:
5594
5595 1. Pour over fold_rtx & simplify_rtx and move any simplifications
5596 which are not pass dependent state into these routines.
5597
5598 2. As code is moved by #1, change fold_rtx & simplify_rtx to
5599 use this routine whenever possible.
5600
5601 3. Allow for pass dependent state to be provided to these
5602 routines and add simplifications based on the pass dependent
5603 state. Remove code from cse.c & combine.c that becomes
5604 redundant/dead.
5605
5606 It will take time, but ultimately the compiler will be easier to
5607 maintain and improve. It's totally silly that when we add a
5608 simplification that it needs to be added to 4 places (3 for RTL
5609 simplification and 1 for tree simplification. */
786de7eb 5610
0cedb36c 5611rtx
58f9752a 5612simplify_rtx (const_rtx x)
0cedb36c 5613{
58f9752a
KG
5614 const enum rtx_code code = GET_CODE (x);
5615 const enum machine_mode mode = GET_MODE (x);
0cedb36c
JL
5616
5617 switch (GET_RTX_CLASS (code))
5618 {
ec8e098d 5619 case RTX_UNARY:
0cedb36c
JL
5620 return simplify_unary_operation (code, mode,
5621 XEXP (x, 0), GET_MODE (XEXP (x, 0)));
ec8e098d 5622 case RTX_COMM_ARITH:
df0afdbe 5623 if (swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
cf6bcbd0 5624 return simplify_gen_binary (code, mode, XEXP (x, 1), XEXP (x, 0));
b42abad8 5625
2b72593e 5626 /* Fall through.... */
b42abad8 5627
ec8e098d 5628 case RTX_BIN_ARITH:
0cedb36c
JL
5629 return simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
5630
ec8e098d
PB
5631 case RTX_TERNARY:
5632 case RTX_BITFIELD_OPS:
0cedb36c 5633 return simplify_ternary_operation (code, mode, GET_MODE (XEXP (x, 0)),
d9c695ff
RK
5634 XEXP (x, 0), XEXP (x, 1),
5635 XEXP (x, 2));
0cedb36c 5636
ec8e098d
PB
5637 case RTX_COMPARE:
5638 case RTX_COMM_COMPARE:
c6fb08ad
PB
5639 return simplify_relational_operation (code, mode,
5640 ((GET_MODE (XEXP (x, 0))
5641 != VOIDmode)
5642 ? GET_MODE (XEXP (x, 0))
5643 : GET_MODE (XEXP (x, 1))),
5644 XEXP (x, 0),
5645 XEXP (x, 1));
d41ba56f 5646
ec8e098d 5647 case RTX_EXTRA:
949c5d62 5648 if (code == SUBREG)
e2561558
RS
5649 return simplify_subreg (mode, SUBREG_REG (x),
5650 GET_MODE (SUBREG_REG (x)),
5651 SUBREG_BYTE (x));
d41ba56f
RS
5652 break;
5653
ec8e098d 5654 case RTX_OBJ:
d41ba56f
RS
5655 if (code == LO_SUM)
5656 {
5657 /* Convert (lo_sum (high FOO) FOO) to FOO. */
5658 if (GET_CODE (XEXP (x, 0)) == HIGH
5659 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
5660 return XEXP (x, 1);
5661 }
5662 break;
5663
0cedb36c 5664 default:
d41ba56f 5665 break;
0cedb36c 5666 }
d41ba56f 5667 return NULL;
0cedb36c 5668}