/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
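/* Illustrative note (not part of the original source): assuming 32-bit
   signed words for the sake of the example, adding 0x7fffffff and 1
   wraps to 0x80000000, and

     OVERFLOW_SUM_SIGN (0x7fffffff, 1, 0x80000000)

   is nonzero because the addends agree in sign while the sum's sign
   differs.  By contrast OVERFLOW_SUM_SIGN (5, -3, 2) is zero: the
   addends already differ in sign, so the addition can never overflow.  */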
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
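/* Illustrative note (not part of the original source): a caller that
   wants to know whether a byte offset is an exact multiple of an
   element size might write something like

     tree q = div_if_zero_remainder (EXACT_DIV_EXPR, offset_cst, size_cst);
     if (q)
       ... use Q as the element index ...

   where OFFSET_CST and SIZE_CST are hypothetical INTEGER_CST operands;
   when the division leaves a remainder the function simply returns
   NULL_TREE and the caller falls back to other strategies.  */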
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
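/* Illustrative note (not part of the original source): a typical caller
   of this machinery brackets speculative folding like so:

     fold_defer_overflow_warnings ();
     folded = fold (expr);
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   where USED_P says whether the folded result was actually kept and
   STMT is the statement to blame in the diagnostic; callers that
   discard the result unconditionally can instead call
   fold_undefer_and_ignore_overflow_warnings ().  */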
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
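/* Illustrative note (not part of the original source): because sin is an
   odd function, negate_mathfn_p returns true for BUILT_IN_SIN (and its
   float/long double variants), which lets the folders rewrite -sin (x)
   as sin (-x); an even function such as cos is rejected, since
   -cos (x) != cos (-x) in general.  */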
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
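/* Illustrative note (not part of the original source): for a 32-bit
   signed type the only constant this rejects is INT_MIN (-2147483648),
   whose bit pattern equals 1 << (prec - 1); negating it would wrap back
   to itself.  Every other value, e.g. -2147483647 or 42, can be negated
   without overflow and the function returns true.  */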
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
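/* Illustrative note (not part of the original source): for a signed
   integer expression such as a - b (with operands that are safe to
   reorder), negate_expr_p answers true, since the negation is simply
   b - a.  For an unsigned multiplication, or for a signed division when
   overflow wraps (e.g. under -fwrapv), it answers false, so
   fold_negate_expr below will return NULL_TREE rather than introduce a
   potentially overflowing rewrite.  */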
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}
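/* Illustrative note (not part of the original source): negate_expr is
   total where fold_negate_expr is partial.  For a signed integer
   expression a - b it yields b - a via the MINUS_EXPR case above, while
   negating an opaque variable or SSA name simply wraps it in a
   NEGATE_EXPR node that is then converted back to the original type.  */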
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
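/* Illustrative note (not part of the original source): splitting the
   expression x - 4 with CODE == PLUS_EXPR and NEGATE_P == 0 leaves the
   variable part x as the return value, sets *MINUS_LITP to 4 (the
   subtracted literal) and leaves *LITP and *CONP null; the caller can
   then recombine the pieces with associate_trees below.  */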
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
                          fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}
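/* Illustrative note (not part of the original source): after split_tree
   has decomposed x - 4 into a variable part x and *MINUS_LITP == 4, a
   caller folding (x - 4) + (y + 10) can combine the literals 4 and 10
   first and then re-attach the resulting constant to x + y with
   associate_trees; this split/recombine pairing is the usual
   reassociation pattern in the binary folders.  */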
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
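/* Illustrative note (not part of the original source): given two
   INTEGER_CST nodes for 7 and 3 of the same signed type,
   int_const_binop (TRUNC_DIV_EXPR, c7, c3, 0) produces the constant 2
   through the fast single-word shortcut above, while dividing by a zero
   constant makes the function return NULL_TREE so the caller keeps the
   original expression (and any runtime trap) intact.  */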
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, 0);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }
  return NULL_TREE;
}

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
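/* Illustrative note (not part of the original source): a caller computing
   a byte offset might write

     off = size_binop_loc (input_location, PLUS_EXPR, base_off,
                           size_int (extra_bytes));

   where BASE_OFF and EXTRA_BYTES are hypothetical names; with two
   INTEGER_CST operands the addition is folded on the spot via
   int_const_binop, otherwise a PLUS_EXPR tree is built through
   fold_build2_loc.  */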
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
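/* Illustrative note (not part of the original source): subtracting the
   sizetype constants 4 and 12 with this function yields the ssizetype
   constant -8; computing 12 - 4 first in the unsigned sizetype and only
   then converting and negating is what keeps the intermediate value from
   wrapping.  */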
c756af79
RH
1499/* A subroutine of fold_convert_const handling conversions of an
1500 INTEGER_CST to another integer type. */
049e524f
RS
1501
1502static tree
ac545c64 1503fold_convert_const_int_from_int (tree type, const_tree arg1)
049e524f 1504{
c756af79 1505 tree t;
049e524f 1506
c756af79
RH
1507 /* Given an integer constant, make new constant with new type,
1508 appropriately sign-extended or truncated. */
9589f23e 1509 t = force_fit_type_double (type, tree_to_double_int (arg1),
9e9ef331 1510 !POINTER_TYPE_P (TREE_TYPE (arg1)),
b8fca551
RG
1511 (TREE_INT_CST_HIGH (arg1) < 0
1512 && (TYPE_UNSIGNED (type)
1513 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
d95787e6 1514 | TREE_OVERFLOW (arg1));
049e524f 1515
c756af79 1516 return t;
049e524f
RS
1517}
1518
c756af79
RH
1519/* A subroutine of fold_convert_const handling conversions a REAL_CST
1520 to an integer type. */
6d716ca8
RS
1521
1522static tree
ac545c64 1523fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
6d716ca8 1524{
649ff3b4 1525 int overflow = 0;
fdb33708
RS
1526 tree t;
1527
c756af79
RH
1528 /* The following code implements the floating point to integer
1529 conversion rules required by the Java Language Specification,
1530 that IEEE NaNs are mapped to zero and values that overflow
1531 the target precision saturate, i.e. values greater than
1532 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1533 are mapped to INT_MIN. These semantics are allowed by the
1534 C and C++ standards that simply state that the behavior of
1535 FP-to-integer conversion is unspecified upon overflow. */
6d716ca8 1536
2bd1333d 1537 double_int val;
c756af79
RH
1538 REAL_VALUE_TYPE r;
1539 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1540
1541 switch (code)
6d716ca8 1542 {
c756af79
RH
1543 case FIX_TRUNC_EXPR:
1544 real_trunc (&r, VOIDmode, &x);
1545 break;
1546
c756af79
RH
1547 default:
1548 gcc_unreachable ();
1549 }
1550
1551 /* If R is NaN, return zero and show we have an overflow. */
1552 if (REAL_VALUE_ISNAN (r))
1553 {
1554 overflow = 1;
2bd1333d 1555 val = double_int_zero;
c756af79
RH
1556 }
1557
1558 /* See if R is less than the lower bound or greater than the
1559 upper bound. */
1560
1561 if (! overflow)
1562 {
1563 tree lt = TYPE_MIN_VALUE (type);
1564 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1565 if (REAL_VALUES_LESS (r, l))
6d716ca8 1566 {
c756af79 1567 overflow = 1;
2bd1333d 1568 val = tree_to_double_int (lt);
6d716ca8 1569 }
c756af79
RH
1570 }
1571
1572 if (! overflow)
1573 {
1574 tree ut = TYPE_MAX_VALUE (type);
1575 if (ut)
6d716ca8 1576 {
c756af79
RH
1577 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1578 if (REAL_VALUES_LESS (u, r))
fdb33708 1579 {
c756af79 1580 overflow = 1;
2bd1333d 1581 val = tree_to_double_int (ut);
c756af79
RH
1582 }
1583 }
1584 }
fdb33708 1585
c756af79 1586 if (! overflow)
2bd1333d 1587 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
fdb33708 1588
9589f23e 1589 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
c756af79
RH
1590 return t;
1591}
fc627530 1592
325217ed
CF
1593/* A subroutine of fold_convert_const handling conversions of a
1594 FIXED_CST to an integer type. */
1595
1596static tree
ac545c64 1597fold_convert_const_int_from_fixed (tree type, const_tree arg1)
325217ed
CF
1598{
1599 tree t;
1600 double_int temp, temp_trunc;
1601 unsigned int mode;
1602
1603 /* Right shift FIXED_CST to temp by fbit. */
1604 temp = TREE_FIXED_CST (arg1).data;
1605 mode = TREE_FIXED_CST (arg1).mode;
1606 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1607 {
2bd1333d
AS
1608 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1609 HOST_BITS_PER_DOUBLE_INT,
1610 SIGNED_FIXED_POINT_MODE_P (mode));
325217ed
CF
1611
1612 /* Left shift temp to temp_trunc by fbit. */
2bd1333d
AS
1613 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1614 HOST_BITS_PER_DOUBLE_INT,
1615 SIGNED_FIXED_POINT_MODE_P (mode));
325217ed
CF
1616 }
1617 else
1618 {
2bd1333d
AS
1619 temp = double_int_zero;
1620 temp_trunc = double_int_zero;
325217ed
CF
1621 }
1622
1623 /* If FIXED_CST is negative, we need to round the value toward 0.
1624 By checking if the fractional bits are not zero to add 1 to temp. */
2bd1333d
AS
1625 if (SIGNED_FIXED_POINT_MODE_P (mode)
1626 && double_int_negative_p (temp_trunc)
325217ed 1627 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2bd1333d 1628 temp = double_int_add (temp, double_int_one);
325217ed
CF
1629
1630 /* Given a fixed-point constant, make new constant with new type,
1631 appropriately sign-extended or truncated. */
9589f23e 1632 t = force_fit_type_double (type, temp, -1,
2bd1333d 1633 (double_int_negative_p (temp)
325217ed
CF
1634 && (TYPE_UNSIGNED (type)
1635 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1636 | TREE_OVERFLOW (arg1));
1637
1638 return t;
1639}
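/* Illustrative sketch (not part of fold-const.c): the same rounding on a
   signed Q16.16 fixed-point value in plain C, assuming the usual arithmetic
   right shift of negative values.  The shift rounds toward negative
   infinity, so a negative value with a nonzero fraction gets 1 added back
   to round toward zero, mirroring the code above.  Function name
   hypothetical.  */
#include <stdint.h>

static int32_t
example_q16_16_to_int (int32_t fx)
{
  int32_t i = fx >> 16;               /* Drop the 16 fraction bits.  */
  if (fx < 0 && (fx & 0xffff) != 0)   /* Negative with a nonzero fraction?  */
    i += 1;                           /* Round toward zero instead of -infinity.  */
  return i;
}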
1640
c756af79
RH
 1641/* A subroutine of fold_convert_const handling conversions of a REAL_CST
1642 to another floating point type. */
fdb33708 1643
c756af79 1644static tree
ac545c64 1645fold_convert_const_real_from_real (tree type, const_tree arg1)
c756af79 1646{
d284eb28 1647 REAL_VALUE_TYPE value;
c756af79 1648 tree t;
e1ee5cdc 1649
d284eb28
RS
1650 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1651 t = build_real (type, value);
875eda9c 1652
d33e4b70
SL
1653 /* If converting an infinity or NAN to a representation that doesn't
1654 have one, set the overflow bit so that we can produce some kind of
1655 error message at the appropriate point if necessary. It's not the
1656 most user-friendly message, but it's better than nothing. */
1657 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1658 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1659 TREE_OVERFLOW (t) = 1;
1660 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1661 && !MODE_HAS_NANS (TYPE_MODE (type)))
1662 TREE_OVERFLOW (t) = 1;
1663 /* Regular overflow, conversion produced an infinity in a mode that
1664 can't represent them. */
1665 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1666 && REAL_VALUE_ISINF (value)
1667 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1668 TREE_OVERFLOW (t) = 1;
1669 else
1670 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
c756af79
RH
1671 return t;
1672}
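/* Illustrative sketch (not part of fold-const.c): narrowing a finite value
   can produce an infinity, the "regular overflow" case flagged above when
   the target mode cannot represent infinities.  Standalone example program,
   assuming IEEE (Annex F) conversion semantics.  */
#include <float.h>
#include <math.h>
#include <stdio.h>

int
main (void)
{
  float f = (float) DBL_MAX;      /* DBL_MAX is finite but far above FLT_MAX.  */
  printf ("%d\n", isinf (f));     /* Prints 1: the narrowed value is +inf.  */
  return 0;
}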
875eda9c 1673
325217ed
CF
 1674/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1675 to a floating point type. */
1676
1677static tree
ac545c64 1678fold_convert_const_real_from_fixed (tree type, const_tree arg1)
325217ed
CF
1679{
1680 REAL_VALUE_TYPE value;
1681 tree t;
1682
1683 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1684 t = build_real (type, value);
1685
1686 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
325217ed
CF
1687 return t;
1688}
1689
 1690/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1691 to another fixed-point type. */
1692
1693static tree
ac545c64 1694fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
325217ed
CF
1695{
1696 FIXED_VALUE_TYPE value;
1697 tree t;
1698 bool overflow_p;
1699
1700 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1701 TYPE_SATURATING (type));
1702 t = build_fixed (type, value);
1703
1704 /* Propagate overflow flags. */
1705 if (overflow_p | TREE_OVERFLOW (arg1))
28ddeea1 1706 TREE_OVERFLOW (t) = 1;
325217ed
CF
1707 return t;
1708}
1709
 1710/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1711 to a fixed-point type. */
1712
1713static tree
ac545c64 1714fold_convert_const_fixed_from_int (tree type, const_tree arg1)
325217ed
CF
1715{
1716 FIXED_VALUE_TYPE value;
1717 tree t;
1718 bool overflow_p;
1719
1720 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1721 TREE_INT_CST (arg1),
1722 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1723 TYPE_SATURATING (type));
1724 t = build_fixed (type, value);
1725
1726 /* Propagate overflow flags. */
1727 if (overflow_p | TREE_OVERFLOW (arg1))
28ddeea1 1728 TREE_OVERFLOW (t) = 1;
325217ed
CF
1729 return t;
1730}
1731
 1732/* A subroutine of fold_convert_const handling conversions of a REAL_CST
1733 to a fixed-point type. */
1734
1735static tree
ac545c64 1736fold_convert_const_fixed_from_real (tree type, const_tree arg1)
325217ed
CF
1737{
1738 FIXED_VALUE_TYPE value;
1739 tree t;
1740 bool overflow_p;
1741
1742 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1743 &TREE_REAL_CST (arg1),
1744 TYPE_SATURATING (type));
1745 t = build_fixed (type, value);
1746
1747 /* Propagate overflow flags. */
1748 if (overflow_p | TREE_OVERFLOW (arg1))
28ddeea1 1749 TREE_OVERFLOW (t) = 1;
325217ed
CF
1750 return t;
1751}
1752
c756af79
RH
1753/* Attempt to fold type conversion operation CODE of expression ARG1 to
1754 type TYPE. If no simplification can be done return NULL_TREE. */
875eda9c 1755
c756af79
RH
1756static tree
1757fold_convert_const (enum tree_code code, tree type, tree arg1)
1758{
1759 if (TREE_TYPE (arg1) == type)
1760 return arg1;
ca7a3bd7 1761
0e4b00d6
AP
1762 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1763 || TREE_CODE (type) == OFFSET_TYPE)
c756af79
RH
1764 {
1765 if (TREE_CODE (arg1) == INTEGER_CST)
1766 return fold_convert_const_int_from_int (type, arg1);
1767 else if (TREE_CODE (arg1) == REAL_CST)
1768 return fold_convert_const_int_from_real (code, type, arg1);
325217ed
CF
1769 else if (TREE_CODE (arg1) == FIXED_CST)
1770 return fold_convert_const_int_from_fixed (type, arg1);
6d716ca8
RS
1771 }
1772 else if (TREE_CODE (type) == REAL_TYPE)
1773 {
6d716ca8
RS
1774 if (TREE_CODE (arg1) == INTEGER_CST)
1775 return build_real_from_int_cst (type, arg1);
325217ed 1776 else if (TREE_CODE (arg1) == REAL_CST)
c756af79 1777 return fold_convert_const_real_from_real (type, arg1);
325217ed
CF
1778 else if (TREE_CODE (arg1) == FIXED_CST)
1779 return fold_convert_const_real_from_fixed (type, arg1);
1780 }
1781 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1782 {
1783 if (TREE_CODE (arg1) == FIXED_CST)
1784 return fold_convert_const_fixed_from_fixed (type, arg1);
1785 else if (TREE_CODE (arg1) == INTEGER_CST)
1786 return fold_convert_const_fixed_from_int (type, arg1);
1787 else if (TREE_CODE (arg1) == REAL_CST)
1788 return fold_convert_const_fixed_from_real (type, arg1);
6d716ca8 1789 }
fdb33708 1790 return NULL_TREE;
6d716ca8 1791}
088414c1 1792
c756af79
RH
1793/* Construct a vector of zero elements of vector type TYPE. */
1794
1795static tree
1796build_zero_vector (tree type)
1797{
1798 tree elem, list;
1799 int i, units;
1800
1801 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1802 units = TYPE_VECTOR_SUBPARTS (type);
b8698a0f 1803
c756af79
RH
1804 list = NULL_TREE;
1805 for (i = 0; i < units; i++)
1806 list = tree_cons (NULL_TREE, elem, list);
1807 return build_vector (type, list);
1808}
1809
3b357646
RG
 1810/* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1811
1812bool
fa233e34 1813fold_convertible_p (const_tree type, const_tree arg)
3b357646
RG
1814{
1815 tree orig = TREE_TYPE (arg);
1816
1817 if (type == orig)
1818 return true;
1819
1820 if (TREE_CODE (arg) == ERROR_MARK
1821 || TREE_CODE (type) == ERROR_MARK
1822 || TREE_CODE (orig) == ERROR_MARK)
1823 return false;
1824
1825 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1826 return true;
1827
1828 switch (TREE_CODE (type))
1829 {
1830 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1831 case POINTER_TYPE: case REFERENCE_TYPE:
1832 case OFFSET_TYPE:
1833 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1834 || TREE_CODE (orig) == OFFSET_TYPE)
1835 return true;
1836 return (TREE_CODE (orig) == VECTOR_TYPE
1837 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1838
c17ee676
FXC
1839 case REAL_TYPE:
1840 case FIXED_POINT_TYPE:
1841 case COMPLEX_TYPE:
1842 case VECTOR_TYPE:
1843 case VOID_TYPE:
3b357646 1844 return TREE_CODE (type) == TREE_CODE (orig);
c17ee676
FXC
1845
1846 default:
1847 return false;
3b357646
RG
1848 }
1849}
1850
088414c1
RS
1851/* Convert expression ARG to type TYPE. Used by the middle-end for
1852 simple conversions in preference to calling the front-end's convert. */
1853
e419fe91 1854tree
db3927fb 1855fold_convert_loc (location_t loc, tree type, tree arg)
088414c1
RS
1856{
1857 tree orig = TREE_TYPE (arg);
1858 tree tem;
1859
1860 if (type == orig)
1861 return arg;
1862
1863 if (TREE_CODE (arg) == ERROR_MARK
1864 || TREE_CODE (type) == ERROR_MARK
1865 || TREE_CODE (orig) == ERROR_MARK)
1866 return error_mark_node;
1867
f4088621 1868 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
db3927fb 1869 return fold_build1_loc (loc, NOP_EXPR, type, arg);
088414c1 1870
0bccc606 1871 switch (TREE_CODE (type))
088414c1 1872 {
09e881c9
BE
1873 case POINTER_TYPE:
1874 case REFERENCE_TYPE:
1875 /* Handle conversions between pointers to different address spaces. */
1876 if (POINTER_TYPE_P (orig)
1877 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1878 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1879 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1880 /* fall through */
1881
71d59383 1882 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
0bccc606 1883 case OFFSET_TYPE:
088414c1
RS
1884 if (TREE_CODE (arg) == INTEGER_CST)
1885 {
1886 tem = fold_convert_const (NOP_EXPR, type, arg);
1887 if (tem != NULL_TREE)
1888 return tem;
1889 }
908d0773
AP
1890 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1891 || TREE_CODE (orig) == OFFSET_TYPE)
db3927fb 1892 return fold_build1_loc (loc, NOP_EXPR, type, arg);
088414c1 1893 if (TREE_CODE (orig) == COMPLEX_TYPE)
db3927fb
AH
1894 return fold_convert_loc (loc, type,
1895 fold_build1_loc (loc, REALPART_EXPR,
1896 TREE_TYPE (orig), arg));
0bccc606
NS
1897 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1898 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
db3927fb 1899 return fold_build1_loc (loc, NOP_EXPR, type, arg);
3e6688a7 1900
0bccc606 1901 case REAL_TYPE:
088414c1
RS
1902 if (TREE_CODE (arg) == INTEGER_CST)
1903 {
1904 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1905 if (tem != NULL_TREE)
1906 return tem;
1907 }
1908 else if (TREE_CODE (arg) == REAL_CST)
1909 {
1910 tem = fold_convert_const (NOP_EXPR, type, arg);
1911 if (tem != NULL_TREE)
1912 return tem;
1913 }
325217ed
CF
1914 else if (TREE_CODE (arg) == FIXED_CST)
1915 {
1916 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1917 if (tem != NULL_TREE)
1918 return tem;
1919 }
088414c1 1920
0bccc606 1921 switch (TREE_CODE (orig))
088414c1 1922 {
71d59383 1923 case INTEGER_TYPE:
0bccc606
NS
1924 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1925 case POINTER_TYPE: case REFERENCE_TYPE:
db3927fb 1926 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
3e6688a7 1927
0bccc606 1928 case REAL_TYPE:
db3927fb 1929 return fold_build1_loc (loc, NOP_EXPR, type, arg);
3e6688a7 1930
325217ed 1931 case FIXED_POINT_TYPE:
db3927fb 1932 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
325217ed
CF
1933
1934 case COMPLEX_TYPE:
db3927fb
AH
1935 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1936 return fold_convert_loc (loc, type, tem);
325217ed
CF
1937
1938 default:
1939 gcc_unreachable ();
1940 }
1941
1942 case FIXED_POINT_TYPE:
1943 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1944 || TREE_CODE (arg) == REAL_CST)
1945 {
1946 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1947 if (tem != NULL_TREE)
db3927fb 1948 goto fold_convert_exit;
325217ed
CF
1949 }
1950
1951 switch (TREE_CODE (orig))
1952 {
1953 case FIXED_POINT_TYPE:
1954 case INTEGER_TYPE:
1955 case ENUMERAL_TYPE:
1956 case BOOLEAN_TYPE:
1957 case REAL_TYPE:
db3927fb 1958 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
325217ed 1959
0bccc606 1960 case COMPLEX_TYPE:
db3927fb
AH
1961 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1962 return fold_convert_loc (loc, type, tem);
3e6688a7 1963
0bccc606
NS
1964 default:
1965 gcc_unreachable ();
088414c1 1966 }
3e6688a7 1967
0bccc606
NS
1968 case COMPLEX_TYPE:
1969 switch (TREE_CODE (orig))
1970 {
71d59383 1971 case INTEGER_TYPE:
0bccc606
NS
1972 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1973 case POINTER_TYPE: case REFERENCE_TYPE:
1974 case REAL_TYPE:
325217ed 1975 case FIXED_POINT_TYPE:
db3927fb
AH
1976 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1977 fold_convert_loc (loc, TREE_TYPE (type), arg),
1978 fold_convert_loc (loc, TREE_TYPE (type),
3111cce0 1979 integer_zero_node));
0bccc606
NS
1980 case COMPLEX_TYPE:
1981 {
1982 tree rpart, ipart;
3e6688a7 1983
0bccc606
NS
1984 if (TREE_CODE (arg) == COMPLEX_EXPR)
1985 {
db3927fb
AH
1986 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1987 TREE_OPERAND (arg, 0));
1988 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1989 TREE_OPERAND (arg, 1));
1990 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
0bccc606 1991 }
3e6688a7 1992
0bccc606 1993 arg = save_expr (arg);
db3927fb
AH
1994 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1995 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1996 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1997 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1998 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
0bccc606 1999 }
3e6688a7 2000
0bccc606
NS
2001 default:
2002 gcc_unreachable ();
2003 }
3e6688a7 2004
0bccc606 2005 case VECTOR_TYPE:
049e524f
RS
2006 if (integer_zerop (arg))
2007 return build_zero_vector (type);
0bccc606
NS
2008 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2009 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2010 || TREE_CODE (orig) == VECTOR_TYPE);
db3927fb 2011 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
088414c1 2012
0bccc606 2013 case VOID_TYPE:
bd7e4636 2014 tem = fold_ignored_result (arg);
726a989a 2015 if (TREE_CODE (tem) == MODIFY_EXPR)
db3927fb
AH
2016 goto fold_convert_exit;
2017 return fold_build1_loc (loc, NOP_EXPR, type, tem);
088414c1 2018
0bccc606
NS
2019 default:
2020 gcc_unreachable ();
088414c1 2021 }
db3927fb
AH
2022 fold_convert_exit:
2023 protected_set_expr_location (tem, loc);
2024 return tem;
088414c1 2025}
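/* Illustrative sketch (not part of fold-const.c): the COMPLEX_TYPE cases
   above convert a complex value by converting its real and imaginary parts
   separately and recombining them with COMPLEX_EXPR; the same split in
   plain C.  Standalone example, function name hypothetical.  */
#include <complex.h>

static double _Complex
example_widen_complex (float _Complex z)
{
  double re = (double) crealf (z);   /* Convert the real part...  */
  double im = (double) cimagf (z);   /* ...and the imaginary part.  */
  return re + im * I;                /* Recombine the two parts.  */
}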
6d716ca8 2026\f
569b7f6a 2027/* Return false if expr can be assumed not to be an lvalue, true
283da5df 2028 otherwise. */
6d716ca8 2029
283da5df 2030static bool
ac545c64 2031maybe_lvalue_p (const_tree x)
6d716ca8 2032{
8d4a2ff6
RS
2033 /* We only need to wrap lvalue tree codes. */
2034 switch (TREE_CODE (x))
2035 {
2036 case VAR_DECL:
2037 case PARM_DECL:
2038 case RESULT_DECL:
2039 case LABEL_DECL:
2040 case FUNCTION_DECL:
2041 case SSA_NAME:
2042
2043 case COMPONENT_REF:
2044 case INDIRECT_REF:
7ccf35ed
DN
2045 case ALIGN_INDIRECT_REF:
2046 case MISALIGNED_INDIRECT_REF:
8d4a2ff6 2047 case ARRAY_REF:
44de5aeb 2048 case ARRAY_RANGE_REF:
8d4a2ff6 2049 case BIT_FIELD_REF:
0f59171d 2050 case OBJ_TYPE_REF:
8d4a2ff6
RS
2051
2052 case REALPART_EXPR:
2053 case IMAGPART_EXPR:
2054 case PREINCREMENT_EXPR:
2055 case PREDECREMENT_EXPR:
2056 case SAVE_EXPR:
8d4a2ff6
RS
2057 case TRY_CATCH_EXPR:
2058 case WITH_CLEANUP_EXPR:
2059 case COMPOUND_EXPR:
2060 case MODIFY_EXPR:
2061 case TARGET_EXPR:
2062 case COND_EXPR:
2063 case BIND_EXPR:
8d4a2ff6
RS
2064 break;
2065
2066 default:
2067 /* Assume the worst for front-end tree codes. */
2068 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2069 break;
283da5df 2070 return false;
8d4a2ff6 2071 }
283da5df
RS
2072
2073 return true;
2074}
2075
2076/* Return an expr equal to X but certainly not valid as an lvalue. */
2077
2078tree
db3927fb 2079non_lvalue_loc (location_t loc, tree x)
283da5df
RS
2080{
2081 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2082 us. */
2083 if (in_gimple_form)
2084 return x;
2085
2086 if (! maybe_lvalue_p (x))
2087 return x;
db3927fb
AH
2088 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2089 SET_EXPR_LOCATION (x, loc);
2090 return x;
6d716ca8 2091}
a5e9b124 2092
e9866da3
JM
2093/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2094 Zero means allow extended lvalues. */
2095
2096int pedantic_lvalues;
2097
a5e9b124
JW
2098/* When pedantic, return an expr equal to X but certainly not valid as a
2099 pedantic lvalue. Otherwise, return X. */
2100
49995c8e 2101static tree
db3927fb 2102pedantic_non_lvalue_loc (location_t loc, tree x)
a5e9b124 2103{
e9866da3 2104 if (pedantic_lvalues)
db3927fb
AH
2105 return non_lvalue_loc (loc, x);
2106 protected_set_expr_location (x, loc);
2107 return x;
a5e9b124 2108}
c05a9b68
RS
2109\f
2110/* Given a tree comparison code, return the code that is the logical inverse
2111 of the given code. It is not safe to do this for floating-point
d1a7edaf
PB
2112 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2113 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
6d716ca8 2114
227858d1 2115enum tree_code
d1a7edaf 2116invert_tree_comparison (enum tree_code code, bool honor_nans)
c05a9b68 2117{
d1a7edaf
PB
2118 if (honor_nans && flag_trapping_math)
2119 return ERROR_MARK;
2120
c05a9b68
RS
2121 switch (code)
2122 {
2123 case EQ_EXPR:
2124 return NE_EXPR;
2125 case NE_EXPR:
2126 return EQ_EXPR;
2127 case GT_EXPR:
d1a7edaf 2128 return honor_nans ? UNLE_EXPR : LE_EXPR;
c05a9b68 2129 case GE_EXPR:
d1a7edaf 2130 return honor_nans ? UNLT_EXPR : LT_EXPR;
c05a9b68 2131 case LT_EXPR:
d1a7edaf 2132 return honor_nans ? UNGE_EXPR : GE_EXPR;
c05a9b68 2133 case LE_EXPR:
d1a7edaf
PB
2134 return honor_nans ? UNGT_EXPR : GT_EXPR;
2135 case LTGT_EXPR:
2136 return UNEQ_EXPR;
2137 case UNEQ_EXPR:
2138 return LTGT_EXPR;
2139 case UNGT_EXPR:
2140 return LE_EXPR;
2141 case UNGE_EXPR:
2142 return LT_EXPR;
2143 case UNLT_EXPR:
2144 return GE_EXPR;
2145 case UNLE_EXPR:
c05a9b68 2146 return GT_EXPR;
d1a7edaf
PB
2147 case ORDERED_EXPR:
2148 return UNORDERED_EXPR;
2149 case UNORDERED_EXPR:
2150 return ORDERED_EXPR;
c05a9b68 2151 default:
0bccc606 2152 gcc_unreachable ();
c05a9b68
RS
2153 }
2154}
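/* Illustrative sketch (not part of fold-const.c): why inverting a
   floating-point comparison needs the UN* codes.  With a NaN operand,
   !(a < b) is true while (a >= b) is false, so LT_EXPR must invert to
   UNGE_EXPR rather than GE_EXPR when NaNs are honored.  Standalone example
   program.  */
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double a = NAN, b = 1.0;
  printf ("%d %d\n", !(a < b), a >= b);   /* Prints "1 0".  */
  return 0;
}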
2155
2156/* Similar, but return the comparison that results if the operands are
2157 swapped. This is safe for floating-point. */
2158
fd660b1b 2159enum tree_code
fa8db1f7 2160swap_tree_comparison (enum tree_code code)
c05a9b68
RS
2161{
2162 switch (code)
2163 {
2164 case EQ_EXPR:
2165 case NE_EXPR:
09b2f9e8
RS
2166 case ORDERED_EXPR:
2167 case UNORDERED_EXPR:
2168 case LTGT_EXPR:
2169 case UNEQ_EXPR:
c05a9b68
RS
2170 return code;
2171 case GT_EXPR:
2172 return LT_EXPR;
2173 case GE_EXPR:
2174 return LE_EXPR;
2175 case LT_EXPR:
2176 return GT_EXPR;
2177 case LE_EXPR:
2178 return GE_EXPR;
09b2f9e8
RS
2179 case UNGT_EXPR:
2180 return UNLT_EXPR;
2181 case UNGE_EXPR:
2182 return UNLE_EXPR;
2183 case UNLT_EXPR:
2184 return UNGT_EXPR;
2185 case UNLE_EXPR:
2186 return UNGE_EXPR;
c05a9b68 2187 default:
0bccc606 2188 gcc_unreachable ();
c05a9b68
RS
2189 }
2190}
61f275ff 2191
8dcb27ed
RS
2192
2193/* Convert a comparison tree code from an enum tree_code representation
2194 into a compcode bit-based encoding. This function is the inverse of
2195 compcode_to_comparison. */
2196
d1a7edaf 2197static enum comparison_code
fa8db1f7 2198comparison_to_compcode (enum tree_code code)
8dcb27ed
RS
2199{
2200 switch (code)
2201 {
2202 case LT_EXPR:
2203 return COMPCODE_LT;
2204 case EQ_EXPR:
2205 return COMPCODE_EQ;
2206 case LE_EXPR:
2207 return COMPCODE_LE;
2208 case GT_EXPR:
2209 return COMPCODE_GT;
2210 case NE_EXPR:
2211 return COMPCODE_NE;
2212 case GE_EXPR:
2213 return COMPCODE_GE;
d1a7edaf
PB
2214 case ORDERED_EXPR:
2215 return COMPCODE_ORD;
2216 case UNORDERED_EXPR:
2217 return COMPCODE_UNORD;
2218 case UNLT_EXPR:
2219 return COMPCODE_UNLT;
2220 case UNEQ_EXPR:
2221 return COMPCODE_UNEQ;
2222 case UNLE_EXPR:
2223 return COMPCODE_UNLE;
2224 case UNGT_EXPR:
2225 return COMPCODE_UNGT;
2226 case LTGT_EXPR:
2227 return COMPCODE_LTGT;
2228 case UNGE_EXPR:
2229 return COMPCODE_UNGE;
8dcb27ed 2230 default:
0bccc606 2231 gcc_unreachable ();
8dcb27ed
RS
2232 }
2233}
2234
2235/* Convert a compcode bit-based encoding of a comparison operator back
2236 to GCC's enum tree_code representation. This function is the
2237 inverse of comparison_to_compcode. */
2238
2239static enum tree_code
d1a7edaf 2240compcode_to_comparison (enum comparison_code code)
8dcb27ed
RS
2241{
2242 switch (code)
2243 {
2244 case COMPCODE_LT:
2245 return LT_EXPR;
2246 case COMPCODE_EQ:
2247 return EQ_EXPR;
2248 case COMPCODE_LE:
2249 return LE_EXPR;
2250 case COMPCODE_GT:
2251 return GT_EXPR;
2252 case COMPCODE_NE:
2253 return NE_EXPR;
2254 case COMPCODE_GE:
2255 return GE_EXPR;
d1a7edaf
PB
2256 case COMPCODE_ORD:
2257 return ORDERED_EXPR;
2258 case COMPCODE_UNORD:
2259 return UNORDERED_EXPR;
2260 case COMPCODE_UNLT:
2261 return UNLT_EXPR;
2262 case COMPCODE_UNEQ:
2263 return UNEQ_EXPR;
2264 case COMPCODE_UNLE:
2265 return UNLE_EXPR;
2266 case COMPCODE_UNGT:
2267 return UNGT_EXPR;
2268 case COMPCODE_LTGT:
2269 return LTGT_EXPR;
2270 case COMPCODE_UNGE:
2271 return UNGE_EXPR;
8dcb27ed 2272 default:
0bccc606 2273 gcc_unreachable ();
8dcb27ed
RS
2274 }
2275}
2276
d1a7edaf
PB
2277/* Return a tree for the comparison which is the combination of
2278 doing the AND or OR (depending on CODE) of the two operations LCODE
2279 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2280 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2281 if this makes the transformation invalid. */
2282
2283tree
db3927fb
AH
2284combine_comparisons (location_t loc,
2285 enum tree_code code, enum tree_code lcode,
d1a7edaf
PB
2286 enum tree_code rcode, tree truth_type,
2287 tree ll_arg, tree lr_arg)
2288{
2289 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2290 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2291 enum comparison_code rcompcode = comparison_to_compcode (rcode);
32e8bb8e 2292 int compcode;
d1a7edaf
PB
2293
2294 switch (code)
2295 {
2296 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2297 compcode = lcompcode & rcompcode;
2298 break;
2299
2300 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2301 compcode = lcompcode | rcompcode;
2302 break;
2303
2304 default:
2305 return NULL_TREE;
2306 }
2307
2308 if (!honor_nans)
2309 {
2310 /* Eliminate unordered comparisons, as well as LTGT and ORD
2311 which are not used unless the mode has NaNs. */
2312 compcode &= ~COMPCODE_UNORD;
2313 if (compcode == COMPCODE_LTGT)
2314 compcode = COMPCODE_NE;
2315 else if (compcode == COMPCODE_ORD)
2316 compcode = COMPCODE_TRUE;
2317 }
2318 else if (flag_trapping_math)
2319 {
d1822754 2320 /* Check that the original operation and the optimized ones will trap
d1a7edaf
PB
2321 under the same condition. */
2322 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2323 && (lcompcode != COMPCODE_EQ)
2324 && (lcompcode != COMPCODE_ORD);
2325 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2326 && (rcompcode != COMPCODE_EQ)
2327 && (rcompcode != COMPCODE_ORD);
2328 bool trap = (compcode & COMPCODE_UNORD) == 0
2329 && (compcode != COMPCODE_EQ)
2330 && (compcode != COMPCODE_ORD);
2331
2332 /* In a short-circuited boolean expression the LHS might be
2333 such that the RHS, if evaluated, will never trap. For
2334 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2335 if neither x nor y is NaN. (This is a mixed blessing: for
2336 example, the expression above will never trap, hence
2337 optimizing it to x < y would be invalid). */
2338 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2339 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2340 rtrap = false;
2341
2342 /* If the comparison was short-circuited, and only the RHS
2343 trapped, we may now generate a spurious trap. */
2344 if (rtrap && !ltrap
2345 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2346 return NULL_TREE;
2347
2348 /* If we changed the conditions that cause a trap, we lose. */
2349 if ((ltrap || rtrap) != trap)
2350 return NULL_TREE;
2351 }
2352
2353 if (compcode == COMPCODE_TRUE)
1b0f3e79 2354 return constant_boolean_node (true, truth_type);
d1a7edaf 2355 else if (compcode == COMPCODE_FALSE)
1b0f3e79 2356 return constant_boolean_node (false, truth_type);
d1a7edaf 2357 else
32e8bb8e
ILT
2358 {
2359 enum tree_code tcode;
2360
2361 tcode = compcode_to_comparison ((enum comparison_code) compcode);
db3927fb 2362 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
32e8bb8e 2363 }
d1a7edaf 2364}
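/* Illustrative sketch (not part of fold-const.c): the bit-based compcode
   encoding turns AND/OR of comparisons on the same operands into simple
   mask operations.  For example (x <= y) && (x >= y) combines as
   COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ, i.e. x == y.
   Standalone example program using a local copy of the LT/EQ/GT bits.  */
#include <stdio.h>

enum example_compcode { EX_LT = 1, EX_EQ = 2, EX_GT = 4 };

int
main (void)
{
  int le = EX_LT | EX_EQ;       /* Encoding of <=.  */
  int ge = EX_GT | EX_EQ;       /* Encoding of >=.  */
  printf ("%d\n", le & ge);     /* Prints 2, the EQ bit: <= AND >= is ==.  */
  printf ("%d\n", le | ge);     /* Prints 7, LT|EQ|GT: <= OR >= is always true
                                   for ordered operands.  */
  return 0;
}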
c05a9b68 2365\f
fae111c1
RS
2366/* Return nonzero if two operands (typically of the same tree node)
2367 are necessarily equal. If either argument has side-effects this
1ea7e6ad 2368 function returns zero. FLAGS modifies behavior as follows:
fae111c1 2369
6de9cd9a 2370 If OEP_ONLY_CONST is set, only return nonzero for constants.
6a1746af
RS
2371 This function tests whether the operands are indistinguishable;
2372 it does not test whether they are equal using C's == operation.
2373 The distinction is important for IEEE floating point, because
2374 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
fae111c1
RS
2375 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2376
6de9cd9a 2377 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
fae111c1
RS
2378 even though it may hold multiple values during a function.
2379 This is because a GCC tree node guarantees that nothing else is
2380 executed between the evaluation of its "operands" (which may often
2381 be evaluated in arbitrary order). Hence if the operands themselves
2382 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3dd8069d
PB
2383 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2384 unset means assuming isochronic (or instantaneous) tree equivalence.
2385 Unless comparing arbitrary expression trees, such as from different
2386 statements, this flag can usually be left unset.
6de9cd9a
DN
2387
2388 If OEP_PURE_SAME is set, then pure functions with identical arguments
2389 are considered the same. It is used when the caller has other ways
2390 to ensure that global memory is unchanged in between. */
6d716ca8
RS
2391
2392int
fa233e34 2393operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
6d716ca8 2394{
8df83eae 2395 /* If either is ERROR_MARK, they aren't equal. */
2aac1924
JM
2396 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2397 || TREE_TYPE (arg0) == error_mark_node
2398 || TREE_TYPE (arg1) == error_mark_node)
8df83eae
RK
2399 return 0;
2400
56c47f22
RG
 2401 /* Similarly, if either does not have a type (like a released SSA name),
2402 they aren't equal. */
2403 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2404 return 0;
2405
ba2e1892
RG
2406 /* Check equality of integer constants before bailing out due to
2407 precision differences. */
2408 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2409 return tree_int_cst_equal (arg0, arg1);
2410
6d716ca8
RS
2411 /* If both types don't have the same signedness, then we can't consider
2412 them equal. We must check this before the STRIP_NOPS calls
b13e7b6c
RG
2413 because they may change the signedness of the arguments. As pointers
2414 strictly don't have a signedness, require either two pointers or
2415 two non-pointers as well. */
2416 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2417 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
6d716ca8
RS
2418 return 0;
2419
09e881c9
BE
2420 /* We cannot consider pointers to different address space equal. */
2421 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2422 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2423 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2424 return 0;
2425
096dce1b
RG
2426 /* If both types don't have the same precision, then it is not safe
2427 to strip NOPs. */
2428 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2429 return 0;
2430
6d716ca8
RS
2431 STRIP_NOPS (arg0);
2432 STRIP_NOPS (arg1);
2433
a04d8591
RG
2434 /* In case both args are comparisons but with different comparison
2435 code, try to swap the comparison operands of one arg to produce
2436 a match and compare that variant. */
2437 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2438 && COMPARISON_CLASS_P (arg0)
2439 && COMPARISON_CLASS_P (arg1))
2440 {
2441 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2442
2443 if (TREE_CODE (arg0) == swap_code)
2444 return operand_equal_p (TREE_OPERAND (arg0, 0),
2445 TREE_OPERAND (arg1, 1), flags)
2446 && operand_equal_p (TREE_OPERAND (arg0, 1),
2447 TREE_OPERAND (arg1, 0), flags);
2448 }
2449
c7cfe938
RK
2450 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2451 /* This is needed for conversions and for COMPONENT_REF.
2452 Might as well play it safe and always test this. */
e89a9554
ZW
2453 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2454 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
c7cfe938 2455 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
6d716ca8
RS
2456 return 0;
2457
c7cfe938
RK
2458 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2459 We don't care about side effects in that case because the SAVE_EXPR
2460 takes care of that for us. In all other cases, two expressions are
2461 equal if they have no side effects. If we have two identical
2462 expressions with side effects that should be treated the same due
2463 to the only side effects being identical SAVE_EXPR's, that will
2464 be detected in the recursive calls below. */
6de9cd9a 2465 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
c7cfe938
RK
2466 && (TREE_CODE (arg0) == SAVE_EXPR
2467 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
6d716ca8
RS
2468 return 1;
2469
c7cfe938
RK
2470 /* Next handle constant cases, those for which we can return 1 even
2471 if ONLY_CONST is set. */
2472 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2473 switch (TREE_CODE (arg0))
2474 {
2475 case INTEGER_CST:
85914552 2476 return tree_int_cst_equal (arg0, arg1);
c7cfe938 2477
325217ed
CF
2478 case FIXED_CST:
2479 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2480 TREE_FIXED_CST (arg1));
2481
c7cfe938 2482 case REAL_CST:
0446c9f3
ZD
2483 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2484 TREE_REAL_CST (arg1)))
2485 return 1;
2486
b8698a0f 2487
0446c9f3
ZD
2488 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2489 {
2490 /* If we do not distinguish between signed and unsigned zero,
2491 consider them equal. */
2492 if (real_zerop (arg0) && real_zerop (arg1))
2493 return 1;
2494 }
2495 return 0;
c7cfe938 2496
69ef87e2
AH
2497 case VECTOR_CST:
2498 {
2499 tree v1, v2;
2500
69ef87e2
AH
2501 v1 = TREE_VECTOR_CST_ELTS (arg0);
2502 v2 = TREE_VECTOR_CST_ELTS (arg1);
2503 while (v1 && v2)
2504 {
875427f0 2505 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
6de9cd9a 2506 flags))
69ef87e2
AH
2507 return 0;
2508 v1 = TREE_CHAIN (v1);
2509 v2 = TREE_CHAIN (v2);
2510 }
2511
40182dbf 2512 return v1 == v2;
69ef87e2
AH
2513 }
2514
c7cfe938
RK
2515 case COMPLEX_CST:
2516 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
6de9cd9a 2517 flags)
c7cfe938 2518 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
6de9cd9a 2519 flags));
c7cfe938
RK
2520
2521 case STRING_CST:
2522 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
71145810 2523 && ! memcmp (TREE_STRING_POINTER (arg0),
c7cfe938
RK
2524 TREE_STRING_POINTER (arg1),
2525 TREE_STRING_LENGTH (arg0)));
2526
2527 case ADDR_EXPR:
2528 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2529 0);
e9a25f70
JL
2530 default:
2531 break;
c7cfe938 2532 }
6d716ca8 2533
6de9cd9a 2534 if (flags & OEP_ONLY_CONST)
6d716ca8
RS
2535 return 0;
2536
38318b73 2537/* Define macros to test an operand from arg0 and arg1 for equality and a
624b15fa
RK
2538 variant that allows null and views null as being different from any
 2539 non-null value. In the latter case, if either is null, they both
2540 must be; otherwise, do the normal comparison. */
2541#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2542 TREE_OPERAND (arg1, N), flags)
2543
2544#define OP_SAME_WITH_NULL(N) \
2545 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2546 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2547
6d716ca8
RS
2548 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2549 {
6615c446 2550 case tcc_unary:
6d716ca8 2551 /* Two conversions are equal only if signedness and modes match. */
266bff3a
JJ
2552 switch (TREE_CODE (arg0))
2553 {
1043771b 2554 CASE_CONVERT:
266bff3a 2555 case FIX_TRUNC_EXPR:
266bff3a
JJ
2556 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2557 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2558 return 0;
2559 break;
2560 default:
2561 break;
2562 }
6d716ca8 2563
624b15fa
RK
2564 return OP_SAME (0);
2565
6d716ca8 2566
6615c446
JO
2567 case tcc_comparison:
2568 case tcc_binary:
624b15fa 2569 if (OP_SAME (0) && OP_SAME (1))
c7cfe938
RK
2570 return 1;
2571
2572 /* For commutative ops, allow the other order. */
3168cb99 2573 return (commutative_tree_code (TREE_CODE (arg0))
c7cfe938 2574 && operand_equal_p (TREE_OPERAND (arg0, 0),
6de9cd9a 2575 TREE_OPERAND (arg1, 1), flags)
6d716ca8 2576 && operand_equal_p (TREE_OPERAND (arg0, 1),
6de9cd9a 2577 TREE_OPERAND (arg1, 0), flags));
6d716ca8 2578
6615c446 2579 case tcc_reference:
21c43754
RS
2580 /* If either of the pointer (or reference) expressions we are
2581 dereferencing contain a side effect, these cannot be equal. */
05ca5990
GRK
2582 if (TREE_SIDE_EFFECTS (arg0)
2583 || TREE_SIDE_EFFECTS (arg1))
2584 return 0;
2585
6d716ca8
RS
2586 switch (TREE_CODE (arg0))
2587 {
2588 case INDIRECT_REF:
7ccf35ed
DN
2589 case ALIGN_INDIRECT_REF:
2590 case MISALIGNED_INDIRECT_REF:
497be978
RH
2591 case REALPART_EXPR:
2592 case IMAGPART_EXPR:
624b15fa 2593 return OP_SAME (0);
6d716ca8 2594
70f34814
RG
2595 case MEM_REF:
2596 /* Require equal access sizes. We can have incomplete types
2597 for array references of variable-sized arrays from the
 2598 Fortran frontend though. */
2599 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2600 || (TYPE_SIZE (TREE_TYPE (arg0))
2601 && TYPE_SIZE (TREE_TYPE (arg1))
2602 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2603 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2604 && OP_SAME (0) && OP_SAME (1));
2605
6d716ca8 2606 case ARRAY_REF:
b4e3fabb 2607 case ARRAY_RANGE_REF:
5852948c
RG
2608 /* Operands 2 and 3 may be null.
2609 Compare the array index by value if it is constant first as we
2610 may have different types but same value here. */
624b15fa 2611 return (OP_SAME (0)
5852948c
RG
2612 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2613 TREE_OPERAND (arg1, 1))
2614 || OP_SAME (1))
624b15fa
RK
2615 && OP_SAME_WITH_NULL (2)
2616 && OP_SAME_WITH_NULL (3));
462fdcce
RK
2617
2618 case COMPONENT_REF:
78b76d08
SB
2619 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2620 may be NULL when we're called to compare MEM_EXPRs. */
2621 return OP_SAME_WITH_NULL (0)
2622 && OP_SAME (1)
2623 && OP_SAME_WITH_NULL (2);
a60749f5 2624
40b32ef8 2625 case BIT_FIELD_REF:
624b15fa
RK
2626 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2627
e9a25f70
JL
2628 default:
2629 return 0;
6d716ca8 2630 }
45f97e2e 2631
6615c446 2632 case tcc_expression:
1bfedcc8
JM
2633 switch (TREE_CODE (arg0))
2634 {
2635 case ADDR_EXPR:
2636 case TRUTH_NOT_EXPR:
624b15fa 2637 return OP_SAME (0);
1bfedcc8 2638
54d581a2
RS
2639 case TRUTH_ANDIF_EXPR:
2640 case TRUTH_ORIF_EXPR:
624b15fa 2641 return OP_SAME (0) && OP_SAME (1);
54d581a2
RS
2642
2643 case TRUTH_AND_EXPR:
2644 case TRUTH_OR_EXPR:
2645 case TRUTH_XOR_EXPR:
624b15fa
RK
2646 if (OP_SAME (0) && OP_SAME (1))
2647 return 1;
2648
2649 /* Otherwise take into account this is a commutative operation. */
54d581a2 2650 return (operand_equal_p (TREE_OPERAND (arg0, 0),
624b15fa 2651 TREE_OPERAND (arg1, 1), flags)
54d581a2 2652 && operand_equal_p (TREE_OPERAND (arg0, 1),
624b15fa 2653 TREE_OPERAND (arg1, 0), flags));
54d581a2 2654
05f41289
KG
2655 case COND_EXPR:
2656 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
b8698a0f 2657
5039610b
SL
2658 default:
2659 return 0;
2660 }
2661
2662 case tcc_vl_exp:
2663 switch (TREE_CODE (arg0))
2664 {
21c43754
RS
2665 case CALL_EXPR:
2666 /* If the CALL_EXPRs call different functions, then they
2667 clearly can not be equal. */
5039610b
SL
2668 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2669 flags))
21c43754
RS
2670 return 0;
2671
6de9cd9a
DN
2672 {
2673 unsigned int cef = call_expr_flags (arg0);
2674 if (flags & OEP_PURE_SAME)
2675 cef &= ECF_CONST | ECF_PURE;
2676 else
2677 cef &= ECF_CONST;
2678 if (!cef)
2679 return 0;
2680 }
21c43754 2681
5039610b
SL
2682 /* Now see if all the arguments are the same. */
2683 {
fa233e34
KG
2684 const_call_expr_arg_iterator iter0, iter1;
2685 const_tree a0, a1;
2686 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2687 a1 = first_const_call_expr_arg (arg1, &iter1);
5039610b 2688 a0 && a1;
fa233e34
KG
2689 a0 = next_const_call_expr_arg (&iter0),
2690 a1 = next_const_call_expr_arg (&iter1))
5039610b 2691 if (! operand_equal_p (a0, a1, flags))
21c43754
RS
2692 return 0;
2693
5039610b
SL
2694 /* If we get here and both argument lists are exhausted
2695 then the CALL_EXPRs are equal. */
2696 return ! (a0 || a1);
2697 }
1bfedcc8
JM
2698 default:
2699 return 0;
2700 }
b6cc0a72 2701
6615c446 2702 case tcc_declaration:
6de9cd9a
DN
2703 /* Consider __builtin_sqrt equal to sqrt. */
2704 return (TREE_CODE (arg0) == FUNCTION_DECL
2705 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2706 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2707 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
21c43754 2708
e9a25f70
JL
2709 default:
2710 return 0;
6d716ca8 2711 }
624b15fa
RK
2712
2713#undef OP_SAME
2714#undef OP_SAME_WITH_NULL
6d716ca8 2715}
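/* Illustrative sketch (not part of fold-const.c): operand identity is not
   C equality.  -0.0 and 0.0 compare equal with == yet are distinguishable
   (different sign bits), and a NaN compares unequal to itself, which is why
   the REAL_CST case above uses REAL_VALUES_IDENTICAL rather than a value
   comparison.  Standalone example program.  */
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0, n = NAN;
  printf ("%d %d %d\n", pz == nz, signbit (pz) != 0, signbit (nz) != 0);
  /* Prints "1 0 1": equal under ==, but the sign bits differ.  */
  printf ("%d\n", n == n);   /* Prints 0: a NaN is unequal to itself.  */
  return 0;
}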
c05a9b68
RS
2716\f
2717/* Similar to operand_equal_p, but see if ARG0 might have been made by
b6cc0a72 2718 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
6d716ca8 2719
6d716ca8
RS
2720 When in doubt, return 0. */
2721
b6cc0a72 2722static int
fa8db1f7 2723operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
6d716ca8 2724{
c05a9b68 2725 int unsignedp1, unsignedpo;
52de9b6c 2726 tree primarg0, primarg1, primother;
770ae6cc 2727 unsigned int correct_width;
6d716ca8 2728
c05a9b68 2729 if (operand_equal_p (arg0, arg1, 0))
6d716ca8
RS
2730 return 1;
2731
0982a4b8
JM
2732 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2733 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
6d716ca8
RS
2734 return 0;
2735
52de9b6c
RK
2736 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2737 and see if the inner values are the same. This removes any
2738 signedness comparison, which doesn't matter here. */
2739 primarg0 = arg0, primarg1 = arg1;
b6cc0a72
KH
2740 STRIP_NOPS (primarg0);
2741 STRIP_NOPS (primarg1);
52de9b6c
RK
2742 if (operand_equal_p (primarg0, primarg1, 0))
2743 return 1;
2744
c05a9b68
RS
2745 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2746 actual comparison operand, ARG0.
6d716ca8 2747
c05a9b68 2748 First throw away any conversions to wider types
6d716ca8 2749 already present in the operands. */
6d716ca8 2750
c05a9b68
RS
2751 primarg1 = get_narrower (arg1, &unsignedp1);
2752 primother = get_narrower (other, &unsignedpo);
2753
2754 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2755 if (unsignedp1 == unsignedpo
2756 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2757 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
6d716ca8 2758 {
c05a9b68 2759 tree type = TREE_TYPE (arg0);
6d716ca8
RS
2760
2761 /* Make sure shorter operand is extended the right way
2762 to match the longer operand. */
12753674 2763 primarg1 = fold_convert (signed_or_unsigned_type_for
088414c1 2764 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
6d716ca8 2765
088414c1 2766 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
6d716ca8
RS
2767 return 1;
2768 }
2769
2770 return 0;
2771}
2772\f
f72aed24 2773/* See if ARG is an expression that is either a comparison or is performing
c05a9b68
RS
2774 arithmetic on comparisons. The comparisons must only be comparing
2775 two different values, which will be stored in *CVAL1 and *CVAL2; if
cc2902df 2776 they are nonzero it means that some operands have already been found.
c05a9b68 2777 No variables may be used anywhere else in the expression except in the
35e66bd1
RK
2778 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2779 the expression and save_expr needs to be called with CVAL1 and CVAL2.
c05a9b68
RS
2780
2781 If this is true, return 1. Otherwise, return zero. */
2782
2783static int
fa8db1f7 2784twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
c05a9b68
RS
2785{
2786 enum tree_code code = TREE_CODE (arg);
82d6e6fc 2787 enum tree_code_class tclass = TREE_CODE_CLASS (code);
c05a9b68 2788
6615c446 2789 /* We can handle some of the tcc_expression cases here. */
82d6e6fc
KG
2790 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2791 tclass = tcc_unary;
2792 else if (tclass == tcc_expression
c05a9b68
RS
2793 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2794 || code == COMPOUND_EXPR))
82d6e6fc 2795 tclass = tcc_binary;
2315a5db 2796
82d6e6fc 2797 else if (tclass == tcc_expression && code == SAVE_EXPR
d4b60170 2798 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
35e66bd1
RK
2799 {
2800 /* If we've already found a CVAL1 or CVAL2, this expression is
2801 two complex to handle. */
2802 if (*cval1 || *cval2)
2803 return 0;
2804
82d6e6fc 2805 tclass = tcc_unary;
35e66bd1
RK
2806 *save_p = 1;
2807 }
c05a9b68 2808
82d6e6fc 2809 switch (tclass)
c05a9b68 2810 {
6615c446 2811 case tcc_unary:
35e66bd1 2812 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
c05a9b68 2813
6615c446 2814 case tcc_binary:
35e66bd1
RK
2815 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2816 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2817 cval1, cval2, save_p));
c05a9b68 2818
6615c446 2819 case tcc_constant:
c05a9b68
RS
2820 return 1;
2821
6615c446 2822 case tcc_expression:
c05a9b68 2823 if (code == COND_EXPR)
35e66bd1
RK
2824 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2825 cval1, cval2, save_p)
2826 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2827 cval1, cval2, save_p)
c05a9b68 2828 && twoval_comparison_p (TREE_OPERAND (arg, 2),
35e66bd1 2829 cval1, cval2, save_p));
c05a9b68 2830 return 0;
b6cc0a72 2831
6615c446 2832 case tcc_comparison:
c05a9b68
RS
2833 /* First see if we can handle the first operand, then the second. For
2834 the second operand, we know *CVAL1 can't be zero. It must be that
2835 one side of the comparison is each of the values; test for the
2836 case where this isn't true by failing if the two operands
2837 are the same. */
2838
2839 if (operand_equal_p (TREE_OPERAND (arg, 0),
2840 TREE_OPERAND (arg, 1), 0))
2841 return 0;
2842
2843 if (*cval1 == 0)
2844 *cval1 = TREE_OPERAND (arg, 0);
2845 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2846 ;
2847 else if (*cval2 == 0)
2848 *cval2 = TREE_OPERAND (arg, 0);
2849 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2850 ;
2851 else
2852 return 0;
2853
2854 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2855 ;
2856 else if (*cval2 == 0)
2857 *cval2 = TREE_OPERAND (arg, 1);
2858 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2859 ;
2860 else
2861 return 0;
2862
2863 return 1;
c05a9b68 2864
e9a25f70
JL
2865 default:
2866 return 0;
2867 }
c05a9b68
RS
2868}
2869\f
2870/* ARG is a tree that is known to contain just arithmetic operations and
2871 comparisons. Evaluate the operations in the tree substituting NEW0 for
f72aed24 2872 any occurrence of OLD0 as an operand of a comparison and likewise for
c05a9b68
RS
2873 NEW1 and OLD1. */
2874
2875static tree
db3927fb
AH
2876eval_subst (location_t loc, tree arg, tree old0, tree new0,
2877 tree old1, tree new1)
c05a9b68
RS
2878{
2879 tree type = TREE_TYPE (arg);
2880 enum tree_code code = TREE_CODE (arg);
82d6e6fc 2881 enum tree_code_class tclass = TREE_CODE_CLASS (code);
c05a9b68 2882
6615c446 2883 /* We can handle some of the tcc_expression cases here. */
82d6e6fc
KG
2884 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2885 tclass = tcc_unary;
2886 else if (tclass == tcc_expression
c05a9b68 2887 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
82d6e6fc 2888 tclass = tcc_binary;
c05a9b68 2889
82d6e6fc 2890 switch (tclass)
c05a9b68 2891 {
6615c446 2892 case tcc_unary:
db3927fb
AH
2893 return fold_build1_loc (loc, code, type,
2894 eval_subst (loc, TREE_OPERAND (arg, 0),
7f20a5b7 2895 old0, new0, old1, new1));
c05a9b68 2896
6615c446 2897 case tcc_binary:
db3927fb
AH
2898 return fold_build2_loc (loc, code, type,
2899 eval_subst (loc, TREE_OPERAND (arg, 0),
7f20a5b7 2900 old0, new0, old1, new1),
db3927fb 2901 eval_subst (loc, TREE_OPERAND (arg, 1),
7f20a5b7 2902 old0, new0, old1, new1));
c05a9b68 2903
6615c446 2904 case tcc_expression:
c05a9b68
RS
2905 switch (code)
2906 {
2907 case SAVE_EXPR:
db3927fb
AH
2908 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2909 old1, new1);
c05a9b68
RS
2910
2911 case COMPOUND_EXPR:
db3927fb
AH
2912 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2913 old1, new1);
c05a9b68
RS
2914
2915 case COND_EXPR:
db3927fb
AH
2916 return fold_build3_loc (loc, code, type,
2917 eval_subst (loc, TREE_OPERAND (arg, 0),
7f20a5b7 2918 old0, new0, old1, new1),
db3927fb 2919 eval_subst (loc, TREE_OPERAND (arg, 1),
7f20a5b7 2920 old0, new0, old1, new1),
db3927fb 2921 eval_subst (loc, TREE_OPERAND (arg, 2),
7f20a5b7 2922 old0, new0, old1, new1));
e9a25f70
JL
2923 default:
2924 break;
c05a9b68 2925 }
938d968e 2926 /* Fall through - ??? */
c05a9b68 2927
6615c446 2928 case tcc_comparison:
c05a9b68
RS
2929 {
2930 tree arg0 = TREE_OPERAND (arg, 0);
2931 tree arg1 = TREE_OPERAND (arg, 1);
2932
2933 /* We need to check both for exact equality and tree equality. The
2934 former will be true if the operand has a side-effect. In that
2935 case, we know the operand occurred exactly once. */
2936
2937 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2938 arg0 = new0;
2939 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2940 arg0 = new1;
2941
2942 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2943 arg1 = new0;
2944 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2945 arg1 = new1;
2946
db3927fb 2947 return fold_build2_loc (loc, code, type, arg0, arg1);
c05a9b68 2948 }
c05a9b68 2949
e9a25f70
JL
2950 default:
2951 return arg;
2952 }
c05a9b68
RS
2953}
2954\f
6d716ca8
RS
2955/* Return a tree for the case when the result of an expression is RESULT
2956 converted to TYPE and OMITTED was previously an operand of the expression
2957 but is now not needed (e.g., we folded OMITTED * 0).
2958
2959 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2960 the conversion of RESULT to TYPE. */
2961
c0a47a61 2962tree
db3927fb 2963omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
6d716ca8 2964{
db3927fb 2965 tree t = fold_convert_loc (loc, type, result);
6d716ca8 2966
15dc95cb 2967 /* If the resulting operand is an empty statement, just return the omitted
e057e0cd
AP
 2968 statement cast to void. */
2969 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
db3927fb
AH
2970 {
2971 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
2972 goto omit_one_operand_exit;
2973 }
e057e0cd 2974
6d716ca8 2975 if (TREE_SIDE_EFFECTS (omitted))
db3927fb
AH
2976 {
2977 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2978 goto omit_one_operand_exit;
2979 }
2980
2981 return non_lvalue_loc (loc, t);
6d716ca8 2982
db3927fb
AH
2983 omit_one_operand_exit:
2984 protected_set_expr_location (t, loc);
2985 return t;
6d716ca8 2986}
4ab3cb65
RK
2987
2988/* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2989
2990static tree
db3927fb
AH
2991pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2992 tree omitted)
4ab3cb65 2993{
db3927fb 2994 tree t = fold_convert_loc (loc, type, result);
4ab3cb65 2995
15dc95cb 2996 /* If the resulting operand is an empty statement, just return the omitted
e057e0cd
AP
 2997 statement cast to void. */
2998 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
db3927fb
AH
2999 {
3000 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3001 goto pedantic_omit_one_operand_exit;
3002 }
e057e0cd 3003
4ab3cb65 3004 if (TREE_SIDE_EFFECTS (omitted))
db3927fb
AH
3005 {
3006 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3007 goto pedantic_omit_one_operand_exit;
3008 }
4ab3cb65 3009
db3927fb
AH
3010 return pedantic_non_lvalue_loc (loc, t);
3011
3012 pedantic_omit_one_operand_exit:
3013 protected_set_expr_location (t, loc);
3014 return t;
4ab3cb65 3015}
08039bd8
RS
3016
3017/* Return a tree for the case when the result of an expression is RESULT
3018 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3019 of the expression but are now not needed.
3020
3021 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3022 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3023 evaluated before OMITTED2. Otherwise, if neither has side effects,
3024 just do the conversion of RESULT to TYPE. */
3025
3026tree
db3927fb
AH
3027omit_two_operands_loc (location_t loc, tree type, tree result,
3028 tree omitted1, tree omitted2)
08039bd8 3029{
db3927fb 3030 tree t = fold_convert_loc (loc, type, result);
08039bd8
RS
3031
3032 if (TREE_SIDE_EFFECTS (omitted2))
db3927fb
AH
3033 {
3034 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3035 SET_EXPR_LOCATION (t, loc);
3036 }
08039bd8 3037 if (TREE_SIDE_EFFECTS (omitted1))
db3927fb
AH
3038 {
3039 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3040 SET_EXPR_LOCATION (t, loc);
3041 }
08039bd8 3042
db3927fb 3043 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
08039bd8
RS
3044}
3045
6d716ca8 3046\f
3f783329
RS
3047/* Return a simplified tree node for the truth-negation of ARG. This
3048 never alters ARG itself. We assume that ARG is an operation that
d1a7edaf 3049 returns a truth value (0 or 1).
6d716ca8 3050
d1a7edaf
PB
3051 FIXME: one would think we would fold the result, but it causes
3052 problems with the dominator optimizer. */
d817ed3b 3053
6d716ca8 3054tree
db3927fb 3055fold_truth_not_expr (location_t loc, tree arg)
6d716ca8 3056{
ca80e52b 3057 tree t, type = TREE_TYPE (arg);
c05a9b68 3058 enum tree_code code = TREE_CODE (arg);
db3927fb 3059 location_t loc1, loc2;
6d716ca8 3060
c05a9b68
RS
3061 /* If this is a comparison, we can simply invert it, except for
3062 floating-point non-equality comparisons, in which case we just
3063 enclose a TRUTH_NOT_EXPR around what we have. */
6d716ca8 3064
6615c446 3065 if (TREE_CODE_CLASS (code) == tcc_comparison)
6d716ca8 3066 {
d1a7edaf
PB
3067 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3068 if (FLOAT_TYPE_P (op_type)
3069 && flag_trapping_math
3070 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3071 && code != NE_EXPR && code != EQ_EXPR)
d817ed3b 3072 return NULL_TREE;
ca80e52b
EB
3073
3074 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3075 if (code == ERROR_MARK)
3076 return NULL_TREE;
3077
3078 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
db3927fb 3079 SET_EXPR_LOCATION (t, loc);
ca80e52b 3080 return t;
c05a9b68 3081 }
6d716ca8 3082
c05a9b68
RS
3083 switch (code)
3084 {
6d716ca8 3085 case INTEGER_CST:
9ace7f9e 3086 return constant_boolean_node (integer_zerop (arg), type);
6d716ca8
RS
3087
3088 case TRUTH_AND_EXPR:
db3927fb
AH
3089 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3090 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3091 if (loc1 == UNKNOWN_LOCATION)
3092 loc1 = loc;
3093 if (loc2 == UNKNOWN_LOCATION)
3094 loc2 = loc;
ca80e52b 3095 t = build2 (TRUTH_OR_EXPR, type,
db3927fb
AH
3096 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3097 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
ca80e52b 3098 break;
6d716ca8
RS
3099
3100 case TRUTH_OR_EXPR:
db3927fb
AH
3101 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3102 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3103 if (loc1 == UNKNOWN_LOCATION)
3104 loc1 = loc;
3105 if (loc2 == UNKNOWN_LOCATION)
3106 loc2 = loc;
ca80e52b 3107 t = build2 (TRUTH_AND_EXPR, type,
db3927fb
AH
3108 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3109 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
ca80e52b 3110 break;
6d716ca8 3111
772447c5
RK
3112 case TRUTH_XOR_EXPR:
3113 /* Here we can invert either operand. We invert the first operand
3114 unless the second operand is a TRUTH_NOT_EXPR in which case our
3115 result is the XOR of the first operand with the inside of the
3116 negation of the second operand. */
3117
3118 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
ca80e52b
EB
3119 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3120 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
772447c5 3121 else
ca80e52b 3122 t = build2 (TRUTH_XOR_EXPR, type,
db3927fb 3123 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
ca80e52b
EB
3124 TREE_OPERAND (arg, 1));
3125 break;
772447c5 3126
6d716ca8 3127 case TRUTH_ANDIF_EXPR:
db3927fb
AH
3128 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3129 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3130 if (loc1 == UNKNOWN_LOCATION)
3131 loc1 = loc;
3132 if (loc2 == UNKNOWN_LOCATION)
3133 loc2 = loc;
ca80e52b 3134 t = build2 (TRUTH_ORIF_EXPR, type,
db3927fb
AH
3135 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3136 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
ca80e52b 3137 break;
6d716ca8
RS
3138
3139 case TRUTH_ORIF_EXPR:
db3927fb
AH
3140 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3141 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3142 if (loc1 == UNKNOWN_LOCATION)
3143 loc1 = loc;
3144 if (loc2 == UNKNOWN_LOCATION)
3145 loc2 = loc;
ca80e52b 3146 t = build2 (TRUTH_ANDIF_EXPR, type,
db3927fb
AH
3147 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3148 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
ca80e52b 3149 break;
6d716ca8
RS
3150
3151 case TRUTH_NOT_EXPR:
3152 return TREE_OPERAND (arg, 0);
3153
3154 case COND_EXPR:
9ca4afb9
RG
3155 {
3156 tree arg1 = TREE_OPERAND (arg, 1);
3157 tree arg2 = TREE_OPERAND (arg, 2);
db3927fb
AH
3158
3159 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3160 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3161 if (loc1 == UNKNOWN_LOCATION)
3162 loc1 = loc;
3163 if (loc2 == UNKNOWN_LOCATION)
3164 loc2 = loc;
3165
9ca4afb9
RG
3166 /* A COND_EXPR may have a throw as one operand, which
3167 then has void type. Just leave void operands
3168 as they are. */
ca80e52b
EB
3169 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3170 VOID_TYPE_P (TREE_TYPE (arg1))
db3927fb 3171 ? arg1 : invert_truthvalue_loc (loc1, arg1),
ca80e52b 3172 VOID_TYPE_P (TREE_TYPE (arg2))
db3927fb 3173 ? arg2 : invert_truthvalue_loc (loc2, arg2));
ca80e52b 3174 break;
9ca4afb9 3175 }
6d716ca8 3176
ef9fe0da 3177 case COMPOUND_EXPR:
db3927fb
AH
3178 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3179 if (loc1 == UNKNOWN_LOCATION)
3180 loc1 = loc;
3181 t = build2 (COMPOUND_EXPR, type,
3182 TREE_OPERAND (arg, 0),
3183 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
ca80e52b 3184 break;
ef9fe0da 3185
6d716ca8 3186 case NON_LVALUE_EXPR:
db3927fb
AH
3187 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3188 if (loc1 == UNKNOWN_LOCATION)
3189 loc1 = loc;
3190 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
6d716ca8 3191
84fb43a1 3192 CASE_CONVERT:
6de9cd9a 3193 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
ca80e52b
EB
3194 {
3195 t = build1 (TRUTH_NOT_EXPR, type, arg);
3196 break;
3197 }
3198
3199 /* ... fall through ... */
6de9cd9a 3200
6d716ca8 3201 case FLOAT_EXPR:
db3927fb
AH
3202 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3203 if (loc1 == UNKNOWN_LOCATION)
3204 loc1 = loc;
ca80e52b 3205 t = build1 (TREE_CODE (arg), type,
db3927fb 3206 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
ca80e52b 3207 break;
6d716ca8
RS
3208
3209 case BIT_AND_EXPR:
efc1a4d9 3210 if (!integer_onep (TREE_OPERAND (arg, 1)))
ca80e52b
EB
3211 return NULL_TREE;
3212 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3213 break;
6d716ca8 3214
dfa90b42 3215 case SAVE_EXPR:
ca80e52b
EB
3216 t = build1 (TRUTH_NOT_EXPR, type, arg);
3217 break;
a25ee332
RK
3218
3219 case CLEANUP_POINT_EXPR:
db3927fb
AH
3220 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3221 if (loc1 == UNKNOWN_LOCATION)
3222 loc1 = loc;
ca80e52b 3223 t = build1 (CLEANUP_POINT_EXPR, type,
db3927fb 3224 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
ca80e52b 3225 break;
e9a25f70
JL
3226
3227 default:
ca80e52b 3228 t = NULL_TREE;
e9a25f70 3229 break;
efc1a4d9 3230 }
d817ed3b 3231
db3927fb
AH
3232 if (t)
3233 SET_EXPR_LOCATION (t, loc);
ca80e52b
EB
3234
3235 return t;
d817ed3b
RG
3236}
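/* Illustrative sketch (not part of fold-const.c): the TRUTH_XOR_EXPR case
   above inverts only one operand, relying on the identity
   !(a ^ b) == (!a) ^ b for truth values.  Standalone example program
   checking all four combinations.  */
#include <stdio.h>

int
main (void)
{
  int a, b;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      printf ("%d %d\n", !(a ^ b), (!a) ^ b);   /* The two columns always match.  */
  return 0;
}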
3237
3238/* Return a simplified tree node for the truth-negation of ARG. This
3239 never alters ARG itself. We assume that ARG is an operation that
3240 returns a truth value (0 or 1).
3241
3242 FIXME: one would think we would fold the result, but it causes
3243 problems with the dominator optimizer. */
3244
3245tree
db3927fb 3246invert_truthvalue_loc (location_t loc, tree arg)
d817ed3b
RG
3247{
3248 tree tem;
3249
3250 if (TREE_CODE (arg) == ERROR_MARK)
3251 return arg;
3252
db3927fb 3253 tem = fold_truth_not_expr (loc, arg);
d817ed3b 3254 if (!tem)
db3927fb
AH
3255 {
3256 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3257 SET_EXPR_LOCATION (tem, loc);
3258 }
d817ed3b
RG
3259
3260 return tem;
6d716ca8
RS
3261}
3262
3263/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3264 operands are another bit-wise operation with a common input. If so,
3265 distribute the bit operations to save an operation and possibly two if
3266 constants are involved. For example, convert
fa8db1f7 3267 (A | B) & (A | C) into A | (B & C)
6d716ca8
RS
3268 Further simplification will occur if B and C are constants.
3269
3270 If this optimization cannot be done, 0 will be returned. */
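/* As a concrete instance, (X | 4) & (X | 8) has the common operand X, so it
   becomes X | (4 & 8), which further folds to X | 0 and finally to X.  */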
3271
3272static tree
db3927fb
AH
3273distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3274 tree arg0, tree arg1)
6d716ca8
RS
3275{
3276 tree common;
3277 tree left, right;
3278
3279 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3280 || TREE_CODE (arg0) == code
fced8ba3
RS
3281 || (TREE_CODE (arg0) != BIT_AND_EXPR
3282 && TREE_CODE (arg0) != BIT_IOR_EXPR))
6d716ca8
RS
3283 return 0;
3284
3285 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3286 {
3287 common = TREE_OPERAND (arg0, 0);
3288 left = TREE_OPERAND (arg0, 1);
3289 right = TREE_OPERAND (arg1, 1);
3290 }
3291 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3292 {
3293 common = TREE_OPERAND (arg0, 0);
3294 left = TREE_OPERAND (arg0, 1);
3295 right = TREE_OPERAND (arg1, 0);
3296 }
3297 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3298 {
3299 common = TREE_OPERAND (arg0, 1);
3300 left = TREE_OPERAND (arg0, 0);
3301 right = TREE_OPERAND (arg1, 1);
3302 }
3303 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3304 {
3305 common = TREE_OPERAND (arg0, 1);
3306 left = TREE_OPERAND (arg0, 0);
3307 right = TREE_OPERAND (arg1, 0);
3308 }
3309 else
3310 return 0;
3311
db3927fb
AH
3312 common = fold_convert_loc (loc, type, common);
3313 left = fold_convert_loc (loc, type, left);
3314 right = fold_convert_loc (loc, type, right);
3315 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3316 fold_build2_loc (loc, code, type, left, right));
6d716ca8 3317}
f8912a55
PB
3318
3319/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3320 with code CODE. This optimization is unsafe. */
3321static tree
db3927fb
AH
3322distribute_real_division (location_t loc, enum tree_code code, tree type,
3323 tree arg0, tree arg1)
f8912a55
PB
3324{
3325 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3326 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3327
3328 /* (A / C) +- (B / C) -> (A +- B) / C. */
3329 if (mul0 == mul1
3330 && operand_equal_p (TREE_OPERAND (arg0, 1),
3331 TREE_OPERAND (arg1, 1), 0))
db3927fb
AH
3332 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3333 fold_build2_loc (loc, code, type,
f8912a55
PB
3334 TREE_OPERAND (arg0, 0),
3335 TREE_OPERAND (arg1, 0)),
3336 TREE_OPERAND (arg0, 1));
3337
3338 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3339 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3340 TREE_OPERAND (arg1, 0), 0)
3341 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3342 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3343 {
3344 REAL_VALUE_TYPE r0, r1;
3345 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3346 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3347 if (!mul0)
3348 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3349 if (!mul1)
3350 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3351 real_arithmetic (&r0, code, &r0, &r1);
db3927fb 3352 return fold_build2_loc (loc, MULT_EXPR, type,
f8912a55
PB
3353 TREE_OPERAND (arg0, 0),
3354 build_real (type, r0));
3355 }
3356
3357 return NULL_TREE;
3358}
6d716ca8 3359\f
45dc13b9
JJ
3360/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3361 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3362
3363static tree
db3927fb
AH
3364make_bit_field_ref (location_t loc, tree inner, tree type,
3365 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
45dc13b9
JJ
3366{
3367 tree result, bftype;
3368
3369 if (bitpos == 0)
3370 {
3371 tree size = TYPE_SIZE (TREE_TYPE (inner));
3372 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3373 || POINTER_TYPE_P (TREE_TYPE (inner)))
b8698a0f 3374 && host_integerp (size, 0)
45dc13b9 3375 && tree_low_cst (size, 0) == bitsize)
db3927fb 3376 return fold_convert_loc (loc, type, inner);
45dc13b9
JJ
3377 }
3378
3379 bftype = type;
3380 if (TYPE_PRECISION (bftype) != bitsize
3381 || TYPE_UNSIGNED (bftype) == !unsignedp)
3382 bftype = build_nonstandard_integer_type (bitsize, 0);
3383
3384 result = build3 (BIT_FIELD_REF, bftype, inner,
3385 size_int (bitsize), bitsize_int (bitpos));
db3927fb 3386 SET_EXPR_LOCATION (result, loc);
45dc13b9
JJ
3387
3388 if (bftype != type)
db3927fb 3389 result = fold_convert_loc (loc, type, result);
45dc13b9
JJ
3390
3391 return result;
3392}
3393
3394/* Optimize a bit-field compare.
3395
3396 There are two cases: First is a compare against a constant and the
3397 second is a comparison of two items where the fields are at the same
3398 bit position relative to the start of a chunk (byte, halfword, word)
3399 large enough to contain it. In these cases we can avoid the shift
3400 implicit in bitfield extractions.
3401
3402 For constants, we emit a compare of the shifted constant with the
3403 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3404 compared. For two fields at the same position, we do the ANDs with the
3405 similar mask and compare the result of the ANDs.
3406
3407 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3408 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3409 are the left and right operands of the comparison, respectively.
3410
3411 If the optimization described above can be done, we return the resulting
3412 tree. Otherwise we return zero. */
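/* As an illustration of the constant case: for a 3-bit field B placed at bit
   offset 2 of a word W (little-endian layout assumed), the test B == 5 can be
   rewritten roughly as (W & (7 << 2)) == (5 << 2), so no shift is needed to
   extract B itself.  */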
3413
3414static tree
db3927fb
AH
3415optimize_bit_field_compare (location_t loc, enum tree_code code,
3416 tree compare_type, tree lhs, tree rhs)
45dc13b9
JJ
3417{
3418 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3419 tree type = TREE_TYPE (lhs);
3420 tree signed_type, unsigned_type;
3421 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3422 enum machine_mode lmode, rmode, nmode;
3423 int lunsignedp, runsignedp;
3424 int lvolatilep = 0, rvolatilep = 0;
3425 tree linner, rinner = NULL_TREE;
3426 tree mask;
3427 tree offset;
3428
3429 /* Get all the information about the extractions being done. If the bit size
3430 is the same as the size of the underlying object, we aren't doing an
3431 extraction at all and so can do nothing. We also don't want to
3432 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3433 then will no longer be able to replace it. */
3434 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3435 &lunsignedp, &lvolatilep, false);
3436 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3437 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3438 return 0;
3439
3440 if (!const_p)
3441 {
3442 /* If this is not a constant, we can only do something if bit positions,
3443 sizes, and signedness are the same. */
3444 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3445 &runsignedp, &rvolatilep, false);
3446
3447 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3448 || lunsignedp != runsignedp || offset != 0
3449 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3450 return 0;
3451 }
3452
3453 /* See if we can find a mode to refer to this field. We should be able to,
3454 but fail if we can't. */
6a78b724
DD
3455 if (lvolatilep
3456 && GET_MODE_BITSIZE (lmode) > 0
3457 && flag_strict_volatile_bitfields > 0)
3458 nmode = lmode;
3459 else
3460 nmode = get_best_mode (lbitsize, lbitpos,
3461 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3462 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3463 TYPE_ALIGN (TREE_TYPE (rinner))),
3464 word_mode, lvolatilep || rvolatilep);
45dc13b9
JJ
3465 if (nmode == VOIDmode)
3466 return 0;
3467
3468 /* Set signed and unsigned types of the precision of this mode for the
3469 shifts below. */
3470 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3471 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3472
3473 /* Compute the bit position and size for the new reference and our offset
3474 within it. If the new reference is the same size as the original, we
3475 won't optimize anything, so return zero. */
3476 nbitsize = GET_MODE_BITSIZE (nmode);
3477 nbitpos = lbitpos & ~ (nbitsize - 1);
3478 lbitpos -= nbitpos;
3479 if (nbitsize == lbitsize)
3480 return 0;
3481
3482 if (BYTES_BIG_ENDIAN)
3483 lbitpos = nbitsize - lbitsize - lbitpos;
3484
3485 /* Make the mask to be used against the extracted field. */
3486 mask = build_int_cst_type (unsigned_type, -1);
43a5d30b 3487 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
45dc13b9 3488 mask = const_binop (RSHIFT_EXPR, mask,
43a5d30b 3489 size_int (nbitsize - lbitsize - lbitpos));
45dc13b9
JJ
3490
3491 if (! const_p)
3492 /* If not comparing with constant, just rework the comparison
3493 and return. */
db3927fb
AH
3494 return fold_build2_loc (loc, code, compare_type,
3495 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3496 make_bit_field_ref (loc, linner,
45dc13b9
JJ
3497 unsigned_type,
3498 nbitsize, nbitpos,
3499 1),
3500 mask),
db3927fb
AH
3501 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3502 make_bit_field_ref (loc, rinner,
45dc13b9
JJ
3503 unsigned_type,
3504 nbitsize, nbitpos,
3505 1),
3506 mask));
3507
3508 /* Otherwise, we are handling the constant case. See if the constant is too
3509 big for the field. Warn and return a tree for 0 (false) if so. We do
3510 this not only for its own sake, but to avoid having to test for this
3511 error case below. If we didn't, we might generate wrong code.
3512
3513 For unsigned fields, the constant shifted right by the field length should
3514 be all zero. For signed fields, the high-order bits should agree with
3515 the sign bit. */
3516
3517 if (lunsignedp)
3518 {
3519 if (! integer_zerop (const_binop (RSHIFT_EXPR,
db3927fb
AH
3520 fold_convert_loc (loc,
3521 unsigned_type, rhs),
43a5d30b 3522 size_int (lbitsize))))
45dc13b9
JJ
3523 {
3524 warning (0, "comparison is always %d due to width of bit-field",
3525 code == NE_EXPR);
3526 return constant_boolean_node (code == NE_EXPR, compare_type);
3527 }
3528 }
3529 else
3530 {
db3927fb
AH
3531 tree tem = const_binop (RSHIFT_EXPR,
3532 fold_convert_loc (loc, signed_type, rhs),
43a5d30b 3533 size_int (lbitsize - 1));
45dc13b9
JJ
3534 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3535 {
3536 warning (0, "comparison is always %d due to width of bit-field",
3537 code == NE_EXPR);
3538 return constant_boolean_node (code == NE_EXPR, compare_type);
3539 }
3540 }
3541
3542 /* Single-bit compares should always be against zero. */
3543 if (lbitsize == 1 && ! integer_zerop (rhs))
3544 {
3545 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3546 rhs = build_int_cst (type, 0);
3547 }
3548
3549 /* Make a new bitfield reference, shift the constant over the
3550 appropriate number of bits and mask it with the computed mask
3551 (in case this was a signed field). If we changed it, make a new one. */
db3927fb 3552 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
45dc13b9
JJ
3553 if (lvolatilep)
3554 {
3555 TREE_SIDE_EFFECTS (lhs) = 1;
3556 TREE_THIS_VOLATILE (lhs) = 1;
3557 }
3558
3559 rhs = const_binop (BIT_AND_EXPR,
3560 const_binop (LSHIFT_EXPR,
db3927fb 3561 fold_convert_loc (loc, unsigned_type, rhs),
43a5d30b
AS
3562 size_int (lbitpos)),
3563 mask);
45dc13b9 3564
db3927fb
AH
3565 lhs = build2 (code, compare_type,
3566 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3567 rhs);
3568 SET_EXPR_LOCATION (lhs, loc);
3569 return lhs;
45dc13b9
JJ
3570}
3571\f
b2215d83 3572/* Subroutine for fold_truthop: decode a field reference.
6d716ca8
RS
3573
3574 If EXP is a comparison reference, we return the innermost reference.
3575
3576 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3577 set to the starting bit number.
3578
3579 If the innermost field can be completely contained in a mode-sized
3580 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3581
3582 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3583 otherwise it is not changed.
3584
3585 *PUNSIGNEDP is set to the signedness of the field.
3586
3587 *PMASK is set to the mask used. This is either contained in a
3588 BIT_AND_EXPR or derived from the width of the field.
3589
38e01259 3590 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
d4453ee5 3591
6d716ca8
RS
3592 Return 0 if this is not a component reference or is one that we can't
3593 do anything with. */
3594
3595static tree
db3927fb 3596decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
75040a04
AJ
3597 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3598 int *punsignedp, int *pvolatilep,
fa8db1f7 3599 tree *pmask, tree *pand_mask)
6d716ca8 3600{
1a8c4ca6 3601 tree outer_type = 0;
6d9f1f5f
RK
3602 tree and_mask = 0;
3603 tree mask, inner, offset;
3604 tree unsigned_type;
770ae6cc 3605 unsigned int precision;
6d716ca8 3606
b6cc0a72 3607 /* All the optimizations using this function assume integer fields.
772ae9f0
RK
3608 There are problems with FP fields since the type_for_size call
3609 below can fail for, e.g., XFmode. */
3610 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3611 return 0;
3612
1a8c4ca6
EB
3613 /* We are interested in the bare arrangement of bits, so strip everything
3614 that doesn't affect the machine mode. However, record the type of the
3615 outermost expression if it may matter below. */
1043771b 3616 if (CONVERT_EXPR_P (exp)
1a8c4ca6
EB
3617 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3618 outer_type = TREE_TYPE (exp);
df7fb8f9 3619 STRIP_NOPS (exp);
6d716ca8
RS
3620
3621 if (TREE_CODE (exp) == BIT_AND_EXPR)
3622 {
6d9f1f5f 3623 and_mask = TREE_OPERAND (exp, 1);
6d716ca8 3624 exp = TREE_OPERAND (exp, 0);
6d9f1f5f
RK
3625 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3626 if (TREE_CODE (and_mask) != INTEGER_CST)
6d716ca8
RS
3627 return 0;
3628 }
3629
f1e60ec6 3630 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2614034e 3631 punsignedp, pvolatilep, false);
02103577 3632 if ((inner == exp && and_mask == 0)
14a774a9
RK
3633 || *pbitsize < 0 || offset != 0
3634 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
c05a9b68 3635 return 0;
b6cc0a72 3636
1a8c4ca6
EB
3637 /* If the number of bits in the reference is the same as the bitsize of
3638 the outer type, then the outer type gives the signedness. Otherwise
3639 (in case of a small bitfield) the signedness is unchanged. */
fae1b38d 3640 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
8df83eae 3641 *punsignedp = TYPE_UNSIGNED (outer_type);
1a8c4ca6 3642
6d9f1f5f 3643 /* Compute the mask to access the bitfield. */
5785c7de 3644 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
6d9f1f5f
RK
3645 precision = TYPE_PRECISION (unsigned_type);
3646
2ac7cbb5 3647 mask = build_int_cst_type (unsigned_type, -1);
3e6688a7 3648
43a5d30b
AS
3649 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3650 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
6d9f1f5f
RK
3651
3652 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3653 if (and_mask != 0)
db3927fb
AH
3654 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3655 fold_convert_loc (loc, unsigned_type, and_mask), mask);
6d716ca8
RS
3656
3657 *pmask = mask;
d4453ee5 3658 *pand_mask = and_mask;
6d716ca8
RS
3659 return inner;
3660}
3661
45dc13b9
JJ
3662/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3663 bit positions. */
3664
3665static int
3666all_ones_mask_p (const_tree mask, int size)
3667{
3668 tree type = TREE_TYPE (mask);
3669 unsigned int precision = TYPE_PRECISION (type);
3670 tree tmask;
3671
3672 tmask = build_int_cst_type (signed_type_for (type), -1);
3673
3674 return
3675 tree_int_cst_equal (mask,
3676 const_binop (RSHIFT_EXPR,
3677 const_binop (LSHIFT_EXPR, tmask,
43a5d30b
AS
3678 size_int (precision - size)),
3679 size_int (precision - size)));
45dc13b9
JJ
3680}
3681
1f77b5da
RS
3682/* Subroutine for fold: determine if VAL is the INTEGER_CST that
3683 represents the sign bit of EXP's type. If EXP represents a sign
3684 or zero extension, also test VAL against the unextended type.
3685 The return value is the (sub)expression whose sign bit is VAL,
3686 or NULL_TREE otherwise. */
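/* For a 32-bit EXP, for instance, the only accepted VAL is 0x80000000,
   the constant with just the most significant bit set.  */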
3687
3688static tree
ac545c64 3689sign_bit_p (tree exp, const_tree val)
1f77b5da 3690{
c87d821b
KH
3691 unsigned HOST_WIDE_INT mask_lo, lo;
3692 HOST_WIDE_INT mask_hi, hi;
1f77b5da
RS
3693 int width;
3694 tree t;
3695
68e82b83 3696 /* Tree EXP must have an integral type. */
1f77b5da
RS
3697 t = TREE_TYPE (exp);
3698 if (! INTEGRAL_TYPE_P (t))
3699 return NULL_TREE;
3700
3701 /* Tree VAL must be an integer constant. */
3702 if (TREE_CODE (val) != INTEGER_CST
455f14dd 3703 || TREE_OVERFLOW (val))
1f77b5da
RS
3704 return NULL_TREE;
3705
3706 width = TYPE_PRECISION (t);
3707 if (width > HOST_BITS_PER_WIDE_INT)
3708 {
3709 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3710 lo = 0;
c87d821b
KH
3711
3712 mask_hi = ((unsigned HOST_WIDE_INT) -1
3713 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3714 mask_lo = -1;
1f77b5da
RS
3715 }
3716 else
3717 {
3718 hi = 0;
3719 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
c87d821b
KH
3720
3721 mask_hi = 0;
3722 mask_lo = ((unsigned HOST_WIDE_INT) -1
3723 >> (HOST_BITS_PER_WIDE_INT - width));
1f77b5da
RS
3724 }
3725
c87d821b
KH
3726 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3727 treat VAL as if it were unsigned. */
3728 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3729 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
1f77b5da
RS
3730 return exp;
3731
3732 /* Handle extension from a narrower type. */
3733 if (TREE_CODE (exp) == NOP_EXPR
3734 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3735 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3736
3737 return NULL_TREE;
3738}
3739
b2215d83
TW
3740/* Subroutine for fold_truthop: determine if an operand is simple enough
3741 to be evaluated unconditionally. */
3742
b6cc0a72 3743static int
ac545c64 3744simple_operand_p (const_tree exp)
b2215d83
TW
3745{
3746 /* Strip any conversions that don't change the machine mode. */
1d481ba8 3747 STRIP_NOPS (exp);
b2215d83 3748
6615c446 3749 return (CONSTANT_CLASS_P (exp)
1d481ba8 3750 || TREE_CODE (exp) == SSA_NAME
2f939d94 3751 || (DECL_P (exp)
b2215d83
TW
3752 && ! TREE_ADDRESSABLE (exp)
3753 && ! TREE_THIS_VOLATILE (exp)
8227896c
TW
3754 && ! DECL_NONLOCAL (exp)
3755 /* Don't regard global variables as simple. They may be
3756 allocated in ways unknown to the compiler (shared memory,
3757 #pragma weak, etc). */
3758 && ! TREE_PUBLIC (exp)
3759 && ! DECL_EXTERNAL (exp)
3760 /* Loading a static variable is unduly expensive, but global
3761 registers aren't expensive. */
3762 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
b2215d83 3763}
6d716ca8 3764\f
ebde8a27
RK
3765/* The following functions are subroutines to fold_range_test and allow it to
3766 try to change a logical combination of comparisons into a range test.
3767
3768 For example, both
fa8db1f7 3769 X == 2 || X == 3 || X == 4 || X == 5
ebde8a27 3770 and
fa8db1f7 3771 X >= 2 && X <= 5
ebde8a27
RK
3772 are converted to
3773 (unsigned) (X - 2) <= 3
3774
956d6950 3775 We describe each set of comparisons as being either inside or outside
ebde8a27
RK
3776 a range, using a variable named like IN_P, and then describe the
3777 range with a lower and upper bound. If one of the bounds is omitted,
3778 it represents either the highest or lowest value of the type.
3779
3780 In the comments below, we represent a range by two numbers in brackets
956d6950 3781 preceded by a "+" to designate being inside that range, or a "-" to
ebde8a27
RK
3782 designate being outside that range, so the condition can be inverted by
3783 flipping the prefix. An omitted bound is represented by a "-". For
3784 example, "- [-, 10]" means being outside the range starting at the lowest
3785 possible value and ending at 10, in other words, being greater than 10.
3786 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3787 always false.
3788
3789 We set up things so that the missing bounds are handled in a consistent
3790 manner so neither a missing bound nor "true" and "false" need to be
3791 handled using a special case. */
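/* In this notation, X >= 2 && X <= 5 corresponds to "+ [2, 5]", its
   inversion X < 2 || X > 5 to "- [2, 5]", and X > 10 to "- [-, 10]".  */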
3792
3793/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3794 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3795 and UPPER1_P are nonzero if the respective argument is an upper bound
3796 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3797 must be specified for a comparison. ARG1 will be converted to ARG0's
3798 type if both are specified. */
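/* For instance, comparing an omitted lower bound (ARG0 == 0, UPPER0_P == 0)
   against the constant 5 with LT_EXPR yields true, because a missing lower
   bound is treated as being below every representable value.  */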
ef659ec0 3799
ebde8a27 3800static tree
75040a04
AJ
3801range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3802 tree arg1, int upper1_p)
ebde8a27 3803{
27bae8e5 3804 tree tem;
ebde8a27
RK
3805 int result;
3806 int sgn0, sgn1;
ef659ec0 3807
ebde8a27
RK
3808 /* If neither arg represents infinity, do the normal operation.
3809 Else, if not a comparison, return infinity. Else handle the special
3810 comparison rules. Note that most of the cases below won't occur, but
3811 are handled for consistency. */
ef659ec0 3812
ebde8a27 3813 if (arg0 != 0 && arg1 != 0)
27bae8e5 3814 {
7f20a5b7
KH
3815 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3816 arg0, fold_convert (TREE_TYPE (arg0), arg1));
27bae8e5
RK
3817 STRIP_NOPS (tem);
3818 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3819 }
ef659ec0 3820
6615c446 3821 if (TREE_CODE_CLASS (code) != tcc_comparison)
ebde8a27
RK
3822 return 0;
3823
3824 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
d7b3ea38
NS
3825 for neither. In real maths, we cannot assume open ended ranges are
3826 the same. But, this is computer arithmetic, where numbers are finite.
3827 We can therefore treat any unbounded range as if it were bounded by
3828 a value Z, Z being greater than any representable number. This permits
30f7a378 3829 us to treat unbounded ranges as equal. */
ebde8a27 3830 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4e644c93 3831 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
ebde8a27
RK
3832 switch (code)
3833 {
d7b3ea38
NS
3834 case EQ_EXPR:
3835 result = sgn0 == sgn1;
3836 break;
3837 case NE_EXPR:
3838 result = sgn0 != sgn1;
ebde8a27 3839 break;
d7b3ea38 3840 case LT_EXPR:
ebde8a27
RK
3841 result = sgn0 < sgn1;
3842 break;
d7b3ea38
NS
3843 case LE_EXPR:
3844 result = sgn0 <= sgn1;
3845 break;
3846 case GT_EXPR:
ebde8a27
RK
3847 result = sgn0 > sgn1;
3848 break;
d7b3ea38
NS
3849 case GE_EXPR:
3850 result = sgn0 >= sgn1;
3851 break;
e9a25f70 3852 default:
0bccc606 3853 gcc_unreachable ();
ebde8a27
RK
3854 }
3855
1b0f3e79 3856 return constant_boolean_node (result, type);
ebde8a27 3857}
b6cc0a72 3858\f
ebde8a27
RK
3859/* Given EXP, a logical expression, set the range it is testing into
3860 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
6ac01510
ILT
3861 actually being tested. *PLOW and *PHIGH will be made of the same
3862 type as the returned expression. If EXP is not a comparison, we
3863 will most likely not be returning a useful value and range. Set
3864 *STRICT_OVERFLOW_P to true if the return value is only valid
3865 because signed overflow is undefined; otherwise, do not change
3866 *STRICT_OVERFLOW_P. */
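/* For example, for EXP = (X > 10) with signed X this returns X and sets
   *PIN_P = 0, *PLOW = 0 and *PHIGH = 10, i.e. the range "- [-, 10]" in the
   notation introduced above.  */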
ef659ec0 3867
a243fb4a 3868tree
6ac01510
ILT
3869make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3870 bool *strict_overflow_p)
ef659ec0 3871{
ebde8a27 3872 enum tree_code code;
d1822754
EC
3873 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3874 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
ebde8a27
RK
3875 int in_p, n_in_p;
3876 tree low, high, n_low, n_high;
db3927fb 3877 location_t loc = EXPR_LOCATION (exp);
ef659ec0 3878
ebde8a27
RK
3879 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3880 and see if we can refine the range. Some of the cases below may not
3881 happen, but it doesn't seem worth worrying about this. We "continue"
3882 the outer loop when we've changed something; otherwise we "break"
3883 the switch, which will "break" the while. */
ef659ec0 3884
088414c1 3885 in_p = 0;
57decb7e 3886 low = high = build_int_cst (TREE_TYPE (exp), 0);
ebde8a27
RK
3887
3888 while (1)
ef659ec0 3889 {
ebde8a27 3890 code = TREE_CODE (exp);
d1822754 3891 exp_type = TREE_TYPE (exp);
30d68b86
MM
3892
3893 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3894 {
5039610b 3895 if (TREE_OPERAND_LENGTH (exp) > 0)
d17811fd 3896 arg0 = TREE_OPERAND (exp, 0);
6615c446
JO
3897 if (TREE_CODE_CLASS (code) == tcc_comparison
3898 || TREE_CODE_CLASS (code) == tcc_unary
3899 || TREE_CODE_CLASS (code) == tcc_binary)
d1822754 3900 arg0_type = TREE_TYPE (arg0);
6615c446
JO
3901 if (TREE_CODE_CLASS (code) == tcc_binary
3902 || TREE_CODE_CLASS (code) == tcc_comparison
3903 || (TREE_CODE_CLASS (code) == tcc_expression
5039610b 3904 && TREE_OPERAND_LENGTH (exp) > 1))
30d68b86
MM
3905 arg1 = TREE_OPERAND (exp, 1);
3906 }
ef659ec0 3907
ebde8a27
RK
3908 switch (code)
3909 {
3910 case TRUTH_NOT_EXPR:
3911 in_p = ! in_p, exp = arg0;
3912 continue;
3913
3914 case EQ_EXPR: case NE_EXPR:
3915 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3916 /* We can only do something if the range is testing for zero
3917 and if the second operand is an integer constant. Note that
3918 saying something is "in" the range we make is done by
3919 complementing IN_P since it will set in the initial case of
3920 being not equal to zero; "out" is leaving it alone. */
3921 if (low == 0 || high == 0
3922 || ! integer_zerop (low) || ! integer_zerop (high)
3923 || TREE_CODE (arg1) != INTEGER_CST)
3924 break;
ef659ec0 3925
ebde8a27
RK
3926 switch (code)
3927 {
3928 case NE_EXPR: /* - [c, c] */
3929 low = high = arg1;
3930 break;
3931 case EQ_EXPR: /* + [c, c] */
3932 in_p = ! in_p, low = high = arg1;
3933 break;
3934 case GT_EXPR: /* - [-, c] */
3935 low = 0, high = arg1;
3936 break;
3937 case GE_EXPR: /* + [c, -] */
3938 in_p = ! in_p, low = arg1, high = 0;
3939 break;
3940 case LT_EXPR: /* - [c, -] */
3941 low = arg1, high = 0;
3942 break;
3943 case LE_EXPR: /* + [-, c] */
3944 in_p = ! in_p, low = 0, high = arg1;
3945 break;
e9a25f70 3946 default:
0bccc606 3947 gcc_unreachable ();
ebde8a27 3948 }
ef659ec0 3949
7f423031 3950 /* If this is an unsigned comparison, we also know that EXP is
0e1c7fc7
RK
3951 greater than or equal to zero. We base the range tests we make
3952 on that fact, so we record it here so we can parse existing
d1822754
EC
3953 range tests. We test arg0_type since often the return type
3954 of, e.g. EQ_EXPR, is boolean. */
3955 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
ebde8a27 3956 {
e9ea8bd5
RS
3957 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3958 in_p, low, high, 1,
57decb7e 3959 build_int_cst (arg0_type, 0),
0e1c7fc7 3960 NULL_TREE))
ebde8a27 3961 break;
ef659ec0 3962
ebde8a27 3963 in_p = n_in_p, low = n_low, high = n_high;
0e1c7fc7 3964
368ebcd6 3965 /* If the high bound is missing, but we have a nonzero low
1358cdc5
RK
3966 bound, reverse the range so it goes from zero to the low bound
3967 minus 1. */
3968 if (high == 0 && low && ! integer_zerop (low))
0e1c7fc7
RK
3969 {
3970 in_p = ! in_p;
3971 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3972 integer_one_node, 0);
57decb7e 3973 low = build_int_cst (arg0_type, 0);
0e1c7fc7 3974 }
ebde8a27 3975 }
d1822754
EC
3976
3977 exp = arg0;
ebde8a27
RK
3978 continue;
3979
3980 case NEGATE_EXPR:
3981 /* (-x) IN [a,b] -> x in [-b, -a] */
d1822754 3982 n_low = range_binop (MINUS_EXPR, exp_type,
57decb7e 3983 build_int_cst (exp_type, 0),
088414c1 3984 0, high, 1);
d1822754 3985 n_high = range_binop (MINUS_EXPR, exp_type,
57decb7e 3986 build_int_cst (exp_type, 0),
088414c1 3987 0, low, 0);
ebde8a27
RK
3988 low = n_low, high = n_high;
3989 exp = arg0;
3990 continue;
3991
3992 case BIT_NOT_EXPR:
3993 /* ~ X -> -X - 1 */
d1822754 3994 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
57decb7e 3995 build_int_cst (exp_type, 1));
db3927fb 3996 SET_EXPR_LOCATION (exp, loc);
ebde8a27
RK
3997 continue;
3998
3999 case PLUS_EXPR: case MINUS_EXPR:
4000 if (TREE_CODE (arg1) != INTEGER_CST)
4001 break;
4002
c078a437
KH
4003 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4004 move a constant to the other side. */
eeef0e45
ILT
4005 if (!TYPE_UNSIGNED (arg0_type)
4006 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
c078a437
KH
4007 break;
4008
ebde8a27
RK
4009 /* If EXP is signed, any overflow in the computation is undefined,
4010 so we don't worry about it so long as our computations on
4011 the bounds don't overflow. For unsigned, overflow is defined
4012 and this is exactly the right thing. */
4013 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
d1822754 4014 arg0_type, low, 0, arg1, 0);
ebde8a27 4015 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
d1822754 4016 arg0_type, high, 1, arg1, 0);
ebde8a27
RK
4017 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4018 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4019 break;
4020
6ac01510
ILT
4021 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4022 *strict_overflow_p = true;
4023
3c00684e
JL
4024 /* Check for an unsigned range which has wrapped around the maximum
4025 value thus making n_high < n_low, and normalize it. */
5a9d82a6 4026 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3c00684e 4027 {
d1822754 4028 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
0e1c7fc7 4029 integer_one_node, 0);
d1822754 4030 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
c2b63960
AO
4031 integer_one_node, 0);
4032
4033 /* If the range is of the form +/- [ x+1, x ], we won't
4034 be able to normalize it. But then, it represents the
4035 whole range or the empty set, so make it
4036 +/- [ -, - ]. */
4037 if (tree_int_cst_equal (n_low, low)
4038 && tree_int_cst_equal (n_high, high))
4039 low = high = 0;
4040 else
4041 in_p = ! in_p;
3c00684e 4042 }
5a9d82a6
JW
4043 else
4044 low = n_low, high = n_high;
27bae8e5 4045
ebde8a27
RK
4046 exp = arg0;
4047 continue;
4048
1043771b 4049 CASE_CONVERT: case NON_LVALUE_EXPR:
d1822754 4050 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
7d12cee1
JL
4051 break;
4052
d1822754
EC
4053 if (! INTEGRAL_TYPE_P (arg0_type)
4054 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4055 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
ebde8a27
RK
4056 break;
4057
ce2157a1 4058 n_low = low, n_high = high;
ebde8a27 4059
ce2157a1 4060 if (n_low != 0)
db3927fb 4061 n_low = fold_convert_loc (loc, arg0_type, n_low);
ce2157a1
JL
4062
4063 if (n_high != 0)
db3927fb 4064 n_high = fold_convert_loc (loc, arg0_type, n_high);
ce2157a1 4065
ce2157a1 4066
d1822754 4067 /* If we're converting arg0 from an unsigned type to exp,
61ada8ae 4068 a signed type, we will be doing the comparison as unsigned.
d1822754
EC
4069 The tests above have already verified that LOW and HIGH
4070 are both positive.
4071
4072 So we have to ensure that we will handle large unsigned
4073 values the same way that the current signed bounds treat
4074 negative values. */
4075
4076 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
ce2157a1 4077 {
e1ee5cdc 4078 tree high_positive;
325217ed
CF
4079 tree equiv_type;
4080 /* For fixed-point modes, we need to pass the saturating flag
4081 as the 2nd parameter. */
4082 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4083 equiv_type = lang_hooks.types.type_for_mode
4084 (TYPE_MODE (arg0_type),
4085 TYPE_SATURATING (arg0_type));
4086 else
4087 equiv_type = lang_hooks.types.type_for_mode
4088 (TYPE_MODE (arg0_type), 1);
e1ee5cdc
RH
4089
4090 /* A range without an upper bound is, naturally, unbounded.
4091 Since convert would have cropped a very large value, use
14a774a9
RK
4092 the max value for the destination type. */
4093 high_positive
4094 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
d1822754 4095 : TYPE_MAX_VALUE (arg0_type);
e1ee5cdc 4096
d1822754 4097 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
db3927fb
AH
4098 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4099 fold_convert_loc (loc, arg0_type,
4100 high_positive),
000d8d44 4101 build_int_cst (arg0_type, 1));
b6cc0a72 4102
ce2157a1
JL
4103 /* If the low bound is specified, "and" the range with the
4104 range for which the original unsigned value will be
4105 positive. */
4106 if (low != 0)
4107 {
4108 if (! merge_ranges (&n_in_p, &n_low, &n_high,
088414c1 4109 1, n_low, n_high, 1,
db3927fb
AH
4110 fold_convert_loc (loc, arg0_type,
4111 integer_zero_node),
ce2157a1
JL
4112 high_positive))
4113 break;
4114
4115 in_p = (n_in_p == in_p);
4116 }
4117 else
4118 {
4119 /* Otherwise, "or" the range with the range of the input
4120 that will be interpreted as negative. */
4121 if (! merge_ranges (&n_in_p, &n_low, &n_high,
088414c1 4122 0, n_low, n_high, 1,
db3927fb
AH
4123 fold_convert_loc (loc, arg0_type,
4124 integer_zero_node),
ce2157a1
JL
4125 high_positive))
4126 break;
4127
4128 in_p = (in_p != n_in_p);
4129 }
4130 }
ebde8a27
RK
4131
4132 exp = arg0;
ce2157a1 4133 low = n_low, high = n_high;
ebde8a27 4134 continue;
ce2157a1
JL
4135
4136 default:
4137 break;
ef659ec0 4138 }
ebde8a27
RK
4139
4140 break;
ef659ec0 4141 }
ebde8a27 4142
80906567
RK
4143 /* If EXP is a constant, we can evaluate whether this is true or false. */
4144 if (TREE_CODE (exp) == INTEGER_CST)
4145 {
4146 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4147 exp, 0, low, 0))
4148 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4149 exp, 1, high, 1)));
4150 low = high = 0;
4151 exp = 0;
4152 }
4153
ebde8a27
RK
4154 *pin_p = in_p, *plow = low, *phigh = high;
4155 return exp;
4156}
4157\f
4158/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4159 type, TYPE, return an expression to test if EXP is in (or out of, depending
e1af8299 4160 on IN_P) the range. Return 0 if the test couldn't be created. */
ebde8a27 4161
a243fb4a 4162tree
db3927fb
AH
4163build_range_check (location_t loc, tree type, tree exp, int in_p,
4164 tree low, tree high)
ebde8a27 4165{
849d624b 4166 tree etype = TREE_TYPE (exp), value;
ebde8a27 4167
f60c951c
JDA
4168#ifdef HAVE_canonicalize_funcptr_for_compare
4169 /* Disable this optimization for function pointer expressions
4170 on targets that require function pointer canonicalization. */
4171 if (HAVE_canonicalize_funcptr_for_compare
4172 && TREE_CODE (etype) == POINTER_TYPE
4173 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4174 return NULL_TREE;
4175#endif
4176
e1af8299
JJ
4177 if (! in_p)
4178 {
db3927fb 4179 value = build_range_check (loc, type, exp, 1, low, high);
e1af8299 4180 if (value != 0)
db3927fb 4181 return invert_truthvalue_loc (loc, value);
e1af8299
JJ
4182
4183 return 0;
4184 }
ebde8a27 4185
dbfb1116 4186 if (low == 0 && high == 0)
57decb7e 4187 return build_int_cst (type, 1);
ebde8a27 4188
dbfb1116 4189 if (low == 0)
db3927fb
AH
4190 return fold_build2_loc (loc, LE_EXPR, type, exp,
4191 fold_convert_loc (loc, etype, high));
ebde8a27 4192
dbfb1116 4193 if (high == 0)
db3927fb
AH
4194 return fold_build2_loc (loc, GE_EXPR, type, exp,
4195 fold_convert_loc (loc, etype, low));
ebde8a27 4196
dbfb1116 4197 if (operand_equal_p (low, high, 0))
db3927fb
AH
4198 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4199 fold_convert_loc (loc, etype, low));
ebde8a27 4200
dbfb1116 4201 if (integer_zerop (low))
ef659ec0 4202 {
8df83eae 4203 if (! TYPE_UNSIGNED (etype))
dd3f0101 4204 {
ca5ba2a3 4205 etype = unsigned_type_for (etype);
db3927fb
AH
4206 high = fold_convert_loc (loc, etype, high);
4207 exp = fold_convert_loc (loc, etype, exp);
dd3f0101 4208 }
db3927fb 4209 return build_range_check (loc, type, exp, 1, 0, high);
ebde8a27 4210 }
ef659ec0 4211
dbfb1116
RS
4212 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4213 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4214 {
4215 unsigned HOST_WIDE_INT lo;
4216 HOST_WIDE_INT hi;
4217 int prec;
4218
4219 prec = TYPE_PRECISION (etype);
4220 if (prec <= HOST_BITS_PER_WIDE_INT)
dd3f0101
KH
4221 {
4222 hi = 0;
4223 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4224 }
dbfb1116 4225 else
dd3f0101
KH
4226 {
4227 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4228 lo = (unsigned HOST_WIDE_INT) -1;
4229 }
dbfb1116
RS
4230
4231 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
dd3f0101 4232 {
8df83eae 4233 if (TYPE_UNSIGNED (etype))
dd3f0101 4234 {
972afb58
JJ
4235 tree signed_etype = signed_type_for (etype);
4236 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4237 etype
4238 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4239 else
4240 etype = signed_etype;
db3927fb 4241 exp = fold_convert_loc (loc, etype, exp);
dd3f0101 4242 }
db3927fb 4243 return fold_build2_loc (loc, GT_EXPR, type, exp,
57decb7e 4244 build_int_cst (etype, 0));
dd3f0101 4245 }
dbfb1116
RS
4246 }
4247
f8fe0545 4248 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
84fb43a1
EB
4249 This requires wrap-around arithmetic for the type of the expression.
4250 First make sure that arithmetic in this type is valid, then make sure
4251 that it wraps around. */
4252 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4253 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4254 TYPE_UNSIGNED (etype));
f8fe0545 4255
84fb43a1 4256 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
e1af8299
JJ
4257 {
4258 tree utype, minv, maxv;
4259
4260 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4261 for the type in question, as we rely on this here. */
ca5ba2a3 4262 utype = unsigned_type_for (etype);
db3927fb 4263 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
f8fe0545
EB
4264 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4265 integer_one_node, 1);
db3927fb 4266 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
f8fe0545
EB
4267
4268 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4269 minv, 1, maxv, 1)))
4270 etype = utype;
4271 else
4272 return 0;
e1af8299
JJ
4273 }
4274
db3927fb
AH
4275 high = fold_convert_loc (loc, etype, high);
4276 low = fold_convert_loc (loc, etype, low);
4277 exp = fold_convert_loc (loc, etype, exp);
438090c3 4278
43a5d30b 4279 value = const_binop (MINUS_EXPR, high, low);
f8fe0545 4280
5be014d5
AP
4281
4282 if (POINTER_TYPE_P (etype))
4283 {
4284 if (value != 0 && !TREE_OVERFLOW (value))
4285 {
db3927fb
AH
4286 low = fold_convert_loc (loc, sizetype, low);
4287 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4288 return build_range_check (loc, type,
4289 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4290 etype, exp, low),
5be014d5
AP
4291 1, build_int_cst (etype, 0), value);
4292 }
4293 return 0;
4294 }
4295
f8fe0545 4296 if (value != 0 && !TREE_OVERFLOW (value))
db3927fb
AH
4297 return build_range_check (loc, type,
4298 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
f8fe0545 4299 1, build_int_cst (etype, 0), value);
dbfb1116
RS
4300
4301 return 0;
ebde8a27
RK
4302}
4303\f
2f96b754
EB
4304/* Return the predecessor of VAL in its type, handling the infinite case. */
4305
4306static tree
4307range_predecessor (tree val)
4308{
4309 tree type = TREE_TYPE (val);
4310
1464eeb8
EB
4311 if (INTEGRAL_TYPE_P (type)
4312 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
2f96b754
EB
4313 return 0;
4314 else
4315 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4316}
4317
4318/* Return the successor of VAL in its type, handling the infinite case. */
4319
4320static tree
4321range_successor (tree val)
4322{
4323 tree type = TREE_TYPE (val);
4324
1464eeb8
EB
4325 if (INTEGRAL_TYPE_P (type)
4326 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
2f96b754
EB
4327 return 0;
4328 else
4329 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4330}
4331
b6cc0a72 4332/* Given two ranges, see if we can merge them into one. Return 1 if we
ebde8a27 4333 can, 0 if we can't. Set the output range into the specified parameters. */
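/* For example, with both ranges included (IN0_P and IN1_P nonzero),
   "+ [2, 5]" and "+ [4, 10]" merge to "+ [4, 5]"; with both excluded,
   "- [2, 5]" and "- [4, 10]" merge to "- [2, 10]".  */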
ef659ec0 4334
a243fb4a 4335bool
75040a04
AJ
4336merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4337 tree high0, int in1_p, tree low1, tree high1)
ebde8a27
RK
4338{
4339 int no_overlap;
4340 int subset;
4341 int temp;
4342 tree tem;
4343 int in_p;
4344 tree low, high;
ce2157a1
JL
4345 int lowequal = ((low0 == 0 && low1 == 0)
4346 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4347 low0, 0, low1, 0)));
4348 int highequal = ((high0 == 0 && high1 == 0)
4349 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4350 high0, 1, high1, 1)));
4351
4352 /* Make range 0 be the range that starts first, or ends last if they
4353 start at the same value. Swap them if it isn't. */
b6cc0a72 4354 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
ebde8a27 4355 low0, 0, low1, 0))
ce2157a1 4356 || (lowequal
ebde8a27 4357 && integer_onep (range_binop (GT_EXPR, integer_type_node,
ce2157a1 4358 high1, 1, high0, 1))))
ebde8a27
RK
4359 {
4360 temp = in0_p, in0_p = in1_p, in1_p = temp;
4361 tem = low0, low0 = low1, low1 = tem;
4362 tem = high0, high0 = high1, high1 = tem;
4363 }
ef659ec0 4364
ebde8a27
RK
4365 /* Now flag two cases, whether the ranges are disjoint or whether the
4366 second range is totally subsumed in the first. Note that the tests
4367 below are simplified by the ones above. */
4368 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4369 high0, 1, low1, 0));
5df8a1f2 4370 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
ebde8a27
RK
4371 high1, 1, high0, 1));
4372
4373 /* We now have four cases, depending on whether we are including or
4374 excluding the two ranges. */
4375 if (in0_p && in1_p)
4376 {
4377 /* If they don't overlap, the result is false. If the second range
4378 is a subset it is the result. Otherwise, the range is from the start
4379 of the second to the end of the first. */
4380 if (no_overlap)
4381 in_p = 0, low = high = 0;
4382 else if (subset)
4383 in_p = 1, low = low1, high = high1;
4384 else
4385 in_p = 1, low = low1, high = high0;
4386 }
ef659ec0 4387
ebde8a27
RK
4388 else if (in0_p && ! in1_p)
4389 {
ce2157a1
JL
4390 /* If they don't overlap, the result is the first range. If they are
4391 equal, the result is false. If the second range is a subset of the
4392 first, and the ranges begin at the same place, we go from just after
f8fe0545 4393 the end of the second range to the end of the first. If the second
ce2157a1
JL
4394 range is not a subset of the first, or if it is a subset and both
4395 ranges end at the same place, the range starts at the start of the
4396 first range and ends just before the second range.
4397 Otherwise, we can't describe this as a single range. */
ebde8a27
RK
4398 if (no_overlap)
4399 in_p = 1, low = low0, high = high0;
ce2157a1 4400 else if (lowequal && highequal)
405862dd 4401 in_p = 0, low = high = 0;
ce2157a1
JL
4402 else if (subset && lowequal)
4403 {
f8fe0545
EB
4404 low = range_successor (high1);
4405 high = high0;
39ac2ffc
ILT
4406 in_p = 1;
4407 if (low == 0)
4408 {
4409 /* We are in the weird situation where high0 > high1 but
4410 high1 has no successor. Punt. */
4411 return 0;
4412 }
ce2157a1
JL
4413 }
4414 else if (! subset || highequal)
ebde8a27 4415 {
f8fe0545
EB
4416 low = low0;
4417 high = range_predecessor (low1);
39ac2ffc
ILT
4418 in_p = 1;
4419 if (high == 0)
4420 {
4421 /* low0 < low1 but low1 has no predecessor. Punt. */
4422 return 0;
4423 }
ebde8a27 4424 }
ce2157a1
JL
4425 else
4426 return 0;
ebde8a27 4427 }
ef659ec0 4428
ebde8a27
RK
4429 else if (! in0_p && in1_p)
4430 {
4431 /* If they don't overlap, the result is the second range. If the second
4432 is a subset of the first, the result is false. Otherwise,
4433 the range starts just after the first range and ends at the
4434 end of the second. */
4435 if (no_overlap)
4436 in_p = 1, low = low1, high = high1;
14a774a9 4437 else if (subset || highequal)
ebde8a27
RK
4438 in_p = 0, low = high = 0;
4439 else
4440 {
f8fe0545
EB
4441 low = range_successor (high0);
4442 high = high1;
39ac2ffc
ILT
4443 in_p = 1;
4444 if (low == 0)
4445 {
4446 /* high1 > high0 but high0 has no successor. Punt. */
4447 return 0;
4448 }
ef659ec0
TW
4449 }
4450 }
4451
ebde8a27
RK
4452 else
4453 {
4454 /* The case where we are excluding both ranges. Here the complex case
4455 is if they don't overlap. In that case, the only time we have a
4456 range is if they are adjacent. If the second is a subset of the
4457 first, the result is the first. Otherwise, the range to exclude
4458 starts at the beginning of the first range and ends at the end of the
4459 second. */
4460 if (no_overlap)
4461 {
4462 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
f8fe0545 4463 range_successor (high0),
ebde8a27
RK
4464 1, low1, 0)))
4465 in_p = 0, low = low0, high = high1;
4466 else
e1af8299
JJ
4467 {
4468 /* Canonicalize - [min, x] into - [-, x]. */
4469 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4470 switch (TREE_CODE (TREE_TYPE (low0)))
4471 {
4472 case ENUMERAL_TYPE:
4473 if (TYPE_PRECISION (TREE_TYPE (low0))
4474 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4475 break;
4476 /* FALLTHROUGH */
4477 case INTEGER_TYPE:
e1af8299
JJ
4478 if (tree_int_cst_equal (low0,
4479 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4480 low0 = 0;
4481 break;
4482 case POINTER_TYPE:
4483 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4484 && integer_zerop (low0))
4485 low0 = 0;
4486 break;
4487 default:
4488 break;
4489 }
4490
4491 /* Canonicalize - [x, max] into - [x, -]. */
4492 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4493 switch (TREE_CODE (TREE_TYPE (high1)))
4494 {
4495 case ENUMERAL_TYPE:
4496 if (TYPE_PRECISION (TREE_TYPE (high1))
4497 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4498 break;
4499 /* FALLTHROUGH */
4500 case INTEGER_TYPE:
e1af8299
JJ
4501 if (tree_int_cst_equal (high1,
4502 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4503 high1 = 0;
4504 break;
4505 case POINTER_TYPE:
4506 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4507 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4508 high1, 1,
4509 integer_one_node, 1)))
4510 high1 = 0;
4511 break;
4512 default:
4513 break;
4514 }
4515
4516 /* The ranges might also be adjacent between the maximum and
4517 minimum values of the given type. For
4518 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4519 return + [x + 1, y - 1]. */
4520 if (low0 == 0 && high1 == 0)
4521 {
2f96b754
EB
4522 low = range_successor (high0);
4523 high = range_predecessor (low1);
e1af8299
JJ
4524 if (low == 0 || high == 0)
4525 return 0;
4526
4527 in_p = 1;
4528 }
4529 else
4530 return 0;
4531 }
ebde8a27
RK
4532 }
4533 else if (subset)
4534 in_p = 0, low = low0, high = high0;
4535 else
4536 in_p = 0, low = low0, high = high1;
4537 }
f5902869 4538
ebde8a27
RK
4539 *pin_p = in_p, *plow = low, *phigh = high;
4540 return 1;
4541}
2c486ea7
PB
4542\f
4543
4544/* Subroutine of fold, looking inside expressions of the form
2851dd68
PB
4545 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4546 of the COND_EXPR. This function is also used to optimize
4547 A op B ? C : A, by reversing the comparison first.
2c486ea7
PB
4548
4549 Return a folded expression whose code is not a COND_EXPR
4550 anymore, or NULL_TREE if no folding opportunity is found. */
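/* Typical results, detailed in the comments below: A > 0 ? A : -A becomes
   ABS_EXPR <A>, and A < B ? A : B becomes a MIN_EXPR, provided signed zeros
   and NaNs do not rule the transformation out.  */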
4551
4552static tree
db3927fb
AH
4553fold_cond_expr_with_comparison (location_t loc, tree type,
4554 tree arg0, tree arg1, tree arg2)
2c486ea7
PB
4555{
4556 enum tree_code comp_code = TREE_CODE (arg0);
4557 tree arg00 = TREE_OPERAND (arg0, 0);
4558 tree arg01 = TREE_OPERAND (arg0, 1);
2851dd68 4559 tree arg1_type = TREE_TYPE (arg1);
2c486ea7 4560 tree tem;
2851dd68
PB
4561
4562 STRIP_NOPS (arg1);
2c486ea7
PB
4563 STRIP_NOPS (arg2);
4564
4565 /* If we have A op 0 ? A : -A, consider applying the following
4566 transformations:
4567
4568 A == 0? A : -A same as -A
4569 A != 0? A : -A same as A
4570 A >= 0? A : -A same as abs (A)
4571 A > 0? A : -A same as abs (A)
4572 A <= 0? A : -A same as -abs (A)
4573 A < 0? A : -A same as -abs (A)
4574
4575 None of these transformations work for modes with signed
4576 zeros. If A is +/-0, the first two transformations will
4577 change the sign of the result (from +0 to -0, or vice
4578 versa). The last four will fix the sign of the result,
4579 even though the original expressions could be positive or
4580 negative, depending on the sign of A.
4581
4582 Note that all these transformations are correct if A is
4583 NaN, since the two alternatives (A and -A) are also NaNs. */
5ce0e197
UB
4584 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4585 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4586 ? real_zerop (arg01)
4587 : integer_zerop (arg01))
a10d70ba
PH
4588 && ((TREE_CODE (arg2) == NEGATE_EXPR
4589 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4590 /* In the case that A is of the form X-Y, '-A' (arg2) may
4591 have already been folded to Y-X, check for that. */
4592 || (TREE_CODE (arg1) == MINUS_EXPR
4593 && TREE_CODE (arg2) == MINUS_EXPR
4594 && operand_equal_p (TREE_OPERAND (arg1, 0),
4595 TREE_OPERAND (arg2, 1), 0)
4596 && operand_equal_p (TREE_OPERAND (arg1, 1),
4597 TREE_OPERAND (arg2, 0), 0))))
2c486ea7
PB
4598 switch (comp_code)
4599 {
4600 case EQ_EXPR:
3ae472c2 4601 case UNEQ_EXPR:
db3927fb
AH
4602 tem = fold_convert_loc (loc, arg1_type, arg1);
4603 return pedantic_non_lvalue_loc (loc,
4604 fold_convert_loc (loc, type,
4605 negate_expr (tem)));
2c486ea7 4606 case NE_EXPR:
3ae472c2 4607 case LTGT_EXPR:
db3927fb 4608 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
3ae472c2
RS
4609 case UNGE_EXPR:
4610 case UNGT_EXPR:
4611 if (flag_trapping_math)
4612 break;
4613 /* Fall through. */
2c486ea7
PB
4614 case GE_EXPR:
4615 case GT_EXPR:
2851dd68 4616 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
db3927fb 4617 arg1 = fold_convert_loc (loc, signed_type_for
2851dd68 4618 (TREE_TYPE (arg1)), arg1);
db3927fb
AH
4619 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4620 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
3ae472c2
RS
4621 case UNLE_EXPR:
4622 case UNLT_EXPR:
4623 if (flag_trapping_math)
4624 break;
2c486ea7
PB
4625 case LE_EXPR:
4626 case LT_EXPR:
2851dd68 4627 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
db3927fb 4628 arg1 = fold_convert_loc (loc, signed_type_for
2851dd68 4629 (TREE_TYPE (arg1)), arg1);
db3927fb
AH
4630 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4631 return negate_expr (fold_convert_loc (loc, type, tem));
2c486ea7 4632 default:
6615c446 4633 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
3ae472c2 4634 break;
2c486ea7
PB
4635 }
4636
4637 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4638 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4639 both transformations are correct when A is NaN: A != 0
4640 is then true, and A == 0 is false. */
4641
5ce0e197
UB
4642 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4643 && integer_zerop (arg01) && integer_zerop (arg2))
2c486ea7
PB
4644 {
4645 if (comp_code == NE_EXPR)
db3927fb 4646 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
2c486ea7 4647 else if (comp_code == EQ_EXPR)
57decb7e 4648 return build_int_cst (type, 0);
2c486ea7
PB
4649 }
4650
4651 /* Try some transformations of A op B ? A : B.
4652
4653 A == B? A : B same as B
4654 A != B? A : B same as A
4655 A >= B? A : B same as max (A, B)
4656 A > B? A : B same as max (B, A)
4657 A <= B? A : B same as min (A, B)
4658 A < B? A : B same as min (B, A)
4659
4660 As above, these transformations don't work in the presence
4661 of signed zeros. For example, if A and B are zeros of
4662 opposite sign, the first two transformations will change
4663 the sign of the result. In the last four, the original
4664 expressions give different results for (A=+0, B=-0) and
4665 (A=-0, B=+0), but the transformed expressions do not.
4666
4667 The first two transformations are correct if either A or B
4668 is a NaN. In the first transformation, the condition will
4669 be false, and B will indeed be chosen. In the case of the
4670 second transformation, the condition A != B will be true,
4671 and A will be chosen.
4672
4673 The conversions to max() and min() are not correct if B is
4674 a number and A is not. The conditions in the original
4675 expressions will be false, so all four give B. The min()
4676 and max() versions would give a NaN instead. */
5ce0e197
UB
4677 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4678 && operand_equal_for_comparison_p (arg01, arg2, arg00)
283da5df
RS
4679 /* Avoid these transformations if the COND_EXPR may be used
4680 as an lvalue in the C++ front-end. PR c++/19199. */
4681 && (in_gimple_form
6b4e9576
FJ
4682 || (strcmp (lang_hooks.name, "GNU C++") != 0
4683 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
283da5df
RS
4684 || ! maybe_lvalue_p (arg1)
4685 || ! maybe_lvalue_p (arg2)))
2c486ea7
PB
4686 {
4687 tree comp_op0 = arg00;
4688 tree comp_op1 = arg01;
4689 tree comp_type = TREE_TYPE (comp_op0);
4690
4691 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4692 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4693 {
4694 comp_type = type;
2851dd68 4695 comp_op0 = arg1;
2c486ea7
PB
4696 comp_op1 = arg2;
4697 }
4698
4699 switch (comp_code)
4700 {
4701 case EQ_EXPR:
db3927fb 4702 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
2c486ea7 4703 case NE_EXPR:
db3927fb 4704 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
2c486ea7
PB
4705 case LE_EXPR:
4706 case LT_EXPR:
3ae472c2
RS
4707 case UNLE_EXPR:
4708 case UNLT_EXPR:
2c486ea7
PB
4709 /* In C++ a ?: expression can be an lvalue, so put the
4710 operand which will be used if they are equal first
4711 so that we can convert this back to the
4712 corresponding COND_EXPR. */
2851dd68 4713 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
e9ea8bd5 4714 {
db3927fb
AH
4715 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4716 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
3ae472c2 4717 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
db3927fb
AH
4718 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4719 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4720 comp_op1, comp_op0);
4721 return pedantic_non_lvalue_loc (loc,
4722 fold_convert_loc (loc, type, tem));
e9ea8bd5 4723 }
2c486ea7
PB
4724 break;
4725 case GE_EXPR:
4726 case GT_EXPR:
3ae472c2
RS
4727 case UNGE_EXPR:
4728 case UNGT_EXPR:
2851dd68 4729 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
e9ea8bd5 4730 {
db3927fb
AH
4731 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4732 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
3ae472c2 4733 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
db3927fb
AH
4734 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4735 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4736 comp_op1, comp_op0);
4737 return pedantic_non_lvalue_loc (loc,
4738 fold_convert_loc (loc, type, tem));
e9ea8bd5 4739 }
2c486ea7 4740 break;
3ae472c2
RS
4741 case UNEQ_EXPR:
4742 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
db3927fb
AH
4743 return pedantic_non_lvalue_loc (loc,
4744 fold_convert_loc (loc, type, arg2));
3ae472c2
RS
4745 break;
4746 case LTGT_EXPR:
4747 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
db3927fb
AH
4748 return pedantic_non_lvalue_loc (loc,
4749 fold_convert_loc (loc, type, arg1));
3ae472c2 4750 break;
2c486ea7 4751 default:
6615c446 4752 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
3ae472c2 4753 break;
2c486ea7
PB
4754 }
4755 }
4756
4757 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4758 we might still be able to simplify this. For example,
4759 if C1 is one less or one more than C2, this might have started
4760 out as a MIN or MAX and been transformed by this function.
4761 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4762
4763 if (INTEGRAL_TYPE_P (type)
4764 && TREE_CODE (arg01) == INTEGER_CST
4765 && TREE_CODE (arg2) == INTEGER_CST)
4766 switch (comp_code)
4767 {
4768 case EQ_EXPR:
b9da76de
JJ
4769 if (TREE_CODE (arg1) == INTEGER_CST)
4770 break;
2c486ea7 4771 /* We can replace A with C1 in this case. */
db3927fb
AH
4772 arg1 = fold_convert_loc (loc, type, arg01);
4773 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
2c486ea7
PB
4774
4775 case LT_EXPR:
b4e4232d
JJ
4776 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4777 MIN_EXPR, to preserve the signedness of the comparison. */
2c486ea7
PB
4778 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4779 OEP_ONLY_CONST)
4780 && operand_equal_p (arg01,
4781 const_binop (PLUS_EXPR, arg2,
43a5d30b 4782 build_int_cst (type, 1)),
2c486ea7 4783 OEP_ONLY_CONST))
b4e4232d 4784 {
db3927fb
AH
4785 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4786 fold_convert_loc (loc, TREE_TYPE (arg00),
4787 arg2));
b8698a0f 4788 return pedantic_non_lvalue_loc (loc,
db3927fb 4789 fold_convert_loc (loc, type, tem));
b4e4232d 4790 }
2c486ea7
PB
4791 break;
4792
4793 case LE_EXPR:
b4e4232d
JJ
4794 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4795 as above. */
2c486ea7
PB
4796 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4797 OEP_ONLY_CONST)
4798 && operand_equal_p (arg01,
4799 const_binop (MINUS_EXPR, arg2,
43a5d30b 4800 build_int_cst (type, 1)),
2c486ea7 4801 OEP_ONLY_CONST))
b4e4232d 4802 {
db3927fb
AH
4803 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4804 fold_convert_loc (loc, TREE_TYPE (arg00),
4805 arg2));
4806 return pedantic_non_lvalue_loc (loc,
4807 fold_convert_loc (loc, type, tem));
b4e4232d 4808 }
2c486ea7
PB
4809 break;
4810
4811 case GT_EXPR:
30349c74
PB
4812 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4813 MAX_EXPR, to preserve the signedness of the comparison. */
2c486ea7
PB
4814 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4815 OEP_ONLY_CONST)
4816 && operand_equal_p (arg01,
4817 const_binop (MINUS_EXPR, arg2,
43a5d30b 4818 build_int_cst (type, 1)),
2c486ea7 4819 OEP_ONLY_CONST))
b4e4232d 4820 {
db3927fb
AH
4821 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4822 fold_convert_loc (loc, TREE_TYPE (arg00),
4823 arg2));
4824 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
b4e4232d 4825 }
2c486ea7
PB
4826 break;
4827
4828 case GE_EXPR:
30349c74 4829 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
2c486ea7
PB
4830 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4831 OEP_ONLY_CONST)
4832 && operand_equal_p (arg01,
4833 const_binop (PLUS_EXPR, arg2,
43a5d30b 4834 build_int_cst (type, 1)),
2c486ea7 4835 OEP_ONLY_CONST))
b4e4232d 4836 {
db3927fb
AH
4837 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4838 fold_convert_loc (loc, TREE_TYPE (arg00),
4839 arg2));
4840 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
b4e4232d 4841 }
2c486ea7
PB
4842 break;
4843 case NE_EXPR:
4844 break;
4845 default:
0bccc606 4846 gcc_unreachable ();
2c486ea7
PB
4847 }
4848
4849 return NULL_TREE;
4850}
4851
4852
ebde8a27 4853\f
b8610a53 4854#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
3a4fd356 4855#define LOGICAL_OP_NON_SHORT_CIRCUIT \
7f4b6d20 4856 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
3a4fd356 4857 false) >= 2)
85e50b6b
DE
4858#endif
4859
ebde8a27
RK
4860/* EXP is some logical combination of boolean tests. See if we can
4861 merge it into some range test. Return the new tree if so. */
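/* For instance, a test such as

     int is_digit (int ch) { return ch >= '0' && ch <= '9'; }

   can be merged by build_range_check into a single unsigned comparison,
   conceptually ((unsigned) ch - '0') <= 9.  */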
ef659ec0 4862
ebde8a27 4863static tree
db3927fb
AH
4864fold_range_test (location_t loc, enum tree_code code, tree type,
4865 tree op0, tree op1)
ebde8a27 4866{
e1f04615
KH
4867 int or_op = (code == TRUTH_ORIF_EXPR
4868 || code == TRUTH_OR_EXPR);
ebde8a27
RK
4869 int in0_p, in1_p, in_p;
4870 tree low0, low1, low, high0, high1, high;
6ac01510
ILT
4871 bool strict_overflow_p = false;
4872 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4873 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
ebde8a27 4874 tree tem;
6ac01510
ILT
4875 const char * const warnmsg = G_("assuming signed overflow does not occur "
4876 "when simplifying range test");
ef659ec0 4877
ebde8a27
RK
4878 /* If this is an OR operation, invert both sides; we will invert
4879 again at the end. */
4880 if (or_op)
4881 in0_p = ! in0_p, in1_p = ! in1_p;
4882
4883 /* If both expressions are the same, if we can merge the ranges, and we
80906567
RK
4884 can build the range test, return it or it inverted. If one of the
4885 ranges is always true or always false, consider it to be the same
4886 expression as the other. */
4887 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
ebde8a27
RK
4888 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4889 in1_p, low1, high1)
db3927fb 4890 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
80906567
RK
4891 lhs != 0 ? lhs
4892 : rhs != 0 ? rhs : integer_zero_node,
ebde8a27 4893 in_p, low, high))))
6ac01510
ILT
4894 {
4895 if (strict_overflow_p)
4896 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
db3927fb 4897 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6ac01510 4898 }
ebde8a27
RK
4899
 4900 /* On machines where the branch cost is high, if this is a
4901 short-circuited branch and the underlying object on both sides
4902 is the same, make a non-short-circuit operation. */
b8610a53 4903 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
7cf5c9e1 4904 && lhs != 0 && rhs != 0
e1f04615
KH
4905 && (code == TRUTH_ANDIF_EXPR
4906 || code == TRUTH_ORIF_EXPR)
ebde8a27 4907 && operand_equal_p (lhs, rhs, 0))
ef659ec0 4908 {
f0eebf28 4909 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
9ec36da5
JL
4910 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4911 which cases we can't do this. */
ebde8a27 4912 if (simple_operand_p (lhs))
db3927fb
AH
4913 {
4914 tem = build2 (code == TRUTH_ANDIF_EXPR
4915 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4916 type, op0, op1);
4917 SET_EXPR_LOCATION (tem, loc);
4918 return tem;
4919 }
f0eebf28 4920
5785c7de 4921 else if (lang_hooks.decls.global_bindings_p () == 0
7a6cdb44 4922 && ! CONTAINS_PLACEHOLDER_P (lhs))
ebde8a27
RK
4923 {
4924 tree common = save_expr (lhs);
4925
db3927fb 4926 if (0 != (lhs = build_range_check (loc, type, common,
ebde8a27
RK
4927 or_op ? ! in0_p : in0_p,
4928 low0, high0))
db3927fb 4929 && (0 != (rhs = build_range_check (loc, type, common,
ebde8a27
RK
4930 or_op ? ! in1_p : in1_p,
4931 low1, high1))))
6ac01510
ILT
4932 {
4933 if (strict_overflow_p)
4934 fold_overflow_warning (warnmsg,
4935 WARN_STRICT_OVERFLOW_COMPARISON);
db3927fb
AH
4936 tem = build2 (code == TRUTH_ANDIF_EXPR
4937 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4938 type, lhs, rhs);
4939 SET_EXPR_LOCATION (tem, loc);
4940 return tem;
6ac01510 4941 }
ebde8a27 4942 }
ef659ec0 4943 }
de153e82 4944
de153e82 4945 return 0;
ef659ec0
TW
4946}
4947\f
02103577 4948/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
25216284 4949 bit value. Arrange things so the extra bits will be set to zero if and
d4453ee5
RK
 4950 only if C is sign-extended to its full width. If MASK is nonzero,
4951 it is an INTEGER_CST that should be AND'ed with the extra bits. */
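/* For instance (assuming a 32-bit mode and a zero MASK): for a signed
   3-bit field, C == 5 (binary 101) comes back as 0xfffffffd, its
   sign-extended form, while C == 0xfffffffd comes back as 5 with the
   extra bits clear.  */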
02103577
RK
4952
4953static tree
fa8db1f7 4954unextend (tree c, int p, int unsignedp, tree mask)
02103577
RK
4955{
4956 tree type = TREE_TYPE (c);
4957 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4958 tree temp;
4959
4960 if (p == modesize || unsignedp)
4961 return c;
4962
02103577 4963 /* We work by getting just the sign bit into the low-order bit, then
9faa82d8 4964 into the high-order bit, then sign-extend. We then XOR that value
02103577 4965 with C. */
43a5d30b
AS
4966 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4967 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
cf85c69b
JW
4968
4969 /* We must use a signed type in order to get an arithmetic right shift.
4970 However, we must also avoid introducing accidental overflows, so that
b6cc0a72 4971 a subsequent call to integer_zerop will work. Hence we must
cf85c69b
JW
4972 do the type conversion here. At this point, the constant is either
4973 zero or one, and the conversion to a signed type can never overflow.
4974 We could get an overflow if this conversion is done anywhere else. */
8df83eae 4975 if (TYPE_UNSIGNED (type))
12753674 4976 temp = fold_convert (signed_type_for (type), temp);
cf85c69b 4977
43a5d30b
AS
4978 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4979 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
d4453ee5 4980 if (mask != 0)
088414c1 4981 temp = const_binop (BIT_AND_EXPR, temp,
43a5d30b 4982 fold_convert (TREE_TYPE (c), mask));
cf85c69b 4983 /* If necessary, convert the type back to match the type of C. */
8df83eae 4984 if (TYPE_UNSIGNED (type))
088414c1 4985 temp = fold_convert (type, temp);
d4453ee5 4986
43a5d30b 4987 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
02103577
RK
4988}
4989\f
27d0d96a
BS
4990/* For an expression that has the form
4991 (A && B) || ~B
4992 or
4993 (A || B) && ~B,
4994 we can drop one of the inner expressions and simplify to
4995 A || ~B
4996 or
4997 A && ~B
4998 LOC is the location of the resulting expression. OP is the inner
4999 logical operation; the left-hand side in the examples above, while CMPOP
5000 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5001 removing a condition that guards another, as in
5002 (A != NULL && A->...) || A == NULL
5003 which we must not transform. If RHS_ONLY is true, only eliminate the
5004 right-most operand of the inner logical operation. */
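/* For instance, "(a > 0 && b > 0) || b <= 0" can be simplified to
   "a > 0 || b <= 0": the inner "b > 0" is the inverse of the CMPOP
   "b <= 0" and can therefore be dropped.  */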
5005
5006static tree
5007merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5008 bool rhs_only)
5009{
5010 tree type = TREE_TYPE (cmpop);
5011 enum tree_code code = TREE_CODE (cmpop);
5012 enum tree_code truthop_code = TREE_CODE (op);
5013 tree lhs = TREE_OPERAND (op, 0);
5014 tree rhs = TREE_OPERAND (op, 1);
5015 tree orig_lhs = lhs, orig_rhs = rhs;
5016 enum tree_code rhs_code = TREE_CODE (rhs);
5017 enum tree_code lhs_code = TREE_CODE (lhs);
5018 enum tree_code inv_code;
5019
5020 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5021 return NULL_TREE;
5022
5023 if (TREE_CODE_CLASS (code) != tcc_comparison)
5024 return NULL_TREE;
5025
5026 if (rhs_code == truthop_code)
5027 {
5028 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5029 if (newrhs != NULL_TREE)
5030 {
5031 rhs = newrhs;
5032 rhs_code = TREE_CODE (rhs);
5033 }
5034 }
5035 if (lhs_code == truthop_code && !rhs_only)
5036 {
5037 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5038 if (newlhs != NULL_TREE)
5039 {
5040 lhs = newlhs;
5041 lhs_code = TREE_CODE (lhs);
5042 }
5043 }
5044
5045 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5046 if (inv_code == rhs_code
5047 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5048 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5049 return lhs;
5050 if (!rhs_only && inv_code == lhs_code
5051 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5052 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5053 return rhs;
5054 if (rhs != orig_rhs || lhs != orig_lhs)
5055 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5056 lhs, rhs);
5057 return NULL_TREE;
5058}
5059
b2215d83
TW
5060/* Find ways of folding logical expressions of LHS and RHS:
5061 Try to merge two comparisons to the same innermost item.
5062 Look for range tests like "ch >= '0' && ch <= '9'".
5063 Look for combinations of simple terms on machines with expensive branches
5064 and evaluate the RHS unconditionally.
6d716ca8
RS
5065
5066 For example, if we have p->a == 2 && p->b == 4 and we can make an
5067 object large enough to span both A and B, we can do this with a comparison
5068 against the object ANDed with the a mask.
5069
5070 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5071 operations to do this with one comparison.
5072
 5073 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5074 function and the one above.
5075
5076 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5077 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5078
5079 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5080 two operands.
5081
5082 We return the simplified tree or 0 if no optimization is possible. */
5083
5084static tree
db3927fb
AH
5085fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5086 tree lhs, tree rhs)
6d716ca8 5087{
f42ef510 5088 /* If this is the "or" of two comparisons, we can do something if
6d716ca8 5089 the comparisons are NE_EXPR. If this is the "and", we can do something
b6cc0a72 5090 if the comparisons are EQ_EXPR. I.e.,
fa8db1f7 5091 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6d716ca8
RS
5092
5093 WANTED_CODE is this operation code. For single bit fields, we can
5094 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5095 comparison for one-bit fields. */
5096
b2215d83 5097 enum tree_code wanted_code;
6d716ca8 5098 enum tree_code lcode, rcode;
b2215d83 5099 tree ll_arg, lr_arg, rl_arg, rr_arg;
6d716ca8 5100 tree ll_inner, lr_inner, rl_inner, rr_inner;
770ae6cc
RK
5101 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5102 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
45dc13b9
JJ
5103 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5104 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6d716ca8
RS
5105 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5106 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
45dc13b9 5107 enum machine_mode lnmode, rnmode;
6d716ca8 5108 tree ll_mask, lr_mask, rl_mask, rr_mask;
d4453ee5 5109 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
b2215d83 5110 tree l_const, r_const;
45dc13b9
JJ
5111 tree lntype, rntype, result;
5112 HOST_WIDE_INT first_bit, end_bit;
b2215d83 5113 int volatilep;
47392a21
MM
5114 tree orig_lhs = lhs, orig_rhs = rhs;
5115 enum tree_code orig_code = code;
6d716ca8 5116
ebde8a27
RK
5117 /* Start by getting the comparison codes. Fail if anything is volatile.
5118 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5119 it were surrounded with a NE_EXPR. */
6d716ca8 5120
ebde8a27 5121 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
b2215d83
TW
5122 return 0;
5123
6d716ca8
RS
5124 lcode = TREE_CODE (lhs);
5125 rcode = TREE_CODE (rhs);
ef659ec0 5126
96d4cf0a 5127 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
59ce6d6b 5128 {
e9ea8bd5 5129 lhs = build2 (NE_EXPR, truth_type, lhs,
57decb7e 5130 build_int_cst (TREE_TYPE (lhs), 0));
59ce6d6b
RS
5131 lcode = NE_EXPR;
5132 }
96d4cf0a
RK
5133
5134 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
59ce6d6b 5135 {
e9ea8bd5 5136 rhs = build2 (NE_EXPR, truth_type, rhs,
57decb7e 5137 build_int_cst (TREE_TYPE (rhs), 0));
59ce6d6b
RS
5138 rcode = NE_EXPR;
5139 }
96d4cf0a 5140
6615c446
JO
5141 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5142 || TREE_CODE_CLASS (rcode) != tcc_comparison)
ef659ec0
TW
5143 return 0;
5144
b2215d83
TW
5145 ll_arg = TREE_OPERAND (lhs, 0);
5146 lr_arg = TREE_OPERAND (lhs, 1);
5147 rl_arg = TREE_OPERAND (rhs, 0);
5148 rr_arg = TREE_OPERAND (rhs, 1);
b6cc0a72 5149
8dcb27ed
RS
5150 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5151 if (simple_operand_p (ll_arg)
d1a7edaf 5152 && simple_operand_p (lr_arg))
8dcb27ed 5153 {
d1a7edaf 5154 tree result;
8dcb27ed
RS
5155 if (operand_equal_p (ll_arg, rl_arg, 0)
5156 && operand_equal_p (lr_arg, rr_arg, 0))
d1a7edaf 5157 {
db3927fb 5158 result = combine_comparisons (loc, code, lcode, rcode,
d1a7edaf
PB
5159 truth_type, ll_arg, lr_arg);
5160 if (result)
5161 return result;
5162 }
8dcb27ed
RS
5163 else if (operand_equal_p (ll_arg, rr_arg, 0)
5164 && operand_equal_p (lr_arg, rl_arg, 0))
d1a7edaf 5165 {
db3927fb 5166 result = combine_comparisons (loc, code, lcode,
d1a7edaf
PB
5167 swap_tree_comparison (rcode),
5168 truth_type, ll_arg, lr_arg);
5169 if (result)
5170 return result;
5171 }
8dcb27ed
RS
5172 }
5173
d1a7edaf
PB
5174 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5175 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5176
8227896c 5177 /* If the RHS can be evaluated unconditionally and its operands are
b2215d83
TW
5178 simple, it wins to evaluate the RHS unconditionally on machines
5179 with expensive branches. In this case, this isn't a comparison
1d691c53
RK
5180 that can be merged. Avoid doing this if the RHS is a floating-point
5181 comparison since those can trap. */
b2215d83 5182
7f4b6d20 5183 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
3a4fd356 5184 false) >= 2
1d691c53 5185 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
b2215d83 5186 && simple_operand_p (rl_arg)
8227896c 5187 && simple_operand_p (rr_arg))
01c58f26
RS
5188 {
5189 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5190 if (code == TRUTH_OR_EXPR
5191 && lcode == NE_EXPR && integer_zerop (lr_arg)
5192 && rcode == NE_EXPR && integer_zerop (rr_arg)
87a72aa8
AP
5193 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5194 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
db3927fb
AH
5195 {
5196 result = build2 (NE_EXPR, truth_type,
5197 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5198 ll_arg, rl_arg),
5199 build_int_cst (TREE_TYPE (ll_arg), 0));
5200 goto fold_truthop_exit;
5201 }
01c58f26
RS
5202
5203 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5204 if (code == TRUTH_AND_EXPR
5205 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5206 && rcode == EQ_EXPR && integer_zerop (rr_arg)
87a72aa8
AP
5207 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5208 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
db3927fb
AH
5209 {
5210 result = build2 (EQ_EXPR, truth_type,
5211 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5212 ll_arg, rl_arg),
5213 build_int_cst (TREE_TYPE (ll_arg), 0));
5214 goto fold_truthop_exit;
5215 }
01c58f26 5216
b8610a53 5217 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
47392a21
MM
5218 {
5219 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
db3927fb
AH
5220 {
5221 result = build2 (code, truth_type, lhs, rhs);
5222 goto fold_truthop_exit;
5223 }
47392a21
MM
5224 return NULL_TREE;
5225 }
01c58f26 5226 }
b2215d83 5227
ef659ec0
TW
5228 /* See if the comparisons can be merged. Then get all the parameters for
5229 each side. */
5230
6d716ca8 5231 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
ef659ec0 5232 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6d716ca8
RS
5233 return 0;
5234
b2215d83 5235 volatilep = 0;
db3927fb 5236 ll_inner = decode_field_reference (loc, ll_arg,
6d716ca8 5237 &ll_bitsize, &ll_bitpos, &ll_mode,
d4453ee5
RK
5238 &ll_unsignedp, &volatilep, &ll_mask,
5239 &ll_and_mask);
db3927fb 5240 lr_inner = decode_field_reference (loc, lr_arg,
6d716ca8 5241 &lr_bitsize, &lr_bitpos, &lr_mode,
d4453ee5
RK
5242 &lr_unsignedp, &volatilep, &lr_mask,
5243 &lr_and_mask);
db3927fb 5244 rl_inner = decode_field_reference (loc, rl_arg,
6d716ca8 5245 &rl_bitsize, &rl_bitpos, &rl_mode,
d4453ee5
RK
5246 &rl_unsignedp, &volatilep, &rl_mask,
5247 &rl_and_mask);
db3927fb 5248 rr_inner = decode_field_reference (loc, rr_arg,
6d716ca8 5249 &rr_bitsize, &rr_bitpos, &rr_mode,
d4453ee5
RK
5250 &rr_unsignedp, &volatilep, &rr_mask,
5251 &rr_and_mask);
6d716ca8
RS
5252
5253 /* It must be true that the inner operation on the lhs of each
5254 comparison must be the same if we are to be able to do anything.
5255 Then see if we have constants. If not, the same must be true for
5256 the rhs's. */
5257 if (volatilep || ll_inner == 0 || rl_inner == 0
5258 || ! operand_equal_p (ll_inner, rl_inner, 0))
5259 return 0;
5260
b2215d83
TW
5261 if (TREE_CODE (lr_arg) == INTEGER_CST
5262 && TREE_CODE (rr_arg) == INTEGER_CST)
5263 l_const = lr_arg, r_const = rr_arg;
6d716ca8
RS
5264 else if (lr_inner == 0 || rr_inner == 0
5265 || ! operand_equal_p (lr_inner, rr_inner, 0))
5266 return 0;
b2215d83
TW
5267 else
5268 l_const = r_const = 0;
6d716ca8
RS
5269
5270 /* If either comparison code is not correct for our logical operation,
5271 fail. However, we can convert a one-bit comparison against zero into
5272 the opposite comparison against that bit being set in the field. */
b2215d83 5273
9c0ae98b 5274 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6d716ca8
RS
5275 if (lcode != wanted_code)
5276 {
5277 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5a6b3365 5278 {
2bd21a02
AS
5279 /* Make the left operand unsigned, since we are only interested
5280 in the value of one bit. Otherwise we are doing the wrong
5281 thing below. */
5282 ll_unsignedp = 1;
71a874cd 5283 l_const = ll_mask;
5a6b3365 5284 }
6d716ca8
RS
5285 else
5286 return 0;
5287 }
5288
71a874cd 5289 /* This is analogous to the code for l_const above. */
6d716ca8
RS
5290 if (rcode != wanted_code)
5291 {
5292 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5a6b3365 5293 {
2bd21a02 5294 rl_unsignedp = 1;
71a874cd 5295 r_const = rl_mask;
5a6b3365 5296 }
6d716ca8
RS
5297 else
5298 return 0;
5299 }
5300
5301 /* See if we can find a mode that contains both fields being compared on
5302 the left. If we can't, fail. Otherwise, update all constants and masks
5303 to be relative to a field of that size. */
5304 first_bit = MIN (ll_bitpos, rl_bitpos);
5305 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5306 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5307 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5308 volatilep);
5309 if (lnmode == VOIDmode)
5310 return 0;
5311
5312 lnbitsize = GET_MODE_BITSIZE (lnmode);
5313 lnbitpos = first_bit & ~ (lnbitsize - 1);
5785c7de 5314 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6d716ca8
RS
5315 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5316
f76b9db2
ILT
5317 if (BYTES_BIG_ENDIAN)
5318 {
5319 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5320 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5321 }
6d716ca8 5322
db3927fb 5323 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
43a5d30b 5324 size_int (xll_bitpos));
db3927fb 5325 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
43a5d30b 5326 size_int (xrl_bitpos));
6d716ca8 5327
6d716ca8
RS
5328 if (l_const)
5329 {
db3927fb 5330 l_const = fold_convert_loc (loc, lntype, l_const);
b6cc0a72 5331 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
43a5d30b 5332 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
02103577 5333 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
db3927fb 5334 fold_build1_loc (loc, BIT_NOT_EXPR,
43a5d30b 5335 lntype, ll_mask))))
02103577 5336 {
d4ee4d25 5337 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
b6cc0a72 5338
1b0f3e79 5339 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
02103577 5340 }
6d716ca8
RS
5341 }
5342 if (r_const)
5343 {
db3927fb 5344 r_const = fold_convert_loc (loc, lntype, r_const);
d4453ee5 5345 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
43a5d30b 5346 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
02103577 5347 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
db3927fb 5348 fold_build1_loc (loc, BIT_NOT_EXPR,
43a5d30b 5349 lntype, rl_mask))))
02103577 5350 {
d4ee4d25 5351 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
ab87f8c8 5352
1b0f3e79 5353 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
02103577 5354 }
6d716ca8
RS
5355 }
5356
45dc13b9
JJ
5357 /* If the right sides are not constant, do the same for it. Also,
5358 disallow this optimization if a size or signedness mismatch occurs
5359 between the left and right sides. */
5360 if (l_const == 0)
5361 {
5362 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5363 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5364 /* Make sure the two fields on the right
5365 correspond to the left without being swapped. */
5366 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5367 return 0;
5368
5369 first_bit = MIN (lr_bitpos, rr_bitpos);
5370 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5371 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5372 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5373 volatilep);
5374 if (rnmode == VOIDmode)
5375 return 0;
5376
5377 rnbitsize = GET_MODE_BITSIZE (rnmode);
5378 rnbitpos = first_bit & ~ (rnbitsize - 1);
5379 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5380 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5381
5382 if (BYTES_BIG_ENDIAN)
5383 {
5384 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5385 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5386 }
5387
db3927fb
AH
5388 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5389 rntype, lr_mask),
43a5d30b 5390 size_int (xlr_bitpos));
db3927fb
AH
5391 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5392 rntype, rr_mask),
43a5d30b 5393 size_int (xrr_bitpos));
45dc13b9
JJ
5394
5395 /* Make a mask that corresponds to both fields being compared.
5396 Do this for both items being compared. If the operands are the
5397 same size and the bits being compared are in the same position
5398 then we can do this by masking both and comparing the masked
5399 results. */
43a5d30b
AS
5400 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5401 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
45dc13b9
JJ
5402 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5403 {
db3927fb 5404 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
45dc13b9
JJ
5405 ll_unsignedp || rl_unsignedp);
5406 if (! all_ones_mask_p (ll_mask, lnbitsize))
5407 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5408
db3927fb 5409 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
45dc13b9
JJ
5410 lr_unsignedp || rr_unsignedp);
5411 if (! all_ones_mask_p (lr_mask, rnbitsize))
5412 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5413
db3927fb
AH
5414 result = build2 (wanted_code, truth_type, lhs, rhs);
5415 goto fold_truthop_exit;
45dc13b9
JJ
5416 }
5417
5418 /* There is still another way we can do something: If both pairs of
5419 fields being compared are adjacent, we may be able to make a wider
5420 field containing them both.
5421
5422 Note that we still must mask the lhs/rhs expressions. Furthermore,
5423 the mask must be shifted to account for the shift done by
5424 make_bit_field_ref. */
5425 if ((ll_bitsize + ll_bitpos == rl_bitpos
5426 && lr_bitsize + lr_bitpos == rr_bitpos)
5427 || (ll_bitpos == rl_bitpos + rl_bitsize
5428 && lr_bitpos == rr_bitpos + rr_bitsize))
5429 {
5430 tree type;
5431
db3927fb
AH
5432 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5433 ll_bitsize + rl_bitsize,
45dc13b9 5434 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
db3927fb
AH
5435 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5436 lr_bitsize + rr_bitsize,
45dc13b9
JJ
5437 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5438
5439 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
43a5d30b 5440 size_int (MIN (xll_bitpos, xrl_bitpos)));
45dc13b9 5441 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
43a5d30b 5442 size_int (MIN (xlr_bitpos, xrr_bitpos)));
45dc13b9
JJ
5443
5444 /* Convert to the smaller type before masking out unwanted bits. */
5445 type = lntype;
5446 if (lntype != rntype)
5447 {
5448 if (lnbitsize > rnbitsize)
5449 {
db3927fb
AH
5450 lhs = fold_convert_loc (loc, rntype, lhs);
5451 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
45dc13b9
JJ
5452 type = rntype;
5453 }
5454 else if (lnbitsize < rnbitsize)
5455 {
db3927fb
AH
5456 rhs = fold_convert_loc (loc, lntype, rhs);
5457 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
45dc13b9
JJ
5458 type = lntype;
5459 }
5460 }
5461
5462 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5463 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5464
5465 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5466 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5467
db3927fb
AH
5468 result = build2 (wanted_code, truth_type, lhs, rhs);
5469 goto fold_truthop_exit;
45dc13b9
JJ
5470 }
5471
5472 return 0;
5473 }
5474
6d716ca8
RS
5475 /* Handle the case of comparisons with constants. If there is something in
5476 common between the masks, those bits of the constants must be the same.
5477 If not, the condition is always false. Test for this to avoid generating
5478 incorrect code below. */
43a5d30b 5479 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6d716ca8 5480 if (! integer_zerop (result)
43a5d30b
AS
5481 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5482 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6d716ca8
RS
5483 {
5484 if (wanted_code == NE_EXPR)
5485 {
d4ee4d25 5486 warning (0, "%<or%> of unmatched not-equal tests is always 1");
1b0f3e79 5487 return constant_boolean_node (true, truth_type);
6d716ca8
RS
5488 }
5489 else
5490 {
d4ee4d25 5491 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
1b0f3e79 5492 return constant_boolean_node (false, truth_type);
6d716ca8
RS
5493 }
5494 }
5495
45dc13b9
JJ
5496 /* Construct the expression we will return. First get the component
5497 reference we will make. Unless the mask is all ones the width of
5498 that field, perform the mask operation. Then compare with the
5499 merged constant. */
db3927fb 5500 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
45dc13b9
JJ
5501 ll_unsignedp || rl_unsignedp);
5502
43a5d30b 5503 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
45dc13b9 5504 if (! all_ones_mask_p (ll_mask, lnbitsize))
db3927fb
AH
5505 {
5506 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5507 SET_EXPR_LOCATION (result, loc);
5508 }
45dc13b9 5509
db3927fb 5510 result = build2 (wanted_code, truth_type, result,
43a5d30b 5511 const_binop (BIT_IOR_EXPR, l_const, r_const));
db3927fb
AH
5512
5513 fold_truthop_exit:
5514 SET_EXPR_LOCATION (result, loc);
5515 return result;
6d716ca8
RS
5516}
5517\f
b6cc0a72 5518/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
14a774a9
RK
5519 constant. */
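/* For instance, "MAX (i, 10) > 8" is always true and "MIN (i, 10) == 12"
   is always false; the cases below reduce such comparisons either to a
   constant or to a comparison against the MIN/MAX operand itself.  */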
5520
5521static tree
db3927fb
AH
5522optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5523 tree op0, tree op1)
14a774a9 5524{
d7e5b287 5525 tree arg0 = op0;
14a774a9 5526 enum tree_code op_code;
c071e8bc 5527 tree comp_const;
14a774a9
RK
5528 tree minmax_const;
5529 int consts_equal, consts_lt;
5530 tree inner;
5531
5532 STRIP_SIGN_NOPS (arg0);
5533
5534 op_code = TREE_CODE (arg0);
5535 minmax_const = TREE_OPERAND (arg0, 1);
db3927fb 5536 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
14a774a9
RK
5537 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5538 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5539 inner = TREE_OPERAND (arg0, 0);
5540
5541 /* If something does not permit us to optimize, return the original tree. */
5542 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5543 || TREE_CODE (comp_const) != INTEGER_CST
455f14dd 5544 || TREE_OVERFLOW (comp_const)
14a774a9 5545 || TREE_CODE (minmax_const) != INTEGER_CST
455f14dd 5546 || TREE_OVERFLOW (minmax_const))
d7e5b287 5547 return NULL_TREE;
14a774a9
RK
5548
5549 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5550 and GT_EXPR, doing the rest with recursive calls using logical
5551 simplifications. */
d7e5b287 5552 switch (code)
14a774a9
RK
5553 {
5554 case NE_EXPR: case LT_EXPR: case LE_EXPR:
d7e5b287 5555 {
db3927fb
AH
5556 tree tem
5557 = optimize_minmax_comparison (loc,
5558 invert_tree_comparison (code, false),
5559 type, op0, op1);
d817ed3b 5560 if (tem)
db3927fb 5561 return invert_truthvalue_loc (loc, tem);
d817ed3b 5562 return NULL_TREE;
d7e5b287 5563 }
14a774a9
RK
5564
5565 case GE_EXPR:
5566 return
db3927fb 5567 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
7f20a5b7 5568 optimize_minmax_comparison
db3927fb 5569 (loc, EQ_EXPR, type, arg0, comp_const),
7f20a5b7 5570 optimize_minmax_comparison
db3927fb 5571 (loc, GT_EXPR, type, arg0, comp_const));
14a774a9
RK
5572
5573 case EQ_EXPR:
5574 if (op_code == MAX_EXPR && consts_equal)
5575 /* MAX (X, 0) == 0 -> X <= 0 */
db3927fb 5576 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
14a774a9
RK
5577
5578 else if (op_code == MAX_EXPR && consts_lt)
5579 /* MAX (X, 0) == 5 -> X == 5 */
db3927fb 5580 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
14a774a9
RK
5581
5582 else if (op_code == MAX_EXPR)
5583 /* MAX (X, 0) == -1 -> false */
db3927fb 5584 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
14a774a9
RK
5585
5586 else if (consts_equal)
5587 /* MIN (X, 0) == 0 -> X >= 0 */
db3927fb 5588 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
14a774a9
RK
5589
5590 else if (consts_lt)
5591 /* MIN (X, 0) == 5 -> false */
db3927fb 5592 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
14a774a9
RK
5593
5594 else
5595 /* MIN (X, 0) == -1 -> X == -1 */
db3927fb 5596 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
14a774a9
RK
5597
5598 case GT_EXPR:
5599 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5600 /* MAX (X, 0) > 0 -> X > 0
5601 MAX (X, 0) > 5 -> X > 5 */
db3927fb 5602 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
14a774a9
RK
5603
5604 else if (op_code == MAX_EXPR)
5605 /* MAX (X, 0) > -1 -> true */
db3927fb 5606 return omit_one_operand_loc (loc, type, integer_one_node, inner);
14a774a9
RK
5607
5608 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5609 /* MIN (X, 0) > 0 -> false
5610 MIN (X, 0) > 5 -> false */
db3927fb 5611 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
14a774a9
RK
5612
5613 else
5614 /* MIN (X, 0) > -1 -> X > -1 */
db3927fb 5615 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
14a774a9
RK
5616
5617 default:
d7e5b287 5618 return NULL_TREE;
14a774a9
RK
5619 }
5620}
5621\f
1baa375f
RK
5622/* T is an integer expression that is being multiplied, divided, or taken a
5623 modulus (CODE says which and what kind of divide or modulus) by a
5624 constant C. See if we can eliminate that operation by folding it with
5625 other operations already in T. WIDE_TYPE, if non-null, is a type that
5626 should be used for the computation if wider than our type.
5627
cff27795
EB
5628 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5629 (X * 2) + (Y * 4). We must, however, be assured that either the original
8e1ca098
RH
5630 expression would not overflow or that overflow is undefined for the type
5631 in the language in question.
5632
1baa375f 5633 If we return a non-null expression, it is an equivalent form of the
6ac01510
ILT
5634 original computation, but need not be in the original type.
5635
 5636 We set *STRICT_OVERFLOW_P to true if the return value depends on
5637 signed overflow being undefined. Otherwise we do not change
5638 *STRICT_OVERFLOW_P. */
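/* For instance, when signed overflow is undefined, (X * 8) % 4 reduces
   to 0 and (X * 6) / 2 to X * 3; *STRICT_OVERFLOW_P is set whenever a
   result of this kind relies on that assumption.  */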
1baa375f
RK
5639
5640static tree
6ac01510
ILT
5641extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5642 bool *strict_overflow_p)
cdd4b0d4
AB
5643{
5644 /* To avoid exponential search depth, refuse to allow recursion past
5645 three levels. Beyond that (1) it's highly unlikely that we'll find
5646 something interesting and (2) we've probably processed it before
5647 when we built the inner expression. */
5648
5649 static int depth;
5650 tree ret;
5651
5652 if (depth > 3)
5653 return NULL;
5654
5655 depth++;
6ac01510 5656 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
cdd4b0d4
AB
5657 depth--;
5658
5659 return ret;
5660}
5661
5662static tree
6ac01510
ILT
5663extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5664 bool *strict_overflow_p)
1baa375f
RK
5665{
5666 tree type = TREE_TYPE (t);
5667 enum tree_code tcode = TREE_CODE (t);
b6cc0a72 5668 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
1baa375f
RK
5669 > GET_MODE_SIZE (TYPE_MODE (type)))
5670 ? wide_type : type);
5671 tree t1, t2;
5672 int same_p = tcode == code;
9d0878fd 5673 tree op0 = NULL_TREE, op1 = NULL_TREE;
6ac01510 5674 bool sub_strict_overflow_p;
1baa375f
RK
5675
5676 /* Don't deal with constants of zero here; they confuse the code below. */
5677 if (integer_zerop (c))
8e1ca098 5678 return NULL_TREE;
1baa375f 5679
6615c446 5680 if (TREE_CODE_CLASS (tcode) == tcc_unary)
1baa375f
RK
5681 op0 = TREE_OPERAND (t, 0);
5682
6615c446 5683 if (TREE_CODE_CLASS (tcode) == tcc_binary)
1baa375f
RK
5684 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5685
5686 /* Note that we need not handle conditional operations here since fold
5687 already handles those cases. So just do arithmetic here. */
5688 switch (tcode)
5689 {
5690 case INTEGER_CST:
5691 /* For a constant, we can always simplify if we are a multiply
5692 or (for divide and modulus) if it is a multiple of our constant. */
5693 if (code == MULT_EXPR
43a5d30b 5694 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
088414c1 5695 return const_binop (code, fold_convert (ctype, t),
43a5d30b 5696 fold_convert (ctype, c));
1baa375f
RK
5697 break;
5698
1043771b 5699 CASE_CONVERT: case NON_LVALUE_EXPR:
43e4a9d8 5700 /* If op0 is an expression ... */
6615c446
JO
5701 if ((COMPARISON_CLASS_P (op0)
5702 || UNARY_CLASS_P (op0)
5703 || BINARY_CLASS_P (op0)
5039610b 5704 || VL_EXP_CLASS_P (op0)
6615c446 5705 || EXPRESSION_CLASS_P (op0))
fcb4587e
RG
5706 /* ... and has wrapping overflow, and its type is smaller
5707 than ctype, then we cannot pass through as widening. */
5708 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
43e4a9d8
EB
5709 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5710 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
fcb4587e
RG
5711 && (TYPE_PRECISION (ctype)
5712 > TYPE_PRECISION (TREE_TYPE (op0))))
a0fac73d
RS
5713 /* ... or this is a truncation (t is narrower than op0),
5714 then we cannot pass through this narrowing. */
fcb4587e
RG
5715 || (TYPE_PRECISION (type)
5716 < TYPE_PRECISION (TREE_TYPE (op0)))
068d2c9d
MM
5717 /* ... or signedness changes for division or modulus,
5718 then we cannot pass through this conversion. */
5719 || (code != MULT_EXPR
8df83eae 5720 && (TYPE_UNSIGNED (ctype)
ac029795
RG
5721 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5722 /* ... or has undefined overflow while the converted to
5723 type has not, we cannot do the operation in the inner type
5724 as that would introduce undefined overflow. */
5725 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5726 && !TYPE_OVERFLOW_UNDEFINED (type))))
eff9c80d
RH
5727 break;
5728
1baa375f 5729 /* Pass the constant down and see if we can make a simplification. If
59adecfa
RK
5730 we can, replace this expression with the inner simplification for
5731 possible later conversion to our or some other type. */
088414c1 5732 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
3cd58fd7 5733 && TREE_CODE (t2) == INTEGER_CST
455f14dd 5734 && !TREE_OVERFLOW (t2)
3cd58fd7
OH
5735 && (0 != (t1 = extract_muldiv (op0, t2, code,
5736 code == MULT_EXPR
6ac01510
ILT
5737 ? ctype : NULL_TREE,
5738 strict_overflow_p))))
1baa375f
RK
5739 return t1;
5740 break;
5741
47d42ce2
JJ
5742 case ABS_EXPR:
5743 /* If widening the type changes it from signed to unsigned, then we
5744 must avoid building ABS_EXPR itself as unsigned. */
5745 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5746 {
12753674 5747 tree cstype = (*signed_type_for) (ctype);
6ac01510
ILT
5748 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5749 != 0)
47d42ce2 5750 {
7f20a5b7 5751 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
47d42ce2
JJ
5752 return fold_convert (ctype, t1);
5753 }
5754 break;
5755 }
a0857153
RG
5756 /* If the constant is negative, we cannot simplify this. */
5757 if (tree_int_cst_sgn (c) == -1)
5758 break;
47d42ce2
JJ
5759 /* FALLTHROUGH */
5760 case NEGATE_EXPR:
6ac01510
ILT
5761 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5762 != 0)
7f20a5b7 5763 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
1baa375f
RK
5764 break;
5765
5766 case MIN_EXPR: case MAX_EXPR:
13393c8a
JW
5767 /* If widening the type changes the signedness, then we can't perform
5768 this optimization as that changes the result. */
8df83eae 5769 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
13393c8a
JW
5770 break;
5771
1baa375f 5772 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6ac01510
ILT
5773 sub_strict_overflow_p = false;
5774 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5775 &sub_strict_overflow_p)) != 0
5776 && (t2 = extract_muldiv (op1, c, code, wide_type,
5777 &sub_strict_overflow_p)) != 0)
59adecfa
RK
5778 {
5779 if (tree_int_cst_sgn (c) < 0)
5780 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6ac01510
ILT
5781 if (sub_strict_overflow_p)
5782 *strict_overflow_p = true;
7f20a5b7
KH
5783 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5784 fold_convert (ctype, t2));
59adecfa 5785 }
1baa375f
RK
5786 break;
5787
1baa375f
RK
5788 case LSHIFT_EXPR: case RSHIFT_EXPR:
5789 /* If the second operand is constant, this is a multiplication
5790 or floor division, by a power of two, so we can treat it that
9e629a80
JM
5791 way unless the multiplier or divisor overflows. Signed
5792 left-shift overflow is implementation-defined rather than
5793 undefined in C90, so do not convert signed left shift into
5794 multiplication. */
1baa375f 5795 if (TREE_CODE (op1) == INTEGER_CST
9e629a80 5796 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
d08230fe
NC
5797 /* const_binop may not detect overflow correctly,
5798 so check for it explicitly here. */
5799 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5800 && TREE_INT_CST_HIGH (op1) == 0
088414c1
RS
5801 && 0 != (t1 = fold_convert (ctype,
5802 const_binop (LSHIFT_EXPR,
5803 size_one_node,
43a5d30b 5804 op1)))
455f14dd 5805 && !TREE_OVERFLOW (t1))
59ce6d6b
RS
5806 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5807 ? MULT_EXPR : FLOOR_DIV_EXPR,
db3927fb
AH
5808 ctype,
5809 fold_convert (ctype, op0),
5810 t1),
6ac01510 5811 c, code, wide_type, strict_overflow_p);
1baa375f
RK
5812 break;
5813
5814 case PLUS_EXPR: case MINUS_EXPR:
5815 /* See if we can eliminate the operation on both sides. If we can, we
5816 can return a new PLUS or MINUS. If we can't, the only remaining
5817 cases where we can do anything are if the second operand is a
5818 constant. */
6ac01510
ILT
5819 sub_strict_overflow_p = false;
5820 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5821 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
fba2c0cd
JJ
5822 if (t1 != 0 && t2 != 0
5823 && (code == MULT_EXPR
b77f3744
CE
5824 /* If not multiplication, we can only do this if both operands
5825 are divisible by c. */
5826 || (multiple_of_p (ctype, op0, c)
5827 && multiple_of_p (ctype, op1, c))))
6ac01510
ILT
5828 {
5829 if (sub_strict_overflow_p)
5830 *strict_overflow_p = true;
5831 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5832 fold_convert (ctype, t2));
5833 }
1baa375f 5834
59adecfa
RK
5835 /* If this was a subtraction, negate OP1 and set it to be an addition.
5836 This simplifies the logic below. */
5837 if (tcode == MINUS_EXPR)
ffaf6f25
EB
5838 {
5839 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5840 /* If OP1 was not easily negatable, the constant may be OP0. */
5841 if (TREE_CODE (op0) == INTEGER_CST)
5842 {
5843 tree tem = op0;
5844 op0 = op1;
5845 op1 = tem;
5846 tem = t1;
5847 t1 = t2;
5848 t2 = tem;
5849 }
5850 }
59adecfa 5851
f9011d04
RK
5852 if (TREE_CODE (op1) != INTEGER_CST)
5853 break;
5854
59adecfa
RK
 5855 /* If either OP1 or C is negative, this optimization is not safe for
5856 some of the division and remainder types while for others we need
5857 to change the code. */
5858 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5859 {
5860 if (code == CEIL_DIV_EXPR)
5861 code = FLOOR_DIV_EXPR;
59adecfa
RK
5862 else if (code == FLOOR_DIV_EXPR)
5863 code = CEIL_DIV_EXPR;
0629440f
RK
5864 else if (code != MULT_EXPR
5865 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
59adecfa
RK
5866 break;
5867 }
5868
12644a9a
TM
5869 /* If it's a multiply or a division/modulus operation of a multiple
5870 of our constant, do the operation and verify it doesn't overflow. */
5871 if (code == MULT_EXPR
43a5d30b 5872 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
dd3f0101 5873 {
088414c1 5874 op1 = const_binop (code, fold_convert (ctype, op1),
43a5d30b 5875 fold_convert (ctype, c));
41ba7ed7
RS
5876 /* We allow the constant to overflow with wrapping semantics. */
5877 if (op1 == 0
eeef0e45 5878 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
dd3f0101
KH
5879 break;
5880 }
12644a9a 5881 else
dd3f0101 5882 break;
59adecfa 5883
23cdce68
RH
 5884 /* If we have an unsigned type that is not a sizetype, we cannot widen
5885 the operation since it will change the result if the original
 5886 computation overflowed. */
8df83eae 5887 if (TYPE_UNSIGNED (ctype)
7393c642 5888 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
23cdce68
RH
5889 && ctype != type)
5890 break;
5891
1baa375f 5892 /* If we were able to eliminate our operation from the first side,
59adecfa
RK
5893 apply our operation to the second side and reform the PLUS. */
5894 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
7f20a5b7 5895 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
1baa375f
RK
5896
5897 /* The last case is if we are a multiply. In that case, we can
5898 apply the distributive law to commute the multiply and addition
30f7a378 5899 if the multiplication of the constants doesn't overflow. */
59adecfa 5900 if (code == MULT_EXPR)
7f20a5b7
KH
5901 return fold_build2 (tcode, ctype,
5902 fold_build2 (code, ctype,
5903 fold_convert (ctype, op0),
5904 fold_convert (ctype, c)),
5905 op1);
1baa375f
RK
5906
5907 break;
5908
5909 case MULT_EXPR:
5910 /* We have a special case here if we are doing something like
5911 (C * 8) % 4 since we know that's zero. */
5912 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5913 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
beeab17c
RG
5914 /* If the multiplication can overflow we cannot optimize this.
5915 ??? Until we can properly mark individual operations as
5916 not overflowing we need to treat sizetype special here as
 5917 stor-layout relies on this optimization to make
5918 DECL_FIELD_BIT_OFFSET always a constant. */
5919 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5920 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5921 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
1baa375f 5922 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
43a5d30b 5923 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
beeab17c
RG
5924 {
5925 *strict_overflow_p = true;
5926 return omit_one_operand (type, integer_zero_node, op0);
5927 }
1baa375f 5928
30f7a378 5929 /* ... fall through ... */
1baa375f
RK
5930
5931 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5932 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5933 /* If we can extract our operation from the LHS, do so and return a
5934 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5935 do something only if the second operand is a constant. */
5936 if (same_p
6ac01510
ILT
5937 && (t1 = extract_muldiv (op0, c, code, wide_type,
5938 strict_overflow_p)) != 0)
7f20a5b7
KH
5939 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5940 fold_convert (ctype, op1));
1baa375f 5941 else if (tcode == MULT_EXPR && code == MULT_EXPR
6ac01510
ILT
5942 && (t1 = extract_muldiv (op1, c, code, wide_type,
5943 strict_overflow_p)) != 0)
7f20a5b7
KH
5944 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5945 fold_convert (ctype, t1));
1baa375f
RK
5946 else if (TREE_CODE (op1) != INTEGER_CST)
5947 return 0;
5948
5949 /* If these are the same operation types, we can associate them
5950 assuming no overflow. */
5951 if (tcode == code
db3927fb
AH
5952 && 0 != (t1 = int_const_binop (MULT_EXPR,
5953 fold_convert (ctype, op1),
81ad578e 5954 fold_convert (ctype, c), 1))
9589f23e 5955 && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
81ad578e
RG
5956 (TYPE_UNSIGNED (ctype)
5957 && tcode != MULT_EXPR) ? -1 : 1,
5958 TREE_OVERFLOW (t1)))
455f14dd 5959 && !TREE_OVERFLOW (t1))
7f20a5b7 5960 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
1baa375f
RK
5961
5962 /* If these operations "cancel" each other, we have the main
5963 optimizations of this pass, which occur when either constant is a
 5964 multiple of the other, in which case we replace this with an
 b6cc0a72 5965 operation of either CODE or TCODE.
8e1ca098 5966
f5143c46 5967 If we have an unsigned type that is not a sizetype, we cannot do
8e1ca098
RH
5968 this since it will change the result if the original computation
5969 overflowed. */
eeef0e45 5970 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
7393c642 5971 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
8e1ca098
RH
5972 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5973 || (tcode == MULT_EXPR
5974 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
e6ebd07f
ZD
5975 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5976 && code != MULT_EXPR)))
1baa375f 5977 {
43a5d30b 5978 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6ac01510
ILT
5979 {
5980 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5981 *strict_overflow_p = true;
5982 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5983 fold_convert (ctype,
5984 const_binop (TRUNC_DIV_EXPR,
43a5d30b 5985 op1, c)));
6ac01510 5986 }
43a5d30b 5987 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6ac01510
ILT
5988 {
5989 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5990 *strict_overflow_p = true;
5991 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5992 fold_convert (ctype,
5993 const_binop (TRUNC_DIV_EXPR,
43a5d30b 5994 c, op1)));
6ac01510 5995 }
1baa375f
RK
5996 }
5997 break;
5998
5999 default:
6000 break;
6001 }
6002
6003 return 0;
6004}
6005\f
f628873f
MM
6006/* Return a node which has the indicated constant VALUE (either 0 or
6007 1), and is of the indicated TYPE. */
6008
e9ea8bd5 6009tree
fa8db1f7 6010constant_boolean_node (int value, tree type)
f628873f
MM
6011{
6012 if (type == integer_type_node)
6013 return value ? integer_one_node : integer_zero_node;
9bb80bb2
RS
6014 else if (type == boolean_type_node)
6015 return value ? boolean_true_node : boolean_false_node;
b6cc0a72 6016 else
7d60be94 6017 return build_int_cst (type, value);
f628873f
MM
6018}
6019
020d90ee 6020
1f77b5da 6021/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
68626d4f
MM
 6022 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6023 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
cc2902df 6024 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
68626d4f
MM
6025 COND is the first argument to CODE; otherwise (as in the example
6026 given here), it is the second argument. TYPE is the type of the
2b8a92de 6027 original expression. Return NULL_TREE if no simplification is
b3e65ebb 6028 possible. */
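/* For instance, "(b ? x : y) + 1" can become "b ? x + 1 : y + 1"; the
   checks below restrict this to cases where ARG need not be wrapped in
   a SAVE_EXPR and at least one resulting branch actually simplifies.  */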
68626d4f
MM
6029
6030static tree
db3927fb
AH
6031fold_binary_op_with_conditional_arg (location_t loc,
6032 enum tree_code code,
e9da788c
KH
6033 tree type, tree op0, tree op1,
6034 tree cond, tree arg, int cond_first_p)
68626d4f 6035{
e9da788c 6036 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
92db3ec9 6037 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
68626d4f
MM
6038 tree test, true_value, false_value;
6039 tree lhs = NULL_TREE;
6040 tree rhs = NULL_TREE;
b3e65ebb 6041
68626d4f
MM
6042 if (TREE_CODE (cond) == COND_EXPR)
6043 {
6044 test = TREE_OPERAND (cond, 0);
6045 true_value = TREE_OPERAND (cond, 1);
6046 false_value = TREE_OPERAND (cond, 2);
6047 /* If this operand throws an expression, then it does not make
6048 sense to try to perform a logical or arithmetic operation
f4085d4c 6049 involving it. */
68626d4f 6050 if (VOID_TYPE_P (TREE_TYPE (true_value)))
f4085d4c 6051 lhs = true_value;
68626d4f 6052 if (VOID_TYPE_P (TREE_TYPE (false_value)))
f4085d4c 6053 rhs = false_value;
68626d4f
MM
6054 }
6055 else
6056 {
6057 tree testtype = TREE_TYPE (cond);
6058 test = cond;
1b0f3e79
RS
6059 true_value = constant_boolean_node (true, testtype);
6060 false_value = constant_boolean_node (false, testtype);
68626d4f 6061 }
dd3f0101 6062
9e9ef331
EB
6063 /* This transformation is only worthwhile if we don't have to wrap ARG
6064 in a SAVE_EXPR and the operation can be simplified on at least one
 6065 of the branches once it's pushed inside the COND_EXPR. */
6066 if (!TREE_CONSTANT (arg)
6067 && (TREE_SIDE_EFFECTS (arg)
6068 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6069 return NULL_TREE;
6070
db3927fb 6071 arg = fold_convert_loc (loc, arg_type, arg);
68626d4f 6072 if (lhs == 0)
3b70b82a 6073 {
db3927fb 6074 true_value = fold_convert_loc (loc, cond_type, true_value);
6405f32f 6075 if (cond_first_p)
db3927fb 6076 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6405f32f 6077 else
db3927fb 6078 lhs = fold_build2_loc (loc, code, type, arg, true_value);
3b70b82a 6079 }
68626d4f 6080 if (rhs == 0)
3b70b82a 6081 {
db3927fb 6082 false_value = fold_convert_loc (loc, cond_type, false_value);
6405f32f 6083 if (cond_first_p)
db3927fb 6084 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6405f32f 6085 else
db3927fb 6086 rhs = fold_build2_loc (loc, code, type, arg, false_value);
3b70b82a 6087 }
f4085d4c 6088
9e9ef331
EB
6089 /* Check that we have simplified at least one of the branches. */
6090 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6091 return NULL_TREE;
6092
6093 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
68626d4f
MM
6094}
6095
ab87f8c8 6096\f
71925bc0
RS
6097/* Subroutine of fold() that checks for the addition of +/- 0.0.
6098
6099 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6100 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6101 ADDEND is the same as X.
6102
cc2902df 6103 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
71925bc0
RS
6104 and finite. The problematic cases are when X is zero, and its mode
6105 has signed zeros. In the case of rounding towards -infinity,
6106 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6107 modes, X + 0 is not the same as X because -0 + 0 is 0. */
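/* Concretely, under the default rounding mode an IEEE "x - 0.0" can be
   folded to "x", but "x + 0.0" cannot, because (-0.0) + 0.0 yields +0.0
   rather than -0.0.  */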
6108
2dc0f633 6109bool
ac545c64 6110fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
71925bc0
RS
6111{
6112 if (!real_zerop (addend))
6113 return false;
6114
3bc400cd
RS
6115 /* Don't allow the fold with -fsignaling-nans. */
6116 if (HONOR_SNANS (TYPE_MODE (type)))
6117 return false;
6118
71925bc0
RS
6119 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6120 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6121 return true;
6122
6123 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6124 if (TREE_CODE (addend) == REAL_CST
6125 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6126 negate = !negate;
6127
6128 /* The mode has signed zeros, and we have to honor their sign.
6129 In this situation, there is only one case we can return true for.
6130 X - 0 is the same as X unless rounding towards -infinity is
6131 supported. */
6132 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6133}
6134
c876997f
RS
6135/* Subroutine of fold() that checks comparisons of built-in math
6136 functions against real constants.
6137
6138 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6139 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6140 is the type of the result and ARG0 and ARG1 are the operands of the
6141 comparison. ARG1 must be a TREE_REAL_CST.
6142
6143 The function returns the constant folded tree if a simplification
6144 can be made, and NULL_TREE otherwise. */
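/* For instance, "sqrt (x) < -1.0" folds to false via the negative-bound
   case below, and "sqrt (x) > 3.0" is rewritten as "x > 9.0".  */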
6145
6146static tree
db3927fb
AH
6147fold_mathfn_compare (location_t loc,
6148 enum built_in_function fcode, enum tree_code code,
75040a04 6149 tree type, tree arg0, tree arg1)
c876997f
RS
6150{
6151 REAL_VALUE_TYPE c;
6152
82b4201f 6153 if (BUILTIN_SQRT_P (fcode))
c876997f 6154 {
5039610b 6155 tree arg = CALL_EXPR_ARG (arg0, 0);
c876997f
RS
6156 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6157
6158 c = TREE_REAL_CST (arg1);
6159 if (REAL_VALUE_NEGATIVE (c))
6160 {
6161 /* sqrt(x) < y is always false, if y is negative. */
6162 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
db3927fb 6163 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
c876997f
RS
6164
6165 /* sqrt(x) > y is always true, if y is negative and we
6166 don't care about NaNs, i.e. negative values of x. */
6167 if (code == NE_EXPR || !HONOR_NANS (mode))
db3927fb 6168 return omit_one_operand_loc (loc, type, integer_one_node, arg);
c876997f
RS
6169
6170 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
db3927fb 6171 return fold_build2_loc (loc, GE_EXPR, type, arg,
7f20a5b7 6172 build_real (TREE_TYPE (arg), dconst0));
c876997f
RS
6173 }
6174 else if (code == GT_EXPR || code == GE_EXPR)
6175 {
6176 REAL_VALUE_TYPE c2;
6177
6178 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6179 real_convert (&c2, mode, &c2);
6180
6181 if (REAL_VALUE_ISINF (c2))
6182 {
6183 /* sqrt(x) > y is x == +Inf, when y is very large. */
6184 if (HONOR_INFINITIES (mode))
db3927fb 6185 return fold_build2_loc (loc, EQ_EXPR, type, arg,
7f20a5b7 6186 build_real (TREE_TYPE (arg), c2));
c876997f
RS
6187
6188 /* sqrt(x) > y is always false, when y is very large
6189 and we don't care about infinities. */
db3927fb 6190 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
c876997f
RS
6191 }
6192
6193 /* sqrt(x) > c is the same as x > c*c. */
db3927fb 6194 return fold_build2_loc (loc, code, type, arg,
7f20a5b7 6195 build_real (TREE_TYPE (arg), c2));
c876997f
RS
6196 }
6197 else if (code == LT_EXPR || code == LE_EXPR)
6198 {
6199 REAL_VALUE_TYPE c2;
6200
6201 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6202 real_convert (&c2, mode, &c2);
6203
6204 if (REAL_VALUE_ISINF (c2))
6205 {
6206 /* sqrt(x) < y is always true, when y is a very large
6207 value and we don't care about NaNs or Infinities. */
6208 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
db3927fb 6209 return omit_one_operand_loc (loc, type, integer_one_node, arg);
c876997f
RS
6210
6211 /* sqrt(x) < y is x != +Inf when y is very large and we
6212 don't care about NaNs. */
6213 if (! HONOR_NANS (mode))
db3927fb 6214 return fold_build2_loc (loc, NE_EXPR, type, arg,
7f20a5b7 6215 build_real (TREE_TYPE (arg), c2));
c876997f
RS
6216
6217 /* sqrt(x) < y is x >= 0 when y is very large and we
6218 don't care about Infinities. */
6219 if (! HONOR_INFINITIES (mode))
db3927fb 6220 return fold_build2_loc (loc, GE_EXPR, type, arg,
7f20a5b7 6221 build_real (TREE_TYPE (arg), dconst0));
c876997f
RS
6222
6223 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5785c7de 6224 if (lang_hooks.decls.global_bindings_p () != 0
7a6cdb44 6225 || CONTAINS_PLACEHOLDER_P (arg))
c876997f
RS
6226 return NULL_TREE;
6227
6228 arg = save_expr (arg);
db3927fb
AH
6229 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6230 fold_build2_loc (loc, GE_EXPR, type, arg,
7f20a5b7
KH
6231 build_real (TREE_TYPE (arg),
6232 dconst0)),
db3927fb 6233 fold_build2_loc (loc, NE_EXPR, type, arg,
7f20a5b7
KH
6234 build_real (TREE_TYPE (arg),
6235 c2)));
c876997f
RS
6236 }
6237
6238 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6239 if (! HONOR_NANS (mode))
db3927fb 6240 return fold_build2_loc (loc, code, type, arg,
7f20a5b7 6241 build_real (TREE_TYPE (arg), c2));
c876997f
RS
6242
6243 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5785c7de 6244 if (lang_hooks.decls.global_bindings_p () == 0
7a6cdb44 6245 && ! CONTAINS_PLACEHOLDER_P (arg))
c876997f
RS
6246 {
6247 arg = save_expr (arg);
db3927fb
AH
6248 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6249 fold_build2_loc (loc, GE_EXPR, type, arg,
7f20a5b7
KH
6250 build_real (TREE_TYPE (arg),
6251 dconst0)),
db3927fb 6252 fold_build2_loc (loc, code, type, arg,
7f20a5b7
KH
6253 build_real (TREE_TYPE (arg),
6254 c2)));
c876997f
RS
6255 }
6256 }
6257 }
6258
6259 return NULL_TREE;
6260}
6261
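/* Editorial illustration, not part of fold-const.c: the source-level
   effect of the sqrt folds above.  The negative-bound case is exact;
   the c*c cases depend on the caller only applying them under unsafe
   math optimizations, since c*c is rounded.  */
#include <math.h>

int
sqrt_cmp_folded (double x)
{
  /* sqrt (x) > -1.0 becomes x >= 0.0: both are false exactly when
     x is NaN or negative.  */
  return x >= 0.0;
}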
9ddae796
RS
6262/* Subroutine of fold() that optimizes comparisons against Infinities,
6263 either +Inf or -Inf.
6264
6265 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6266 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6267 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6268
6269 The function returns the constant folded tree if a simplification
6270 can be made, and NULL_TREE otherwise. */
6271
6272static tree
db3927fb
AH
6273fold_inf_compare (location_t loc, enum tree_code code, tree type,
6274 tree arg0, tree arg1)
9ddae796 6275{
18c2511c
RS
6276 enum machine_mode mode;
6277 REAL_VALUE_TYPE max;
6278 tree temp;
6279 bool neg;
6280
6281 mode = TYPE_MODE (TREE_TYPE (arg0));
6282
9ddae796 6283 /* For negative infinity swap the sense of the comparison. */
18c2511c
RS
6284 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6285 if (neg)
9ddae796
RS
6286 code = swap_tree_comparison (code);
6287
6288 switch (code)
6289 {
6290 case GT_EXPR:
 6291 /* x > +Inf is always false, if we ignore sNaNs. */
18c2511c 6292 if (HONOR_SNANS (mode))
9ddae796 6293 return NULL_TREE;
db3927fb 6294 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9ddae796
RS
6295
6296 case LE_EXPR:
 6297 /* x <= +Inf is always true, if we don't care about NaNs. */
18c2511c 6298 if (! HONOR_NANS (mode))
db3927fb 6299 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9ddae796
RS
6300
 6301 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5785c7de 6302 if (lang_hooks.decls.global_bindings_p () == 0
7a6cdb44 6303 && ! CONTAINS_PLACEHOLDER_P (arg0))
9ddae796
RS
6304 {
6305 arg0 = save_expr (arg0);
db3927fb 6306 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
9ddae796
RS
6307 }
6308 break;
6309
18c2511c
RS
6310 case EQ_EXPR:
6311 case GE_EXPR:
6312 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6313 real_maxval (&max, neg, mode);
db3927fb 6314 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
7f20a5b7 6315 arg0, build_real (TREE_TYPE (arg0), max));
18c2511c
RS
6316
6317 case LT_EXPR:
6318 /* x < +Inf is always equal to x <= DBL_MAX. */
6319 real_maxval (&max, neg, mode);
db3927fb 6320 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7f20a5b7 6321 arg0, build_real (TREE_TYPE (arg0), max));
18c2511c
RS
6322
6323 case NE_EXPR:
6324 /* x != +Inf is always equal to !(x > DBL_MAX). */
6325 real_maxval (&max, neg, mode);
6326 if (! HONOR_NANS (mode))
db3927fb 6327 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7f20a5b7 6328 arg0, build_real (TREE_TYPE (arg0), max));
3100d647 6329
db3927fb 6330 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
7f20a5b7 6331 arg0, build_real (TREE_TYPE (arg0), max));
db3927fb 6332 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
9ddae796
RS
6333
6334 default:
6335 break;
6336 }
6337
6338 return NULL_TREE;
6339}
71925bc0 6340
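/* Editorial illustration, not part of fold-const.c: x < +Inf and
   x <= DBL_MAX agree for every double (finite, infinite, or NaN),
   which is the equivalence the LT_EXPR case above relies on.  */
#include <float.h>

int
lt_inf_folded (double x)
{
  return x <= DBL_MAX;   /* what x < __builtin_inf () folds to */
}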
8dc2384c 6341/* Subroutine of fold() that optimizes comparisons of a division by
1ea7e6ad 6342 a nonzero integer constant against an integer constant, i.e.
8dc2384c
RS
6343 X/C1 op C2.
6344
6345 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6346 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
 6347 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6348
6349 The function returns the constant folded tree if a simplification
6350 can be made, and NULL_TREE otherwise. */
6351
6352static tree
db3927fb
AH
6353fold_div_compare (location_t loc,
6354 enum tree_code code, tree type, tree arg0, tree arg1)
8dc2384c
RS
6355{
6356 tree prod, tmp, hi, lo;
6357 tree arg00 = TREE_OPERAND (arg0, 0);
6358 tree arg01 = TREE_OPERAND (arg0, 1);
9589f23e 6359 double_int val;
6b7283ac 6360 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
d56ee62b 6361 bool neg_overflow;
8dc2384c
RS
6362 int overflow;
6363
6364 /* We have to do this the hard way to detect unsigned overflow.
6365 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6b7283ac
EB
6366 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6367 TREE_INT_CST_HIGH (arg01),
6368 TREE_INT_CST_LOW (arg1),
6369 TREE_INT_CST_HIGH (arg1),
9589f23e
AS
6370 &val.low, &val.high, unsigned_p);
6371 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
d56ee62b 6372 neg_overflow = false;
8dc2384c 6373
6b7283ac 6374 if (unsigned_p)
8dc2384c 6375 {
000d8d44
RS
6376 tmp = int_const_binop (MINUS_EXPR, arg01,
6377 build_int_cst (TREE_TYPE (arg01), 1), 0);
8dc2384c
RS
6378 lo = prod;
6379
6380 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6b7283ac
EB
6381 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6382 TREE_INT_CST_HIGH (prod),
6383 TREE_INT_CST_LOW (tmp),
6384 TREE_INT_CST_HIGH (tmp),
9589f23e
AS
6385 &val.low, &val.high, unsigned_p);
6386 hi = force_fit_type_double (TREE_TYPE (arg00), val,
d95787e6 6387 -1, overflow | TREE_OVERFLOW (prod));
8dc2384c
RS
6388 }
6389 else if (tree_int_cst_sgn (arg01) >= 0)
6390 {
000d8d44
RS
6391 tmp = int_const_binop (MINUS_EXPR, arg01,
6392 build_int_cst (TREE_TYPE (arg01), 1), 0);
8dc2384c
RS
6393 switch (tree_int_cst_sgn (arg1))
6394 {
6395 case -1:
d56ee62b 6396 neg_overflow = true;
8dc2384c
RS
6397 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6398 hi = prod;
6399 break;
6400
6401 case 0:
6402 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6403 hi = tmp;
6404 break;
6405
6406 case 1:
6407 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6408 lo = prod;
6409 break;
6410
6411 default:
0bccc606 6412 gcc_unreachable ();
8dc2384c
RS
6413 }
6414 }
6415 else
6416 {
d2e74f6f
RS
6417 /* A negative divisor reverses the relational operators. */
6418 code = swap_tree_comparison (code);
6419
000d8d44
RS
6420 tmp = int_const_binop (PLUS_EXPR, arg01,
6421 build_int_cst (TREE_TYPE (arg01), 1), 0);
8dc2384c
RS
6422 switch (tree_int_cst_sgn (arg1))
6423 {
6424 case -1:
6425 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6426 lo = prod;
6427 break;
6428
6429 case 0:
6430 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6431 lo = tmp;
6432 break;
6433
6434 case 1:
d56ee62b
RS
6435 neg_overflow = true;
6436 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
8dc2384c
RS
6437 hi = prod;
6438 break;
6439
6440 default:
0bccc606 6441 gcc_unreachable ();
8dc2384c
RS
6442 }
6443 }
6444
6445 switch (code)
6446 {
6447 case EQ_EXPR:
6448 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
db3927fb 6449 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
8dc2384c 6450 if (TREE_OVERFLOW (hi))
db3927fb 6451 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
8dc2384c 6452 if (TREE_OVERFLOW (lo))
db3927fb
AH
6453 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6454 return build_range_check (loc, type, arg00, 1, lo, hi);
8dc2384c
RS
6455
6456 case NE_EXPR:
6457 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
db3927fb 6458 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
8dc2384c 6459 if (TREE_OVERFLOW (hi))
db3927fb 6460 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
8dc2384c 6461 if (TREE_OVERFLOW (lo))
db3927fb
AH
6462 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6463 return build_range_check (loc, type, arg00, 0, lo, hi);
8dc2384c
RS
6464
6465 case LT_EXPR:
6466 if (TREE_OVERFLOW (lo))
d56ee62b
RS
6467 {
6468 tmp = neg_overflow ? integer_zero_node : integer_one_node;
db3927fb 6469 return omit_one_operand_loc (loc, type, tmp, arg00);
d56ee62b 6470 }
db3927fb 6471 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
8dc2384c
RS
6472
6473 case LE_EXPR:
6474 if (TREE_OVERFLOW (hi))
d56ee62b
RS
6475 {
6476 tmp = neg_overflow ? integer_zero_node : integer_one_node;
db3927fb 6477 return omit_one_operand_loc (loc, type, tmp, arg00);
d56ee62b 6478 }
db3927fb 6479 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
8dc2384c
RS
6480
6481 case GT_EXPR:
6482 if (TREE_OVERFLOW (hi))
d56ee62b
RS
6483 {
6484 tmp = neg_overflow ? integer_one_node : integer_zero_node;
db3927fb 6485 return omit_one_operand_loc (loc, type, tmp, arg00);
d56ee62b 6486 }
db3927fb 6487 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
8dc2384c
RS
6488
6489 case GE_EXPR:
6490 if (TREE_OVERFLOW (lo))
d56ee62b
RS
6491 {
6492 tmp = neg_overflow ? integer_one_node : integer_zero_node;
db3927fb 6493 return omit_one_operand_loc (loc, type, tmp, arg00);
d56ee62b 6494 }
db3927fb 6495 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
8dc2384c
RS
6496
6497 default:
6498 break;
6499 }
6500
6501 return NULL_TREE;
6502}
6503
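/* Editorial illustration, not part of fold-const.c: a worked instance
   of X/C1 op C2.  With truncating division, x / 4 == 3 holds exactly
   for x in [12, 15], so the comparison becomes a range check, and
   x / 4 > 3 becomes x > 15.  */
int
div_eq_folded (int x)
{
  return x >= 12 && x <= 15;   /* what x / 4 == 3 folds to */
}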
6504
7960bf22 6505/* If CODE with arguments ARG0 and ARG1 represents a single bit
a94400fd
KH
 6506 equality/inequality test, then return a simplified form of the test
 6507 equality/inequality test, then return a simplified form of the test
6508 result type. */
d1822754 6509
a94400fd 6510static tree
db3927fb
AH
6511fold_single_bit_test_into_sign_test (location_t loc,
6512 enum tree_code code, tree arg0, tree arg1,
a94400fd 6513 tree result_type)
7960bf22 6514{
7960bf22
JL
6515 /* If this is testing a single bit, we can optimize the test. */
6516 if ((code == NE_EXPR || code == EQ_EXPR)
6517 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6518 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6519 {
7960bf22
JL
6520 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6521 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
a94400fd
KH
6522 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6523
1f7a8dcc
RS
6524 if (arg00 != NULL_TREE
6525 /* This is only a win if casting to a signed type is cheap,
6526 i.e. when arg00's type is not a partial mode. */
6527 && TYPE_PRECISION (TREE_TYPE (arg00))
6528 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
7960bf22 6529 {
12753674 6530 tree stype = signed_type_for (TREE_TYPE (arg00));
db3927fb
AH
6531 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6532 result_type,
6533 fold_convert_loc (loc, stype, arg00),
57decb7e 6534 build_int_cst (stype, 0));
7960bf22 6535 }
a94400fd
KH
6536 }
6537
6538 return NULL_TREE;
6539}
6540
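/* Editorial illustration, not part of fold-const.c: when the tested
   bit is the sign bit of a (two's complement) signed type, the AND
   plus comparison collapses into a sign test.  */
int
sign_bit_folded (int a)
{
  return a < 0;   /* what (a & INT_MIN) != 0 folds to */
}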
6541/* If CODE with arguments ARG0 and ARG1 represents a single bit
6542 equality/inequality test, then return a simplified form of
6543 the test using shifts and logical operations. Otherwise return
6544 NULL. TYPE is the desired result type. */
6545
6546tree
db3927fb
AH
6547fold_single_bit_test (location_t loc, enum tree_code code,
6548 tree arg0, tree arg1, tree result_type)
a94400fd
KH
6549{
6550 /* If this is testing a single bit, we can optimize the test. */
6551 if ((code == NE_EXPR || code == EQ_EXPR)
6552 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6553 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6554 {
6555 tree inner = TREE_OPERAND (arg0, 0);
6556 tree type = TREE_TYPE (arg0);
6557 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6558 enum machine_mode operand_mode = TYPE_MODE (type);
6559 int ops_unsigned;
6560 tree signed_type, unsigned_type, intermediate_type;
000d8d44 6561 tree tem, one;
a94400fd
KH
6562
6563 /* First, see if we can fold the single bit test into a sign-bit
6564 test. */
db3927fb 6565 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
a94400fd
KH
6566 result_type);
6567 if (tem)
6568 return tem;
c87d821b 6569
d1822754 6570 /* Otherwise we have (A & C) != 0 where C is a single bit,
7960bf22
JL
6571 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6572 Similarly for (A & C) == 0. */
6573
6574 /* If INNER is a right shift of a constant and it plus BITNUM does
6575 not overflow, adjust BITNUM and INNER. */
6576 if (TREE_CODE (inner) == RSHIFT_EXPR
6577 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6578 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6579 && bitnum < TYPE_PRECISION (type)
6580 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6581 bitnum - TYPE_PRECISION (type)))
6582 {
6583 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6584 inner = TREE_OPERAND (inner, 0);
6585 }
6586
6587 /* If we are going to be able to omit the AND below, we must do our
6588 operations as unsigned. If we must use the AND, we have a choice.
6589 Normally unsigned is faster, but for some machines signed is. */
7960bf22 6590#ifdef LOAD_EXTEND_OP
b8698a0f 6591 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
2a1a3cd5 6592 && !flag_syntax_only) ? 0 : 1;
7960bf22 6593#else
c87d821b 6594 ops_unsigned = 1;
7960bf22 6595#endif
7960bf22 6596
5785c7de
RS
6597 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6598 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
e7824b3e 6599 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
db3927fb 6600 inner = fold_convert_loc (loc, intermediate_type, inner);
7960bf22
JL
6601
6602 if (bitnum != 0)
59ce6d6b
RS
6603 inner = build2 (RSHIFT_EXPR, intermediate_type,
6604 inner, size_int (bitnum));
7960bf22 6605
000d8d44
RS
6606 one = build_int_cst (intermediate_type, 1);
6607
7960bf22 6608 if (code == EQ_EXPR)
db3927fb 6609 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7960bf22
JL
6610
6611 /* Put the AND last so it can combine with more things. */
000d8d44 6612 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7960bf22
JL
6613
6614 /* Make sure to return the proper type. */
db3927fb 6615 inner = fold_convert_loc (loc, result_type, inner);
7960bf22
JL
6616
6617 return inner;
6618 }
6619 return NULL_TREE;
6620}
5dfa45d0 6621
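/* Editorial illustration, not part of fold-const.c: testing a single
   non-sign bit becomes a shift and mask, so the result is already the
   0/1 value of the comparison; the == 0 form XORs in a 1 first.  */
unsigned
bit3_set (unsigned a)
{
  return (a >> 3) & 1;   /* what (a & 8) != 0 folds to */
}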
05d362b8
RS
6622/* Check whether we are allowed to reorder operands arg0 and arg1,
6623 such that the evaluation of arg1 occurs before arg0. */
6624
6625static bool
ac545c64 6626reorder_operands_p (const_tree arg0, const_tree arg1)
05d362b8
RS
6627{
6628 if (! flag_evaluation_order)
3e6688a7 6629 return true;
05d362b8
RS
6630 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6631 return true;
6632 return ! TREE_SIDE_EFFECTS (arg0)
6633 && ! TREE_SIDE_EFFECTS (arg1);
6634}
6635
37af03cb
RS
 6636/* Test whether it is preferable to swap two operands, ARG0 and
6637 ARG1, for example because ARG0 is an integer constant and ARG1
05d362b8
RS
6638 isn't. If REORDER is true, only recommend swapping if we can
6639 evaluate the operands in reverse order. */
37af03cb 6640
fd660b1b 6641bool
fa233e34 6642tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
37af03cb
RS
6643{
6644 STRIP_SIGN_NOPS (arg0);
6645 STRIP_SIGN_NOPS (arg1);
6646
6647 if (TREE_CODE (arg1) == INTEGER_CST)
6648 return 0;
6649 if (TREE_CODE (arg0) == INTEGER_CST)
6650 return 1;
6651
6652 if (TREE_CODE (arg1) == REAL_CST)
6653 return 0;
6654 if (TREE_CODE (arg0) == REAL_CST)
6655 return 1;
6656
325217ed
CF
6657 if (TREE_CODE (arg1) == FIXED_CST)
6658 return 0;
6659 if (TREE_CODE (arg0) == FIXED_CST)
6660 return 1;
6661
37af03cb
RS
6662 if (TREE_CODE (arg1) == COMPLEX_CST)
6663 return 0;
6664 if (TREE_CODE (arg0) == COMPLEX_CST)
6665 return 1;
6666
6667 if (TREE_CONSTANT (arg1))
6668 return 0;
6669 if (TREE_CONSTANT (arg0))
6670 return 1;
d1822754 6671
7f4b6d20 6672 if (optimize_function_for_size_p (cfun))
a352244f 6673 return 0;
37af03cb 6674
05d362b8
RS
6675 if (reorder && flag_evaluation_order
6676 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6677 return 0;
6678
fd660b1b
JL
 6679 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6680 for commutative and comparison operators. Ensuring a canonical
6681 form allows the optimizers to find additional redundancies without
6682 having to explicitly check for both orderings. */
6683 if (TREE_CODE (arg0) == SSA_NAME
6684 && TREE_CODE (arg1) == SSA_NAME
6685 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6686 return 1;
6687
421076b5
RG
6688 /* Put SSA_NAMEs last. */
6689 if (TREE_CODE (arg1) == SSA_NAME)
6690 return 0;
6691 if (TREE_CODE (arg0) == SSA_NAME)
6692 return 1;
6693
6694 /* Put variables last. */
6695 if (DECL_P (arg1))
6696 return 0;
6697 if (DECL_P (arg0))
6698 return 1;
6699
37af03cb
RS
6700 return 0;
6701}
6702
18522563
ZD
6703/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6704 ARG0 is extended to a wider type. */
6705
6706static tree
db3927fb
AH
6707fold_widened_comparison (location_t loc, enum tree_code code,
6708 tree type, tree arg0, tree arg1)
18522563
ZD
6709{
6710 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6711 tree arg1_unw;
6712 tree shorter_type, outer_type;
6713 tree min, max;
6714 bool above, below;
6715
6716 if (arg0_unw == arg0)
6717 return NULL_TREE;
6718 shorter_type = TREE_TYPE (arg0_unw);
2a0958c5 6719
6c6d9d33
JDA
6720#ifdef HAVE_canonicalize_funcptr_for_compare
6721 /* Disable this optimization if we're casting a function pointer
6722 type on targets that require function pointer canonicalization. */
6723 if (HAVE_canonicalize_funcptr_for_compare
6724 && TREE_CODE (shorter_type) == POINTER_TYPE
6725 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6726 return NULL_TREE;
6727#endif
6728
2a0958c5
JJ
6729 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6730 return NULL_TREE;
6731
8f768a5a 6732 arg1_unw = get_unwidened (arg1, NULL_TREE);
18522563
ZD
6733
6734 /* If possible, express the comparison in the shorter mode. */
6735 if ((code == EQ_EXPR || code == NE_EXPR
6736 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6737 && (TREE_TYPE (arg1_unw) == shorter_type
02765a37 6738 || ((TYPE_PRECISION (shorter_type)
2e1d2474 6739 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
02765a37
RG
6740 && (TYPE_UNSIGNED (shorter_type)
6741 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
18522563 6742 || (TREE_CODE (arg1_unw) == INTEGER_CST
a7e1c928
AP
6743 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6744 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
18522563 6745 && int_fits_type_p (arg1_unw, shorter_type))))
db3927fb
AH
6746 return fold_build2_loc (loc, code, type, arg0_unw,
6747 fold_convert_loc (loc, shorter_type, arg1_unw));
18522563 6748
1630e763
AS
6749 if (TREE_CODE (arg1_unw) != INTEGER_CST
6750 || TREE_CODE (shorter_type) != INTEGER_TYPE
6751 || !int_fits_type_p (arg1_unw, shorter_type))
18522563
ZD
6752 return NULL_TREE;
6753
 6754 /* If we are comparing with an integer that does not fit into the range
6755 of the shorter type, the result is known. */
6756 outer_type = TREE_TYPE (arg1_unw);
6757 min = lower_bound_in_type (outer_type, shorter_type);
6758 max = upper_bound_in_type (outer_type, shorter_type);
6759
6760 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6761 max, arg1_unw));
6762 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6763 arg1_unw, min));
6764
6765 switch (code)
6766 {
6767 case EQ_EXPR:
6768 if (above || below)
db3927fb 6769 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
18522563
ZD
6770 break;
6771
6772 case NE_EXPR:
6773 if (above || below)
db3927fb 6774 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
18522563
ZD
6775 break;
6776
6777 case LT_EXPR:
6778 case LE_EXPR:
6779 if (above)
db3927fb 6780 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
18522563 6781 else if (below)
db3927fb 6782 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
18522563
ZD
6783
6784 case GT_EXPR:
6785 case GE_EXPR:
6786 if (above)
db3927fb 6787 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
18522563 6788 else if (below)
db3927fb 6789 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
18522563
ZD
6790
6791 default:
6792 break;
6793 }
6794
6795 return NULL_TREE;
6796}
6797
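/* Editorial illustration, not part of fold-const.c: when the narrow
   operand was only widened for the comparison, the test can be done in
   the narrow type, and a constant outside that type's range decides
   the result outright.  */
int
widened_cmp_folded (unsigned char c)
{
  (void) c;
  return 0;   /* what (int) c == 300 folds to: 300 never fits in a char */
}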
6798/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6799 ARG0 just the signedness is changed. */
6800
6801static tree
db3927fb 6802fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
18522563
ZD
6803 tree arg0, tree arg1)
6804{
b8fca551 6805 tree arg0_inner;
18522563
ZD
6806 tree inner_type, outer_type;
6807
1043771b 6808 if (!CONVERT_EXPR_P (arg0))
18522563
ZD
6809 return NULL_TREE;
6810
6811 outer_type = TREE_TYPE (arg0);
6812 arg0_inner = TREE_OPERAND (arg0, 0);
6813 inner_type = TREE_TYPE (arg0_inner);
6814
6c6d9d33
JDA
6815#ifdef HAVE_canonicalize_funcptr_for_compare
6816 /* Disable this optimization if we're casting a function pointer
6817 type on targets that require function pointer canonicalization. */
6818 if (HAVE_canonicalize_funcptr_for_compare
6819 && TREE_CODE (inner_type) == POINTER_TYPE
6820 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6821 return NULL_TREE;
6822#endif
6823
18522563
ZD
6824 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6825 return NULL_TREE;
6826
6827 if (TREE_CODE (arg1) != INTEGER_CST
1043771b 6828 && !(CONVERT_EXPR_P (arg1)
18522563
ZD
6829 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6830 return NULL_TREE;
6831
8ebc39d8
RG
6832 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6833 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
18522563
ZD
6834 && code != NE_EXPR
6835 && code != EQ_EXPR)
6836 return NULL_TREE;
6837
6838 if (TREE_CODE (arg1) == INTEGER_CST)
9589f23e
AS
6839 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6840 0, TREE_OVERFLOW (arg1));
18522563 6841 else
db3927fb 6842 arg1 = fold_convert_loc (loc, inner_type, arg1);
18522563 6843
db3927fb 6844 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
18522563
ZD
6845}
6846
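/* Editorial illustration, not part of fold-const.c: a cast that only
   changes signedness (same precision) cannot affect equality, so the
   comparison is done on the uncast operand with the constant
   reinterpreted in its type (assuming 32-bit int here).  */
int
sign_change_folded (unsigned int u)
{
  return u == 0xffffffffu;   /* what (int) u == -1 folds to */
}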
5be014d5 6847/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
db3927fb
AH
 6848 the step of the array. Reconstructs s and delta in the case of s *
6849 delta being an integer constant (and thus already folded). ADDR is
6850 the address. MULT is the multiplicative expression. If the
6851 function succeeds, the new address expression is returned.
6852 Otherwise NULL_TREE is returned. LOC is the location of the
6853 resulting expression. */
38b0dcb8
ZD
6854
6855static tree
db3927fb 6856try_move_mult_to_index (location_t loc, tree addr, tree op1)
38b0dcb8
ZD
6857{
6858 tree s, delta, step;
38b0dcb8
ZD
6859 tree ref = TREE_OPERAND (addr, 0), pref;
6860 tree ret, pos;
6861 tree itype;
713e3ec9 6862 bool mdim = false;
38b0dcb8 6863
5be014d5
AP
6864 /* Strip the nops that might be added when converting op1 to sizetype. */
6865 STRIP_NOPS (op1);
6866
c5542940
RG
6867 /* Canonicalize op1 into a possibly non-constant delta
6868 and an INTEGER_CST s. */
6869 if (TREE_CODE (op1) == MULT_EXPR)
38b0dcb8 6870 {
c5542940
RG
6871 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6872
6873 STRIP_NOPS (arg0);
6874 STRIP_NOPS (arg1);
b8698a0f 6875
c5542940
RG
6876 if (TREE_CODE (arg0) == INTEGER_CST)
6877 {
6878 s = arg0;
6879 delta = arg1;
6880 }
6881 else if (TREE_CODE (arg1) == INTEGER_CST)
6882 {
6883 s = arg1;
6884 delta = arg0;
6885 }
6886 else
6887 return NULL_TREE;
38b0dcb8 6888 }
c5542940 6889 else if (TREE_CODE (op1) == INTEGER_CST)
38b0dcb8 6890 {
c5542940
RG
6891 delta = op1;
6892 s = NULL_TREE;
38b0dcb8
ZD
6893 }
6894 else
c5542940
RG
6895 {
6896 /* Simulate we are delta * 1. */
6897 delta = op1;
6898 s = integer_one_node;
6899 }
38b0dcb8
ZD
6900
6901 for (;; ref = TREE_OPERAND (ref, 0))
6902 {
6903 if (TREE_CODE (ref) == ARRAY_REF)
6904 {
8e281a8d
RG
6905 tree domain;
6906
713e3ec9
RG
6907 /* Remember if this was a multi-dimensional array. */
6908 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6909 mdim = true;
6910
8e281a8d
RG
6911 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6912 if (! domain)
03b0db0a 6913 continue;
8e281a8d 6914 itype = TREE_TYPE (domain);
03b0db0a 6915
38b0dcb8 6916 step = array_ref_element_size (ref);
38b0dcb8
ZD
6917 if (TREE_CODE (step) != INTEGER_CST)
6918 continue;
6919
c5542940
RG
6920 if (s)
6921 {
6922 if (! tree_int_cst_equal (step, s))
6923 continue;
6924 }
6925 else
6926 {
 6927 /* Check whether delta is a multiple of step. */
194ac52a 6928 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
03b0db0a 6929 if (! tmp)
c5542940 6930 continue;
03b0db0a 6931 delta = tmp;
c5542940 6932 }
38b0dcb8 6933
713e3ec9
RG
6934 /* Only fold here if we can verify we do not overflow one
6935 dimension of a multi-dimensional array. */
6936 if (mdim)
6937 {
6938 tree tmp;
6939
6940 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
8e281a8d
RG
6941 || !TYPE_MAX_VALUE (domain)
6942 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
713e3ec9
RG
6943 continue;
6944
db3927fb 6945 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
8e281a8d
RG
6946 fold_convert_loc (loc, itype,
6947 TREE_OPERAND (ref, 1)),
6948 fold_convert_loc (loc, itype, delta));
713e3ec9
RG
6949 if (!tmp
6950 || TREE_CODE (tmp) != INTEGER_CST
8e281a8d 6951 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
713e3ec9
RG
6952 continue;
6953 }
6954
38b0dcb8
ZD
6955 break;
6956 }
713e3ec9
RG
6957 else
6958 mdim = false;
38b0dcb8
ZD
6959
6960 if (!handled_component_p (ref))
6961 return NULL_TREE;
6962 }
6963
6964 /* We found the suitable array reference. So copy everything up to it,
6965 and replace the index. */
6966
6967 pref = TREE_OPERAND (addr, 0);
6968 ret = copy_node (pref);
db3927fb 6969 SET_EXPR_LOCATION (ret, loc);
38b0dcb8
ZD
6970 pos = ret;
6971
6972 while (pref != ref)
6973 {
6974 pref = TREE_OPERAND (pref, 0);
6975 TREE_OPERAND (pos, 0) = copy_node (pref);
6976 pos = TREE_OPERAND (pos, 0);
6977 }
6978
db3927fb
AH
6979 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6980 fold_convert_loc (loc, itype,
6981 TREE_OPERAND (pos, 1)),
6982 fold_convert_loc (loc, itype, delta));
38b0dcb8 6983
db3927fb 6984 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
38b0dcb8
ZD
6985}
6986
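/* Editorial illustration, not part of fold-const.c: when the byte
   offset is a whole number of elements, the addition is moved into the
   array index, keeping the address in &a[...] form.  */
int *
offset_into_index (int a[], int i)
{
  return &a[i + 2];   /* what (int *) ((char *) &a[i] + 2 * sizeof (int)) folds to */
}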
1d481ba8
ZD
6987
6988/* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6989 means A >= Y && A != MAX, but in this case we know that
6990 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6991
6992static tree
db3927fb 6993fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
1d481ba8
ZD
6994{
6995 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6996
6997 if (TREE_CODE (bound) == LT_EXPR)
6998 a = TREE_OPERAND (bound, 0);
6999 else if (TREE_CODE (bound) == GT_EXPR)
7000 a = TREE_OPERAND (bound, 1);
7001 else
7002 return NULL_TREE;
7003
7004 typea = TREE_TYPE (a);
7005 if (!INTEGRAL_TYPE_P (typea)
7006 && !POINTER_TYPE_P (typea))
7007 return NULL_TREE;
7008
7009 if (TREE_CODE (ineq) == LT_EXPR)
7010 {
7011 a1 = TREE_OPERAND (ineq, 1);
7012 y = TREE_OPERAND (ineq, 0);
7013 }
7014 else if (TREE_CODE (ineq) == GT_EXPR)
7015 {
7016 a1 = TREE_OPERAND (ineq, 0);
7017 y = TREE_OPERAND (ineq, 1);
7018 }
7019 else
7020 return NULL_TREE;
7021
7022 if (TREE_TYPE (a1) != typea)
7023 return NULL_TREE;
7024
5be014d5
AP
7025 if (POINTER_TYPE_P (typea))
7026 {
 7027 /* Convert the pointer types to integers before taking the difference. */
db3927fb
AH
7028 tree ta = fold_convert_loc (loc, ssizetype, a);
7029 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7030 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
5be014d5
AP
7031 }
7032 else
db3927fb 7033 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
5be014d5
AP
7034
7035 if (!diff || !integer_onep (diff))
7036 return NULL_TREE;
1d481ba8 7037
db3927fb 7038 return fold_build2_loc (loc, GE_EXPR, type, a, y);
1d481ba8
ZD
7039}
7040
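/* Editorial illustration, not part of fold-const.c: the bound a < x
   guarantees a + 1 does not wrap, so within the conjunction a + 1 > y
   can be weakened to a >= y.  */
int
nonsharp_folded (unsigned a, unsigned x, unsigned y)
{
  return a < x && a >= y;   /* what (a < x && a + 1 > y) folds to */
}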
0ed9a3e3
RG
7041/* Fold a sum or difference of at least one multiplication.
7042 Returns the folded tree or NULL if no simplification could be made. */
7043
7044static tree
db3927fb
AH
7045fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7046 tree arg0, tree arg1)
0ed9a3e3
RG
7047{
7048 tree arg00, arg01, arg10, arg11;
7049 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7050
7051 /* (A * C) +- (B * C) -> (A+-B) * C.
7052 (A * C) +- A -> A * (C+-1).
7053 We are most concerned about the case where C is a constant,
7054 but other combinations show up during loop reduction. Since
7055 it is not difficult, try all four possibilities. */
7056
7057 if (TREE_CODE (arg0) == MULT_EXPR)
7058 {
7059 arg00 = TREE_OPERAND (arg0, 0);
7060 arg01 = TREE_OPERAND (arg0, 1);
7061 }
b462d62d
RG
7062 else if (TREE_CODE (arg0) == INTEGER_CST)
7063 {
7064 arg00 = build_one_cst (type);
7065 arg01 = arg0;
7066 }
0ed9a3e3
RG
7067 else
7068 {
325217ed
CF
7069 /* We cannot generate constant 1 for fract. */
7070 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7071 return NULL_TREE;
0ed9a3e3 7072 arg00 = arg0;
bfabddb6 7073 arg01 = build_one_cst (type);
0ed9a3e3
RG
7074 }
7075 if (TREE_CODE (arg1) == MULT_EXPR)
7076 {
7077 arg10 = TREE_OPERAND (arg1, 0);
7078 arg11 = TREE_OPERAND (arg1, 1);
7079 }
b462d62d
RG
7080 else if (TREE_CODE (arg1) == INTEGER_CST)
7081 {
7082 arg10 = build_one_cst (type);
cef158f9
RG
7083 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7084 the purpose of this canonicalization. */
7085 if (TREE_INT_CST_HIGH (arg1) == -1
7086 && negate_expr_p (arg1)
7087 && code == PLUS_EXPR)
7088 {
7089 arg11 = negate_expr (arg1);
7090 code = MINUS_EXPR;
7091 }
7092 else
7093 arg11 = arg1;
b462d62d 7094 }
0ed9a3e3
RG
7095 else
7096 {
325217ed
CF
7097 /* We cannot generate constant 1 for fract. */
7098 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7099 return NULL_TREE;
0ed9a3e3 7100 arg10 = arg1;
bfabddb6 7101 arg11 = build_one_cst (type);
0ed9a3e3
RG
7102 }
7103 same = NULL_TREE;
7104
7105 if (operand_equal_p (arg01, arg11, 0))
7106 same = arg01, alt0 = arg00, alt1 = arg10;
7107 else if (operand_equal_p (arg00, arg10, 0))
7108 same = arg00, alt0 = arg01, alt1 = arg11;
7109 else if (operand_equal_p (arg00, arg11, 0))
7110 same = arg00, alt0 = arg01, alt1 = arg10;
7111 else if (operand_equal_p (arg01, arg10, 0))
7112 same = arg01, alt0 = arg00, alt1 = arg11;
7113
7114 /* No identical multiplicands; see if we can find a common
7115 power-of-two factor in non-power-of-two multiplies. This
7116 can help in multi-dimensional array access. */
7117 else if (host_integerp (arg01, 0)
7118 && host_integerp (arg11, 0))
7119 {
7120 HOST_WIDE_INT int01, int11, tmp;
7121 bool swap = false;
7122 tree maybe_same;
7123 int01 = TREE_INT_CST_LOW (arg01);
7124 int11 = TREE_INT_CST_LOW (arg11);
7125
7126 /* Move min of absolute values to int11. */
7127 if ((int01 >= 0 ? int01 : -int01)
7128 < (int11 >= 0 ? int11 : -int11))
7129 {
7130 tmp = int01, int01 = int11, int11 = tmp;
7131 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7132 maybe_same = arg01;
7133 swap = true;
7134 }
7135 else
7136 maybe_same = arg11;
7137
299b87f8
RG
7138 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7139 /* The remainder should not be a constant, otherwise we
7140 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7141 increased the number of multiplications necessary. */
7142 && TREE_CODE (arg10) != INTEGER_CST)
0ed9a3e3 7143 {
db3927fb 7144 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
0ed9a3e3
RG
7145 build_int_cst (TREE_TYPE (arg00),
7146 int01 / int11));
7147 alt1 = arg10;
7148 same = maybe_same;
7149 if (swap)
7150 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7151 }
7152 }
7153
7154 if (same)
db3927fb
AH
7155 return fold_build2_loc (loc, MULT_EXPR, type,
7156 fold_build2_loc (loc, code, type,
7157 fold_convert_loc (loc, type, alt0),
7158 fold_convert_loc (loc, type, alt1)),
7159 fold_convert_loc (loc, type, same));
0ed9a3e3
RG
7160
7161 return NULL_TREE;
7162}
7163
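/* Editorial illustration, not part of fold-const.c: sums sharing a
   multiplicand are refactored, e.g. a*c + b*c -> (a + b)*c and
   a*4 + a -> a*5 (the missing factor is the implicit constant 1).  */
unsigned
factored (unsigned a, unsigned b, unsigned c)
{
  return (a + b) * c;   /* what a*c + b*c folds to */
}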
78bf6e2f
RS
7164/* Subroutine of native_encode_expr. Encode the INTEGER_CST
7165 specified by EXPR into the buffer PTR of length LEN bytes.
7166 Return the number of bytes placed in the buffer, or zero
7167 upon failure. */
7168
7169static int
fa233e34 7170native_encode_int (const_tree expr, unsigned char *ptr, int len)
78bf6e2f
RS
7171{
7172 tree type = TREE_TYPE (expr);
7173 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7174 int byte, offset, word, words;
7175 unsigned char value;
7176
7177 if (total_bytes > len)
7178 return 0;
7179 words = total_bytes / UNITS_PER_WORD;
7180
7181 for (byte = 0; byte < total_bytes; byte++)
7182 {
7183 int bitpos = byte * BITS_PER_UNIT;
7184 if (bitpos < HOST_BITS_PER_WIDE_INT)
7185 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7186 else
7187 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7188 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7189
7190 if (total_bytes > UNITS_PER_WORD)
7191 {
7192 word = byte / UNITS_PER_WORD;
7193 if (WORDS_BIG_ENDIAN)
7194 word = (words - 1) - word;
7195 offset = word * UNITS_PER_WORD;
7196 if (BYTES_BIG_ENDIAN)
7197 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7198 else
7199 offset += byte % UNITS_PER_WORD;
7200 }
7201 else
7202 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7203 ptr[offset] = value;
7204 }
7205 return total_bytes;
7206}
7207
7208
7209/* Subroutine of native_encode_expr. Encode the REAL_CST
7210 specified by EXPR into the buffer PTR of length LEN bytes.
7211 Return the number of bytes placed in the buffer, or zero
7212 upon failure. */
7213
7214static int
fa233e34 7215native_encode_real (const_tree expr, unsigned char *ptr, int len)
78bf6e2f
RS
7216{
7217 tree type = TREE_TYPE (expr);
7218 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
0a9430a8 7219 int byte, offset, word, words, bitpos;
78bf6e2f
RS
7220 unsigned char value;
7221
7222 /* There are always 32 bits in each long, no matter the size of
 7223 the host's long. We handle floating point representations with
7224 up to 192 bits. */
7225 long tmp[6];
7226
7227 if (total_bytes > len)
7228 return 0;
54193313 7229 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
78bf6e2f
RS
7230
7231 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7232
0a9430a8
JJ
7233 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7234 bitpos += BITS_PER_UNIT)
78bf6e2f 7235 {
0a9430a8 7236 byte = (bitpos / BITS_PER_UNIT) & 3;
78bf6e2f
RS
7237 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7238
0a9430a8 7239 if (UNITS_PER_WORD < 4)
78bf6e2f
RS
7240 {
7241 word = byte / UNITS_PER_WORD;
0a9430a8 7242 if (WORDS_BIG_ENDIAN)
78bf6e2f
RS
7243 word = (words - 1) - word;
7244 offset = word * UNITS_PER_WORD;
7245 if (BYTES_BIG_ENDIAN)
7246 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7247 else
7248 offset += byte % UNITS_PER_WORD;
7249 }
7250 else
0a9430a8
JJ
7251 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7252 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
78bf6e2f
RS
7253 }
7254 return total_bytes;
7255}
7256
7257/* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7258 specified by EXPR into the buffer PTR of length LEN bytes.
7259 Return the number of bytes placed in the buffer, or zero
7260 upon failure. */
7261
7262static int
fa233e34 7263native_encode_complex (const_tree expr, unsigned char *ptr, int len)
78bf6e2f
RS
7264{
7265 int rsize, isize;
7266 tree part;
7267
7268 part = TREE_REALPART (expr);
7269 rsize = native_encode_expr (part, ptr, len);
7270 if (rsize == 0)
7271 return 0;
7272 part = TREE_IMAGPART (expr);
7273 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7274 if (isize != rsize)
7275 return 0;
7276 return rsize + isize;
7277}
7278
7279
7280/* Subroutine of native_encode_expr. Encode the VECTOR_CST
7281 specified by EXPR into the buffer PTR of length LEN bytes.
7282 Return the number of bytes placed in the buffer, or zero
7283 upon failure. */
7284
7285static int
fa233e34 7286native_encode_vector (const_tree expr, unsigned char *ptr, int len)
78bf6e2f 7287{
15b1c12a 7288 int i, size, offset, count;
1000b34d 7289 tree itype, elem, elements;
78bf6e2f 7290
78bf6e2f
RS
7291 offset = 0;
7292 elements = TREE_VECTOR_CST_ELTS (expr);
7293 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
1000b34d
RS
7294 itype = TREE_TYPE (TREE_TYPE (expr));
7295 size = GET_MODE_SIZE (TYPE_MODE (itype));
78bf6e2f
RS
7296 for (i = 0; i < count; i++)
7297 {
7298 if (elements)
7299 {
7300 elem = TREE_VALUE (elements);
7301 elements = TREE_CHAIN (elements);
7302 }
7303 else
7304 elem = NULL_TREE;
7305
7306 if (elem)
7307 {
1000b34d 7308 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
78bf6e2f
RS
7309 return 0;
7310 }
1000b34d 7311 else
78bf6e2f
RS
7312 {
7313 if (offset + size > len)
7314 return 0;
7315 memset (ptr+offset, 0, size);
7316 }
78bf6e2f
RS
7317 offset += size;
7318 }
7319 return offset;
7320}
7321
7322
27a4e072
JJ
7323/* Subroutine of native_encode_expr. Encode the STRING_CST
7324 specified by EXPR into the buffer PTR of length LEN bytes.
7325 Return the number of bytes placed in the buffer, or zero
7326 upon failure. */
7327
7328static int
7329native_encode_string (const_tree expr, unsigned char *ptr, int len)
7330{
7331 tree type = TREE_TYPE (expr);
7332 HOST_WIDE_INT total_bytes;
7333
7334 if (TREE_CODE (type) != ARRAY_TYPE
7335 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7336 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7337 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7338 return 0;
7339 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7340 if (total_bytes > len)
7341 return 0;
7342 if (TREE_STRING_LENGTH (expr) < total_bytes)
7343 {
7344 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7345 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7346 total_bytes - TREE_STRING_LENGTH (expr));
7347 }
7348 else
7349 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7350 return total_bytes;
7351}
7352
7353
78bf6e2f
RS
7354/* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7355 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7356 buffer PTR of length LEN bytes. Return the number of bytes
7357 placed in the buffer, or zero upon failure. */
7358
db136335 7359int
fa233e34 7360native_encode_expr (const_tree expr, unsigned char *ptr, int len)
78bf6e2f
RS
7361{
7362 switch (TREE_CODE (expr))
7363 {
7364 case INTEGER_CST:
7365 return native_encode_int (expr, ptr, len);
7366
7367 case REAL_CST:
7368 return native_encode_real (expr, ptr, len);
7369
7370 case COMPLEX_CST:
7371 return native_encode_complex (expr, ptr, len);
7372
7373 case VECTOR_CST:
7374 return native_encode_vector (expr, ptr, len);
7375
27a4e072
JJ
7376 case STRING_CST:
7377 return native_encode_string (expr, ptr, len);
7378
78bf6e2f
RS
7379 default:
7380 return 0;
7381 }
7382}
7383
7384
7385/* Subroutine of native_interpret_expr. Interpret the contents of
7386 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7387 If the buffer cannot be interpreted, return NULL_TREE. */
7388
7389static tree
fa233e34 7390native_interpret_int (tree type, const unsigned char *ptr, int len)
78bf6e2f
RS
7391{
7392 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7393 int byte, offset, word, words;
7394 unsigned char value;
1961ffb8 7395 double_int result;
78bf6e2f
RS
7396
7397 if (total_bytes > len)
7398 return NULL_TREE;
7399 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7400 return NULL_TREE;
1961ffb8
AS
7401
7402 result = double_int_zero;
78bf6e2f
RS
7403 words = total_bytes / UNITS_PER_WORD;
7404
7405 for (byte = 0; byte < total_bytes; byte++)
7406 {
7407 int bitpos = byte * BITS_PER_UNIT;
7408 if (total_bytes > UNITS_PER_WORD)
7409 {
7410 word = byte / UNITS_PER_WORD;
7411 if (WORDS_BIG_ENDIAN)
7412 word = (words - 1) - word;
7413 offset = word * UNITS_PER_WORD;
7414 if (BYTES_BIG_ENDIAN)
7415 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7416 else
7417 offset += byte % UNITS_PER_WORD;
7418 }
7419 else
7420 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7421 value = ptr[offset];
7422
7423 if (bitpos < HOST_BITS_PER_WIDE_INT)
1961ffb8 7424 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
78bf6e2f 7425 else
1961ffb8
AS
7426 result.high |= (unsigned HOST_WIDE_INT) value
7427 << (bitpos - HOST_BITS_PER_WIDE_INT);
78bf6e2f
RS
7428 }
7429
1961ffb8 7430 return double_int_to_tree (type, result);
78bf6e2f
RS
7431}
7432
7433
7434/* Subroutine of native_interpret_expr. Interpret the contents of
7435 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7436 If the buffer cannot be interpreted, return NULL_TREE. */
7437
7438static tree
fa233e34 7439native_interpret_real (tree type, const unsigned char *ptr, int len)
78bf6e2f 7440{
15b1c12a
RS
7441 enum machine_mode mode = TYPE_MODE (type);
7442 int total_bytes = GET_MODE_SIZE (mode);
0a9430a8 7443 int byte, offset, word, words, bitpos;
78bf6e2f
RS
7444 unsigned char value;
7445 /* There are always 32 bits in each long, no matter the size of
 7446 the host's long. We handle floating point representations with
7447 up to 192 bits. */
7448 REAL_VALUE_TYPE r;
7449 long tmp[6];
7450
7451 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7452 if (total_bytes > len || total_bytes > 24)
7453 return NULL_TREE;
54193313 7454 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
78bf6e2f
RS
7455
7456 memset (tmp, 0, sizeof (tmp));
0a9430a8
JJ
7457 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7458 bitpos += BITS_PER_UNIT)
78bf6e2f 7459 {
0a9430a8
JJ
7460 byte = (bitpos / BITS_PER_UNIT) & 3;
7461 if (UNITS_PER_WORD < 4)
78bf6e2f
RS
7462 {
7463 word = byte / UNITS_PER_WORD;
0a9430a8 7464 if (WORDS_BIG_ENDIAN)
78bf6e2f
RS
7465 word = (words - 1) - word;
7466 offset = word * UNITS_PER_WORD;
7467 if (BYTES_BIG_ENDIAN)
7468 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7469 else
7470 offset += byte % UNITS_PER_WORD;
7471 }
7472 else
0a9430a8
JJ
7473 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7474 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
78bf6e2f
RS
7475
7476 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7477 }
7478
7479 real_from_target (&r, tmp, mode);
7480 return build_real (type, r);
7481}
7482
7483
7484/* Subroutine of native_interpret_expr. Interpret the contents of
7485 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7486 If the buffer cannot be interpreted, return NULL_TREE. */
7487
7488static tree
fa233e34 7489native_interpret_complex (tree type, const unsigned char *ptr, int len)
78bf6e2f
RS
7490{
7491 tree etype, rpart, ipart;
7492 int size;
7493
7494 etype = TREE_TYPE (type);
7495 size = GET_MODE_SIZE (TYPE_MODE (etype));
7496 if (size * 2 > len)
7497 return NULL_TREE;
7498 rpart = native_interpret_expr (etype, ptr, size);
7499 if (!rpart)
7500 return NULL_TREE;
7501 ipart = native_interpret_expr (etype, ptr+size, size);
7502 if (!ipart)
7503 return NULL_TREE;
7504 return build_complex (type, rpart, ipart);
7505}
7506
7507
7508/* Subroutine of native_interpret_expr. Interpret the contents of
7509 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7510 If the buffer cannot be interpreted, return NULL_TREE. */
7511
7512static tree
fa233e34 7513native_interpret_vector (tree type, const unsigned char *ptr, int len)
78bf6e2f
RS
7514{
7515 tree etype, elem, elements;
7516 int i, size, count;
7517
7518 etype = TREE_TYPE (type);
7519 size = GET_MODE_SIZE (TYPE_MODE (etype));
7520 count = TYPE_VECTOR_SUBPARTS (type);
7521 if (size * count > len)
7522 return NULL_TREE;
7523
7524 elements = NULL_TREE;
7525 for (i = count - 1; i >= 0; i--)
7526 {
7527 elem = native_interpret_expr (etype, ptr+(i*size), size);
7528 if (!elem)
7529 return NULL_TREE;
7530 elements = tree_cons (NULL_TREE, elem, elements);
7531 }
7532 return build_vector (type, elements);
7533}
7534
7535
75c40d56 7536/* Subroutine of fold_view_convert_expr. Interpret the contents of
78bf6e2f
RS
7537 the buffer PTR of length LEN as a constant of type TYPE. For
7538 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7539 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7540 return NULL_TREE. */
7541
db136335 7542tree
fa233e34 7543native_interpret_expr (tree type, const unsigned char *ptr, int len)
78bf6e2f
RS
7544{
7545 switch (TREE_CODE (type))
7546 {
7547 case INTEGER_TYPE:
7548 case ENUMERAL_TYPE:
7549 case BOOLEAN_TYPE:
7550 return native_interpret_int (type, ptr, len);
7551
7552 case REAL_TYPE:
7553 return native_interpret_real (type, ptr, len);
7554
7555 case COMPLEX_TYPE:
7556 return native_interpret_complex (type, ptr, len);
7557
7558 case VECTOR_TYPE:
7559 return native_interpret_vector (type, ptr, len);
7560
7561 default:
7562 return NULL_TREE;
7563 }
7564}
7565
7566
7567/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7568 TYPE at compile-time. If we're unable to perform the conversion
7569 return NULL_TREE. */
7570
7571static tree
7572fold_view_convert_expr (tree type, tree expr)
7573{
7574 /* We support up to 512-bit values (for V8DFmode). */
7575 unsigned char buffer[64];
7576 int len;
7577
7578 /* Check that the host and target are sane. */
7579 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7580 return NULL_TREE;
7581
7582 len = native_encode_expr (expr, buffer, sizeof (buffer));
7583 if (len == 0)
7584 return NULL_TREE;
7585
7586 return native_interpret_expr (type, buffer, len);
7587}
7588
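/* Editorial illustration, not part of fold-const.c: a host-side
   analogue of folding VIEW_CONVERT_EXPR through the native encode /
   interpret pair: write the constant's target byte image, then read it
   back in the new type, e.g. 1.0f reinterprets as 0x3f800000.  */
#include <stdint.h>
#include <string.h>

uint32_t
view_convert_float (float f)
{
  uint32_t u;
  memcpy (&u, &f, sizeof u);
  return u;
}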
70826cbb 7589/* Build an expression for the address of T. Folds away INDIRECT_REF
628c189e 7590 to avoid confusing the gimplify process. */
70826cbb 7591
628c189e 7592tree
db3927fb 7593build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
70826cbb
SP
7594{
7595 /* The size of the object is not relevant when talking about its address. */
7596 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7597 t = TREE_OPERAND (t, 0);
7598
7599 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7600 if (TREE_CODE (t) == INDIRECT_REF
7601 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7602 {
7603 t = TREE_OPERAND (t, 0);
7604
7605 if (TREE_TYPE (t) != ptrtype)
db3927fb
AH
7606 {
7607 t = build1 (NOP_EXPR, ptrtype, t);
7608 SET_EXPR_LOCATION (t, loc);
7609 }
70826cbb 7610 }
70f34814
RG
7611 else if (TREE_CODE (t) == MEM_REF
7612 && integer_zerop (TREE_OPERAND (t, 1)))
7613 return TREE_OPERAND (t, 0);
d98e8686
EB
7614 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7615 {
db3927fb 7616 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
d98e8686
EB
7617
7618 if (TREE_TYPE (t) != ptrtype)
db3927fb 7619 t = fold_convert_loc (loc, ptrtype, t);
d98e8686 7620 }
70826cbb 7621 else
db3927fb
AH
7622 {
7623 t = build1 (ADDR_EXPR, ptrtype, t);
7624 SET_EXPR_LOCATION (t, loc);
7625 }
70826cbb
SP
7626
7627 return t;
7628}
7629
628c189e 7630/* Build an expression for the address of T. */
70826cbb
SP
7631
7632tree
db3927fb 7633build_fold_addr_expr_loc (location_t loc, tree t)
70826cbb
SP
7634{
7635 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7636
db3927fb 7637 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
70826cbb 7638}
78bf6e2f 7639
7107fa7c
KH
7640/* Fold a unary expression of code CODE and type TYPE with operand
7641 OP0. Return the folded expression if folding is successful.
7642 Otherwise, return NULL_TREE. */
659d8efa 7643
721425b6 7644tree
db3927fb 7645fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
659d8efa 7646{
659d8efa 7647 tree tem;
fbaa905c 7648 tree arg0;
659d8efa
KH
7649 enum tree_code_class kind = TREE_CODE_CLASS (code);
7650
7651 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7652 && TREE_CODE_LENGTH (code) == 1);
7653
fbaa905c 7654 arg0 = op0;
659d8efa
KH
7655 if (arg0)
7656 {
1a87cf0c 7657 if (CONVERT_EXPR_CODE_P (code)
b49ceb45 7658 || code == FLOAT_EXPR || code == ABS_EXPR)
659d8efa 7659 {
b49ceb45
JM
7660 /* Don't use STRIP_NOPS, because signedness of argument type
7661 matters. */
659d8efa
KH
7662 STRIP_SIGN_NOPS (arg0);
7663 }
7664 else
7665 {
7666 /* Strip any conversions that don't change the mode. This
7667 is safe for every expression, except for a comparison
7668 expression because its signedness is derived from its
7669 operands.
7670
7671 Note that this is done as an internal manipulation within
7672 the constant folder, in order to find the simplest
7673 representation of the arguments so that their form can be
7674 studied. In any cases, the appropriate type conversions
7675 should be put back in the tree that will get out of the
7676 constant folder. */
7677 STRIP_NOPS (arg0);
7678 }
7679 }
7680
7681 if (TREE_CODE_CLASS (code) == tcc_unary)
7682 {
7683 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7684 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
db3927fb
AH
7685 fold_build1_loc (loc, code, type,
7686 fold_convert_loc (loc, TREE_TYPE (op0),
7687 TREE_OPERAND (arg0, 1))));
659d8efa
KH
7688 else if (TREE_CODE (arg0) == COND_EXPR)
7689 {
7690 tree arg01 = TREE_OPERAND (arg0, 1);
7691 tree arg02 = TREE_OPERAND (arg0, 2);
7692 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
db3927fb
AH
7693 arg01 = fold_build1_loc (loc, code, type,
7694 fold_convert_loc (loc,
7695 TREE_TYPE (op0), arg01));
659d8efa 7696 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
db3927fb
AH
7697 arg02 = fold_build1_loc (loc, code, type,
7698 fold_convert_loc (loc,
7699 TREE_TYPE (op0), arg02));
7700 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7f20a5b7 7701 arg01, arg02);
659d8efa
KH
7702
7703 /* If this was a conversion, and all we did was to move into
7704 inside the COND_EXPR, bring it back out. But leave it if
7705 it is a conversion from integer to integer and the
7706 result precision is no wider than a word since such a
7707 conversion is cheap and may be optimized away by combine,
7708 while it couldn't if it were outside the COND_EXPR. Then return
7709 so we don't get into an infinite recursion loop taking the
7710 conversion out and then back in. */
7711
1a87cf0c 7712 if ((CONVERT_EXPR_CODE_P (code)
659d8efa
KH
7713 || code == NON_LVALUE_EXPR)
7714 && TREE_CODE (tem) == COND_EXPR
7715 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7716 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7717 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7718 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7719 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7720 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7721 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7722 && (INTEGRAL_TYPE_P
7723 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7724 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7725 || flag_syntax_only))
db3927fb
AH
7726 {
7727 tem = build1 (code, type,
7728 build3 (COND_EXPR,
7729 TREE_TYPE (TREE_OPERAND
7730 (TREE_OPERAND (tem, 1), 0)),
7731 TREE_OPERAND (tem, 0),
7732 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7733 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7734 SET_EXPR_LOCATION (tem, loc);
7735 }
659d8efa
KH
7736 return tem;
7737 }
7738 else if (COMPARISON_CLASS_P (arg0))
7739 {
7740 if (TREE_CODE (type) == BOOLEAN_TYPE)
7741 {
7742 arg0 = copy_node (arg0);
7743 TREE_TYPE (arg0) = type;
7744 return arg0;
7745 }
7746 else if (TREE_CODE (type) != INTEGER_TYPE)
db3927fb
AH
7747 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7748 fold_build1_loc (loc, code, type,
7f20a5b7 7749 integer_one_node),
db3927fb 7750 fold_build1_loc (loc, code, type,
7f20a5b7 7751 integer_zero_node));
659d8efa
KH
7752 }
7753 }
7754
7755 switch (code)
7756 {
dedd42d5
RG
7757 case PAREN_EXPR:
7758 /* Re-association barriers around constants and other re-association
7759 barriers can be removed. */
7760 if (CONSTANT_CLASS_P (op0)
7761 || TREE_CODE (op0) == PAREN_EXPR)
db3927fb 7762 return fold_convert_loc (loc, type, op0);
dedd42d5
RG
7763 return NULL_TREE;
7764
1043771b 7765 CASE_CONVERT:
659d8efa 7766 case FLOAT_EXPR:
659d8efa 7767 case FIX_TRUNC_EXPR:
4b58fc4d
KH
7768 if (TREE_TYPE (op0) == type)
7769 return op0;
b8698a0f 7770
6416ae7f 7771 /* If we have (type) (a CMP b) and type is an integral type, return
d998dd65
AP
7772 new expression involving the new type. */
7773 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
db3927fb 7774 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
d998dd65 7775 TREE_OPERAND (op0, 1));
659d8efa
KH
7776
7777 /* Handle cases of two conversions in a row. */
1043771b 7778 if (CONVERT_EXPR_P (op0))
659d8efa 7779 {
4b58fc4d
KH
7780 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7781 tree inter_type = TREE_TYPE (op0);
659d8efa
KH
7782 int inside_int = INTEGRAL_TYPE_P (inside_type);
7783 int inside_ptr = POINTER_TYPE_P (inside_type);
7784 int inside_float = FLOAT_TYPE_P (inside_type);
4b8d544b 7785 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
659d8efa
KH
7786 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7787 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7788 int inter_int = INTEGRAL_TYPE_P (inter_type);
7789 int inter_ptr = POINTER_TYPE_P (inter_type);
7790 int inter_float = FLOAT_TYPE_P (inter_type);
4b8d544b 7791 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
659d8efa
KH
7792 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7793 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7794 int final_int = INTEGRAL_TYPE_P (type);
7795 int final_ptr = POINTER_TYPE_P (type);
7796 int final_float = FLOAT_TYPE_P (type);
4b8d544b 7797 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
659d8efa
KH
7798 unsigned int final_prec = TYPE_PRECISION (type);
7799 int final_unsignedp = TYPE_UNSIGNED (type);
7800
7801 /* In addition to the cases of two conversions in a row
7802 handled below, if we are converting something to its own
7803 type via an object of identical or wider precision, neither
7804 conversion is needed. */
7805 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
497cfe24
RG
7806 && (((inter_int || inter_ptr) && final_int)
7807 || (inter_float && final_float))
659d8efa 7808 && inter_prec >= final_prec)
db3927fb 7809 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
659d8efa 7810
1803581d
EB
7811 /* Likewise, if the intermediate and initial types are either both
7812 float or both integer, we don't need the middle conversion if the
7813 former is wider than the latter and doesn't change the signedness
7814 (for integers). Avoid this if the final type is a pointer since
7815 then we sometimes need the middle conversion. Likewise if the
7816 final type has a precision not equal to the size of its mode. */
6aa12f4f 7817 if (((inter_int && inside_int)
4b8d544b
JJ
7818 || (inter_float && inside_float)
7819 || (inter_vec && inside_vec))
659d8efa 7820 && inter_prec >= inside_prec
4b8d544b
JJ
7821 && (inter_float || inter_vec
7822 || inter_unsignedp == inside_unsignedp)
659d8efa
KH
7823 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7824 && TYPE_MODE (type) == TYPE_MODE (inter_type))
4b8d544b
JJ
7825 && ! final_ptr
7826 && (! final_vec || inter_prec == inside_prec))
db3927fb 7827 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
659d8efa
KH
7828
7829 /* If we have a sign-extension of a zero-extended value, we can
7830 replace that by a single zero-extension. */
7831 if (inside_int && inter_int && final_int
7832 && inside_prec < inter_prec && inter_prec < final_prec
7833 && inside_unsignedp && !inter_unsignedp)
db3927fb 7834 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
659d8efa
KH
7835
7836 /* Two conversions in a row are not needed unless:
7837 - some conversion is floating-point (overstrict for now), or
4b8d544b 7838 - some conversion is a vector (overstrict for now), or
659d8efa
KH
7839 - the intermediate type is narrower than both initial and
7840 final, or
7841 - the intermediate type and innermost type differ in signedness,
7842 and the outermost type is wider than the intermediate, or
7843 - the initial type is a pointer type and the precisions of the
7844 intermediate and final types differ, or
7845 - the final type is a pointer type and the precisions of the
c4e5b5a8 7846 initial and intermediate types differ. */
659d8efa 7847 if (! inside_float && ! inter_float && ! final_float
4b8d544b 7848 && ! inside_vec && ! inter_vec && ! final_vec
497cfe24 7849 && (inter_prec >= inside_prec || inter_prec >= final_prec)
659d8efa
KH
7850 && ! (inside_int && inter_int
7851 && inter_unsignedp != inside_unsignedp
7852 && inter_prec < final_prec)
7853 && ((inter_unsignedp && inter_prec > inside_prec)
7854 == (final_unsignedp && final_prec > inter_prec))
7855 && ! (inside_ptr && inter_prec != final_prec)
7856 && ! (final_ptr && inside_prec != inter_prec)
7857 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
c4e5b5a8 7858 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
db3927fb 7859 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
659d8efa
KH
7860 }
7861
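A minimal standalone check of the rule above (illustrative only, not part of fold-const.c): an intermediate widening conversion of the same signedness can be dropped when the outer conversion narrows back.

/* Sketch: dropping a redundant widening intermediate conversion.  */
#include <assert.h>

int
main (void)
{
  short s = -1234;
  /* inside type short, intermediate type long (wider, same signedness),
     final type int: the conversion through long changes nothing.  */
  assert ((int) (long) s == (int) s);
  return 0;
}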
46c0a59d 7862 /* Handle (T *)&A.B.C for A being of type T and B and C
a4174ebf 7863 living at offset zero. This occurs frequently in
46c0a59d
RG
7864 C++ upcasting and then accessing the base. */
7865 if (TREE_CODE (op0) == ADDR_EXPR
7866 && POINTER_TYPE_P (type)
7867 && handled_component_p (TREE_OPERAND (op0, 0)))
7868 {
7869 HOST_WIDE_INT bitsize, bitpos;
7870 tree offset;
7871 enum machine_mode mode;
7872 int unsignedp, volatilep;
7873 tree base = TREE_OPERAND (op0, 0);
7874 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7875 &mode, &unsignedp, &volatilep, false);
7876 /* If the reference was to a (constant) zero offset, we can use
7877 the address of the base if it has the same base type
2ea9dc64 7878 as the result type and the pointer type is unqualified. */
46c0a59d 7879 if (! offset && bitpos == 0
2ea9dc64 7880 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
46c0a59d 7881 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
2ea9dc64 7882 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
db3927fb
AH
7883 return fold_convert_loc (loc, type,
7884 build_fold_addr_expr_loc (loc, base));
46c0a59d
RG
7885 }
7886
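A small illustration of the address fold above, using hypothetical types (not from this file): when the accessed component sits at offset zero and the result type matches the containing object, the cast address collapses to the address of the object itself.

/* Sketch only; the struct names are made up.  */
#include <assert.h>

struct B { int i; };
struct A { struct B b; };	/* b lives at offset 0 within A.  */

int
main (void)
{
  struct A a;
  /* (struct A *) &a.b folds to &a: zero offset, matching base type.  */
  assert ((struct A *) &a.b == &a);
  return 0;
}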
726a989a
RB
7887 if (TREE_CODE (op0) == MODIFY_EXPR
7888 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
659d8efa 7889 /* Detect assigning a bitfield. */
726a989a 7890 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
07beea0d 7891 && DECL_BIT_FIELD
726a989a 7892 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
659d8efa
KH
7893 {
7894 /* Don't leave an assignment inside a conversion
7895 unless assigning a bitfield. */
db3927fb 7896 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
659d8efa 7897 /* First do the assignment, then return converted constant. */
6405f32f 7898 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
659d8efa
KH
7899 TREE_NO_WARNING (tem) = 1;
7900 TREE_USED (tem) = 1;
db3927fb 7901 SET_EXPR_LOCATION (tem, loc);
659d8efa
KH
7902 return tem;
7903 }
7904
7905 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7906 constant (if x has signed type, the sign bit cannot be set
bfab40f8
EB
7907 in c). This folds extension into the BIT_AND_EXPR.
7908 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7909 very likely don't have maximal range for their precision and this
7910 transformation effectively doesn't preserve non-maximal ranges. */
1e17e15a 7911 if (TREE_CODE (type) == INTEGER_TYPE
4b58fc4d 7912 && TREE_CODE (op0) == BIT_AND_EXPR
84fb43a1 7913 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
659d8efa 7914 {
3d8b2a98
ILT
7915 tree and_expr = op0;
7916 tree and0 = TREE_OPERAND (and_expr, 0);
7917 tree and1 = TREE_OPERAND (and_expr, 1);
659d8efa
KH
7918 int change = 0;
7919
3d8b2a98 7920 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
659d8efa 7921 || (TYPE_PRECISION (type)
3d8b2a98 7922 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
659d8efa
KH
7923 change = 1;
7924 else if (TYPE_PRECISION (TREE_TYPE (and1))
7925 <= HOST_BITS_PER_WIDE_INT
7926 && host_integerp (and1, 1))
7927 {
7928 unsigned HOST_WIDE_INT cst;
7929
7930 cst = tree_low_cst (and1, 1);
7931 cst &= (HOST_WIDE_INT) -1
7932 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7933 change = (cst == 0);
7934#ifdef LOAD_EXTEND_OP
7935 if (change
7936 && !flag_syntax_only
7937 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7938 == ZERO_EXTEND))
7939 {
ca5ba2a3 7940 tree uns = unsigned_type_for (TREE_TYPE (and0));
db3927fb
AH
7941 and0 = fold_convert_loc (loc, uns, and0);
7942 and1 = fold_convert_loc (loc, uns, and1);
659d8efa
KH
7943 }
7944#endif
7945 }
7946 if (change)
7947 {
9589f23e
AS
7948 tem = force_fit_type_double (type, tree_to_double_int (and1),
7949 0, TREE_OVERFLOW (and1));
db3927fb
AH
7950 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7951 fold_convert_loc (loc, type, and0), tem);
659d8efa
KH
7952 }
7953 }
7954
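A quick standalone check of the identity this fold relies on (illustrative values): when the constant clears the sign bit, sign extension commutes with the BIT_AND.

#include <assert.h>

int
main (void)
{
  signed char x = -5;
  signed char t = x & 0x7f;	/* narrow AND; 0x7f clears the sign bit.  */
  /* Extending the narrow result equals masking the extended value.  */
  assert ((int) t == ((int) x & 0x7f));
  return 0;
}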
5be014d5 7955 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
ac5a28a6 7956 when one of the new casts will fold away. Conservatively we assume
5be014d5
AP
7957 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7958 if (POINTER_TYPE_P (type)
7959 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
ac5a28a6
JH
7960 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7961 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7962 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
659d8efa
KH
7963 {
7964 tree arg00 = TREE_OPERAND (arg0, 0);
ac5a28a6
JH
7965 tree arg01 = TREE_OPERAND (arg0, 1);
7966
db3927fb
AH
7967 return fold_build2_loc (loc,
7968 TREE_CODE (arg0), type,
7969 fold_convert_loc (loc, type, arg00),
7970 fold_convert_loc (loc, sizetype, arg01));
659d8efa
KH
7971 }
7972
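An illustrative standalone check (not part of this file) that moving the cast inside a POINTER_PLUS leaves the byte offset unchanged.

#include <assert.h>

int
main (void)
{
  int buf[4];
  int *p = buf;
  /* (char *)(p + 1) is the same address as (char *)p plus the element
     size in bytes; the offset of the POINTER_PLUS is not rescaled.  */
  assert ((char *) (p + 1) == (char *) p + sizeof (int));
  return 0;
}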
e8206491 7973 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
110abdbc 7974 of the same precision, and X is an integer type not narrower than
e8206491
RS
7975 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7976 if (INTEGRAL_TYPE_P (type)
7977 && TREE_CODE (op0) == BIT_NOT_EXPR
7978 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
1043771b 7979 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
e8206491
RS
7980 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7981 {
7982 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7983 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7984 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
db3927fb
AH
7985 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7986 fold_convert_loc (loc, type, tem));
e8206491
RS
7987 }
7988
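A minimal check of the identity behind this fold (standalone sketch): with equal precision, the complement commutes with a sign-changing conversion.

#include <assert.h>

int
main (void)
{
  int x = -5;
  /* int and unsigned have the same precision, so the bit pattern of the
     complement is identical whichever side the conversion is on.  */
  assert ((int) ~(unsigned) x == ~x);
  return 0;
}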
c83bd37c
PB
7989 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7990 type of X and Y (integer types only). */
7991 if (INTEGRAL_TYPE_P (type)
7992 && TREE_CODE (op0) == MULT_EXPR
7993 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7994 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7995 {
7996 /* Be careful not to introduce new overflows. */
7997 tree mult_type;
7998 if (TYPE_OVERFLOW_WRAPS (type))
7999 mult_type = type;
8000 else
8001 mult_type = unsigned_type_for (type);
b7785654
JJ
8002
8003 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8004 {
db3927fb
AH
8005 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8006 fold_convert_loc (loc, mult_type,
8007 TREE_OPERAND (op0, 0)),
8008 fold_convert_loc (loc, mult_type,
8009 TREE_OPERAND (op0, 1)));
8010 return fold_convert_loc (loc, type, tem);
b7785654 8011 }
c83bd37c
PB
8012 }
8013
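A standalone sketch of the narrowing-multiplication identity (illustrative values): truncating after the product equals multiplying the truncated operands, since both reduce the result modulo 2^N.

#include <assert.h>

int
main (void)
{
  unsigned int x = 300, y = 77;
  unsigned char narrow = (unsigned char) ((unsigned char) x * (unsigned char) y);
  /* Both sides are the product reduced modulo 256.  */
  assert ((unsigned char) (x * y) == narrow);
  return 0;
}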
84ece8ef 8014 tem = fold_convert_const (code, type, op0);
62ab45cc 8015 return tem ? tem : NULL_TREE;
659d8efa 8016
09e881c9
BE
8017 case ADDR_SPACE_CONVERT_EXPR:
8018 if (integer_zerop (arg0))
8019 return fold_convert_const (code, type, arg0);
8020 return NULL_TREE;
8021
325217ed
CF
8022 case FIXED_CONVERT_EXPR:
8023 tem = fold_convert_const (code, type, arg0);
8024 return tem ? tem : NULL_TREE;
8025
659d8efa 8026 case VIEW_CONVERT_EXPR:
f85242f0
RS
8027 if (TREE_TYPE (op0) == type)
8028 return op0;
9a327766 8029 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
db3927fb
AH
8030 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8031 type, TREE_OPERAND (op0, 0));
70f34814
RG
8032 if (TREE_CODE (op0) == MEM_REF)
8033 return fold_build2_loc (loc, MEM_REF, type,
8034 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9a327766
RG
8035
8036 /* For integral conversions with the same precision or pointer
8037 conversions use a NOP_EXPR instead. */
3d45dd59
RG
8038 if ((INTEGRAL_TYPE_P (type)
8039 || POINTER_TYPE_P (type))
8040 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8041 || POINTER_TYPE_P (TREE_TYPE (op0)))
84fb43a1 8042 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
db3927fb 8043 return fold_convert_loc (loc, type, op0);
9a327766
RG
8044
8045 /* Strip inner integral conversions that do not change the precision. */
1043771b 8046 if (CONVERT_EXPR_P (op0)
3d45dd59
RG
8047 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8048 || POINTER_TYPE_P (TREE_TYPE (op0)))
8049 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8050 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
9a327766
RG
8051 && (TYPE_PRECISION (TREE_TYPE (op0))
8052 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
db3927fb
AH
8053 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8054 type, TREE_OPERAND (op0, 0));
9a327766 8055
78bf6e2f 8056 return fold_view_convert_expr (type, op0);
659d8efa
KH
8057
8058 case NEGATE_EXPR:
db3927fb 8059 tem = fold_negate_expr (loc, arg0);
1af8dcbf 8060 if (tem)
db3927fb 8061 return fold_convert_loc (loc, type, tem);
62ab45cc 8062 return NULL_TREE;
659d8efa
KH
8063
8064 case ABS_EXPR:
8065 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8066 return fold_abs_const (arg0, type);
8067 else if (TREE_CODE (arg0) == NEGATE_EXPR)
db3927fb 8068 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
659d8efa
KH
8069 /* Convert fabs((double)float) into (double)fabsf(float). */
8070 else if (TREE_CODE (arg0) == NOP_EXPR
8071 && TREE_CODE (type) == REAL_TYPE)
8072 {
8073 tree targ0 = strip_float_extensions (arg0);
8074 if (targ0 != arg0)
db3927fb
AH
8075 return fold_convert_loc (loc, type,
8076 fold_build1_loc (loc, ABS_EXPR,
8077 TREE_TYPE (targ0),
8078 targ0));
659d8efa 8079 }
1ade5842 8080 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
6ac01510
ILT
8081 else if (TREE_CODE (arg0) == ABS_EXPR)
8082 return arg0;
8083 else if (tree_expr_nonnegative_p (arg0))
659d8efa
KH
8084 return arg0;
8085
8086 /* Strip sign ops from argument. */
8087 if (TREE_CODE (type) == REAL_TYPE)
8088 {
8089 tem = fold_strip_sign_ops (arg0);
8090 if (tem)
db3927fb
AH
8091 return fold_build1_loc (loc, ABS_EXPR, type,
8092 fold_convert_loc (loc, type, tem));
659d8efa 8093 }
62ab45cc 8094 return NULL_TREE;
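Illustrative standalone checks of the ABS_EXPR folds above (not from this file): fabs of a widened float equals the widened fabsf, and nested absolute values collapse.

#include <assert.h>
#include <math.h>

int
main (void)
{
  float f = -2.5f;
  /* fabs ((double) f) and (double) fabsf (f) both merely clear the sign
     bit, and float-to-double conversion is exact.  */
  assert (fabs ((double) f) == (double) fabsf (f));
  /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x>.  */
  assert (fabsf (fabsf (f)) == fabsf (f));
  return 0;
}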
659d8efa
KH
8095
8096 case CONJ_EXPR:
8097 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
db3927fb 8098 return fold_convert_loc (loc, type, arg0);
9734ebaf
RS
8099 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8100 {
8101 tree itype = TREE_TYPE (type);
db3927fb
AH
8102 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8103 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8104 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8105 negate_expr (ipart));
9734ebaf
RS
8106 }
8107 if (TREE_CODE (arg0) == COMPLEX_CST)
8108 {
8109 tree itype = TREE_TYPE (type);
db3927fb
AH
8110 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8111 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
9734ebaf
RS
8112 return build_complex (type, rpart, negate_expr (ipart));
8113 }
8114 if (TREE_CODE (arg0) == CONJ_EXPR)
db3927fb 8115 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
62ab45cc 8116 return NULL_TREE;
659d8efa
KH
8117
8118 case BIT_NOT_EXPR:
8119 if (TREE_CODE (arg0) == INTEGER_CST)
8120 return fold_not_const (arg0, type);
8121 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
db3927fb 8122 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
659d8efa
KH
8123 /* Convert ~ (-A) to A - 1. */
8124 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
db3927fb
AH
8125 return fold_build2_loc (loc, MINUS_EXPR, type,
8126 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
7f20a5b7 8127 build_int_cst (type, 1));
659d8efa
KH
8128 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8129 else if (INTEGRAL_TYPE_P (type)
8130 && ((TREE_CODE (arg0) == MINUS_EXPR
8131 && integer_onep (TREE_OPERAND (arg0, 1)))
8132 || (TREE_CODE (arg0) == PLUS_EXPR
8133 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
db3927fb
AH
8134 return fold_build1_loc (loc, NEGATE_EXPR, type,
8135 fold_convert_loc (loc, type,
8136 TREE_OPERAND (arg0, 0)));
f242e769
JM
8137 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8138 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
db3927fb
AH
8139 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8140 fold_convert_loc (loc, type,
8141 TREE_OPERAND (arg0, 0)))))
8142 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8143 fold_convert_loc (loc, type,
8144 TREE_OPERAND (arg0, 1)));
f242e769 8145 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
db3927fb
AH
8146 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8147 fold_convert_loc (loc, type,
8148 TREE_OPERAND (arg0, 1)))))
8149 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8150 fold_convert_loc (loc, type,
8151 TREE_OPERAND (arg0, 0)), tem);
c01ee935
JJ
8152 /* Perform BIT_NOT_EXPR on each element individually. */
8153 else if (TREE_CODE (arg0) == VECTOR_CST)
8154 {
8155 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8156 int count = TYPE_VECTOR_SUBPARTS (type), i;
8157
8158 for (i = 0; i < count; i++)
8159 {
8160 if (elements)
8161 {
8162 elem = TREE_VALUE (elements);
db3927fb 8163 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
c01ee935
JJ
8164 if (elem == NULL_TREE)
8165 break;
8166 elements = TREE_CHAIN (elements);
8167 }
8168 else
8169 elem = build_int_cst (TREE_TYPE (type), -1);
8170 list = tree_cons (NULL_TREE, elem, list);
8171 }
8172 if (i == count)
8173 return build_vector (type, nreverse (list));
8174 }
f242e769 8175
62ab45cc 8176 return NULL_TREE;
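Quick standalone checks of the two's-complement identities used by the BIT_NOT_EXPR folds above (illustrative only).

#include <assert.h>

int
main (void)
{
  int a = 42;
  assert (~(-a) == a - 1);		/* ~ (-A) -> A - 1 */
  assert (~(a - 1) == -a);		/* ~ (A - 1) -> -A */
  assert ((~a ^ 5) == ~(a ^ 5));	/* ~(X ^ Y) -> ~X ^ Y */
  return 0;
}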
659d8efa
KH
8177
8178 case TRUTH_NOT_EXPR:
8179 /* The argument to invert_truthvalue must have Boolean type. */
8180 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
db3927fb 8181 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
659d8efa
KH
8182
8183 /* Note that the operand of this must be an int
8184 and its values must be 0 or 1.
8185 ("true" is a fixed value perhaps depending on the language,
8186 but we don't handle values other than 1 correctly yet.) */
db3927fb 8187 tem = fold_truth_not_expr (loc, arg0);
d817ed3b 8188 if (!tem)
62ab45cc 8189 return NULL_TREE;
db3927fb 8190 return fold_convert_loc (loc, type, tem);
659d8efa
KH
8191
8192 case REALPART_EXPR:
8193 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
db3927fb 8194 return fold_convert_loc (loc, type, arg0);
9734ebaf 8195 if (TREE_CODE (arg0) == COMPLEX_EXPR)
db3927fb 8196 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
659d8efa 8197 TREE_OPERAND (arg0, 1));
9734ebaf 8198 if (TREE_CODE (arg0) == COMPLEX_CST)
db3927fb 8199 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
9734ebaf
RS
8200 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8201 {
8202 tree itype = TREE_TYPE (TREE_TYPE (arg0));
db3927fb
AH
8203 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8204 fold_build1_loc (loc, REALPART_EXPR, itype,
9734ebaf 8205 TREE_OPERAND (arg0, 0)),
db3927fb 8206 fold_build1_loc (loc, REALPART_EXPR, itype,
9734ebaf 8207 TREE_OPERAND (arg0, 1)));
db3927fb 8208 return fold_convert_loc (loc, type, tem);
9734ebaf
RS
8209 }
8210 if (TREE_CODE (arg0) == CONJ_EXPR)
8211 {
8212 tree itype = TREE_TYPE (TREE_TYPE (arg0));
db3927fb
AH
8213 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8214 TREE_OPERAND (arg0, 0));
8215 return fold_convert_loc (loc, type, tem);
9734ebaf 8216 }
85aef79f
RG
8217 if (TREE_CODE (arg0) == CALL_EXPR)
8218 {
8219 tree fn = get_callee_fndecl (arg0);
111f1fca 8220 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
85aef79f
RG
8221 switch (DECL_FUNCTION_CODE (fn))
8222 {
8223 CASE_FLT_FN (BUILT_IN_CEXPI):
8224 fn = mathfn_built_in (type, BUILT_IN_COS);
2d38026b 8225 if (fn)
db3927fb 8226 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
2d38026b 8227 break;
85aef79f 8228
2d38026b
RS
8229 default:
8230 break;
85aef79f
RG
8231 }
8232 }
62ab45cc 8233 return NULL_TREE;
659d8efa
KH
8234
8235 case IMAGPART_EXPR:
8236 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
db3927fb 8237 return fold_convert_loc (loc, type, integer_zero_node);
9734ebaf 8238 if (TREE_CODE (arg0) == COMPLEX_EXPR)
db3927fb 8239 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
659d8efa 8240 TREE_OPERAND (arg0, 0));
9734ebaf 8241 if (TREE_CODE (arg0) == COMPLEX_CST)
db3927fb 8242 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
9734ebaf
RS
8243 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8244 {
8245 tree itype = TREE_TYPE (TREE_TYPE (arg0));
db3927fb
AH
8246 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8247 fold_build1_loc (loc, IMAGPART_EXPR, itype,
9734ebaf 8248 TREE_OPERAND (arg0, 0)),
db3927fb 8249 fold_build1_loc (loc, IMAGPART_EXPR, itype,
9734ebaf 8250 TREE_OPERAND (arg0, 1)));
db3927fb 8251 return fold_convert_loc (loc, type, tem);
9734ebaf
RS
8252 }
8253 if (TREE_CODE (arg0) == CONJ_EXPR)
8254 {
8255 tree itype = TREE_TYPE (TREE_TYPE (arg0));
db3927fb
AH
8256 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8257 return fold_convert_loc (loc, type, negate_expr (tem));
9734ebaf 8258 }
85aef79f
RG
8259 if (TREE_CODE (arg0) == CALL_EXPR)
8260 {
8261 tree fn = get_callee_fndecl (arg0);
111f1fca 8262 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
85aef79f
RG
8263 switch (DECL_FUNCTION_CODE (fn))
8264 {
8265 CASE_FLT_FN (BUILT_IN_CEXPI):
8266 fn = mathfn_built_in (type, BUILT_IN_SIN);
2d38026b 8267 if (fn)
db3927fb 8268 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
2d38026b 8269 break;
85aef79f 8270
2d38026b
RS
8271 default:
8272 break;
85aef79f
RG
8273 }
8274 }
62ab45cc 8275 return NULL_TREE;
659d8efa 8276
48f30f62
RG
8277 case INDIRECT_REF:
8278 /* Fold *&X to X if X is an lvalue. */
8279 if (TREE_CODE (op0) == ADDR_EXPR)
8280 {
8281 tree op00 = TREE_OPERAND (op0, 0);
8282 if ((TREE_CODE (op00) == VAR_DECL
8283 || TREE_CODE (op00) == PARM_DECL
8284 || TREE_CODE (op00) == RESULT_DECL)
8285 && !TREE_READONLY (op00))
8286 return op00;
8287 }
8288 return NULL_TREE;
8289
659d8efa 8290 default:
62ab45cc 8291 return NULL_TREE;
659d8efa
KH
8292 } /* switch (code) */
8293}
8294
9bacafeb
PB
8295
8296/* If the operation was a conversion, do _not_ mark a resulting constant
8297 with TREE_OVERFLOW if the original constant was not. These conversions
8298 have implementation defined behavior and retaining the TREE_OVERFLOW
8299 flag here would confuse later passes such as VRP. */
8300tree
db3927fb
AH
8301fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8302 tree type, tree op0)
9bacafeb 8303{
db3927fb 8304 tree res = fold_unary_loc (loc, code, type, op0);
9bacafeb
PB
8305 if (res
8306 && TREE_CODE (res) == INTEGER_CST
8307 && TREE_CODE (op0) == INTEGER_CST
8308 && CONVERT_EXPR_CODE_P (code))
8309 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8310
8311 return res;
8312}
8313
292f30c5
EB
8314/* Fold a binary expression of code CODE and type TYPE with operands
8315 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8316 Return the folded expression if folding is successful. Otherwise,
8317 return NULL_TREE. */
8318
8319static tree
db3927fb 8320fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
292f30c5
EB
8321{
8322 enum tree_code compl_code;
8323
8324 if (code == MIN_EXPR)
8325 compl_code = MAX_EXPR;
8326 else if (code == MAX_EXPR)
8327 compl_code = MIN_EXPR;
8328 else
5f180d36 8329 gcc_unreachable ();
292f30c5 8330
f0dbdfbb 8331 /* MIN (MAX (a, b), b) == b. */
292f30c5
EB
8332 if (TREE_CODE (op0) == compl_code
8333 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
db3927fb 8334 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
292f30c5 8335
f0dbdfbb 8336 /* MIN (MAX (b, a), b) == b. */
292f30c5
EB
8337 if (TREE_CODE (op0) == compl_code
8338 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8339 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
db3927fb 8340 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
292f30c5 8341
f0dbdfbb 8342 /* MIN (a, MAX (a, b)) == a. */
292f30c5
EB
8343 if (TREE_CODE (op1) == compl_code
8344 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8345 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
db3927fb 8346 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
292f30c5 8347
f0dbdfbb 8348 /* MIN (a, MAX (b, a)) == a. */
292f30c5
EB
8349 if (TREE_CODE (op1) == compl_code
8350 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8351 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
db3927fb 8352 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
292f30c5
EB
8353
8354 return NULL_TREE;
8355}
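A minimal standalone illustration of these MIN/MAX absorptions; the MIN and MAX macros below are hypothetical stand-ins for the MIN_EXPR/MAX_EXPR trees.

#include <assert.h>

#define MIN(a, b) ((a) < (b) ? (a) : (b))
#define MAX(a, b) ((a) > (b) ? (a) : (b))

int
main (void)
{
  int a = 3, b = 7;
  assert (MIN (MAX (a, b), b) == b);	/* MIN (MAX (a, b), b) == b */
  assert (MIN (a, MAX (a, b)) == a);	/* MIN (a, MAX (a, b)) == a */
  return 0;
}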
8356
e73dbcae
RG
8357/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8358 by changing CODE to reduce the magnitude of constants involved in
8359 ARG0 of the comparison.
8360 Returns a canonicalized comparison tree if a simplification was
6ac01510
ILT
8361 possible, otherwise returns NULL_TREE.
8362 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8363 valid if signed overflow is undefined. */
e73dbcae
RG
8364
8365static tree
db3927fb 8366maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
6ac01510
ILT
8367 tree arg0, tree arg1,
8368 bool *strict_overflow_p)
e73dbcae
RG
8369{
8370 enum tree_code code0 = TREE_CODE (arg0);
8371 tree t, cst0 = NULL_TREE;
8372 int sgn0;
8373 bool swap = false;
8374
0b45fd7a
RG
8375 /* Match A +- CST code arg1 and CST code arg1. We can change the
8376 first form only if overflow is undefined. */
8377 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8378 /* In principle pointers also have undefined overflow behavior,
8379 but that causes problems elsewhere. */
8380 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8381 && (code0 == MINUS_EXPR
8382 || code0 == PLUS_EXPR)
e73dbcae
RG
8383 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8384 || code0 == INTEGER_CST))
8385 return NULL_TREE;
8386
8387 /* Identify the constant in arg0 and its sign. */
8388 if (code0 == INTEGER_CST)
8389 cst0 = arg0;
8390 else
8391 cst0 = TREE_OPERAND (arg0, 1);
8392 sgn0 = tree_int_cst_sgn (cst0);
8393
8394 /* Overflowed constants and zero will cause problems. */
8395 if (integer_zerop (cst0)
8396 || TREE_OVERFLOW (cst0))
8397 return NULL_TREE;
8398
2f8e468b 8399 /* See if we can reduce the magnitude of the constant in
e73dbcae
RG
8400 arg0 by changing the comparison code. */
8401 if (code0 == INTEGER_CST)
8402 {
8403 /* CST <= arg1 -> CST-1 < arg1. */
8404 if (code == LE_EXPR && sgn0 == 1)
8405 code = LT_EXPR;
8406 /* -CST < arg1 -> -CST-1 <= arg1. */
8407 else if (code == LT_EXPR && sgn0 == -1)
8408 code = LE_EXPR;
8409 /* CST > arg1 -> CST-1 >= arg1. */
8410 else if (code == GT_EXPR && sgn0 == 1)
8411 code = GE_EXPR;
8412 /* -CST >= arg1 -> -CST-1 > arg1. */
8413 else if (code == GE_EXPR && sgn0 == -1)
8414 code = GT_EXPR;
8415 else
8416 return NULL_TREE;
8417 /* arg1 code' CST' might be more canonical. */
8418 swap = true;
8419 }
8420 else
8421 {
8422 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8423 if (code == LT_EXPR
8424 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8425 code = LE_EXPR;
8426 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8427 else if (code == GT_EXPR
8428 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8429 code = GE_EXPR;
8430 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8431 else if (code == LE_EXPR
8432 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8433 code = LT_EXPR;
8434 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8435 else if (code == GE_EXPR
8436 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8437 code = GT_EXPR;
8438 else
8439 return NULL_TREE;
6ac01510 8440 *strict_overflow_p = true;
e73dbcae
RG
8441 }
8442
0b45fd7a
RG
8443 /* Now build the constant reduced in magnitude. But not if that
8444 would produce one outside of its type's range. */
8445 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8446 && ((sgn0 == 1
8447 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8448 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8449 || (sgn0 == -1
8450 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8451 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8452 /* We cannot swap the comparison here as that would cause us to
8453 endlessly recurse. */
8454 return NULL_TREE;
8455
e73dbcae 8456 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
0b45fd7a 8457 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
e73dbcae 8458 if (code0 != INTEGER_CST)
db3927fb 8459 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
e73dbcae
RG
8460
8461 /* If swapping might yield to a more canonical form, do so. */
8462 if (swap)
db3927fb 8463 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
e73dbcae 8464 else
db3927fb 8465 return fold_build2_loc (loc, code, type, t, arg1);
e73dbcae
RG
8466}
8467
8468/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8469 overflow further. Try to decrease the magnitude of constants involved
8470 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8471 and put sole constants at the second argument position.
8472 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8473
8474static tree
db3927fb 8475maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
e73dbcae
RG
8476 tree arg0, tree arg1)
8477{
8478 tree t;
6ac01510
ILT
8479 bool strict_overflow_p;
8480 const char * const warnmsg = G_("assuming signed overflow does not occur "
8481 "when reducing constant in comparison");
e73dbcae 8482
e73dbcae 8483 /* Try canonicalization by simplifying arg0. */
6ac01510 8484 strict_overflow_p = false;
db3927fb 8485 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
6ac01510 8486 &strict_overflow_p);
e73dbcae 8487 if (t)
6ac01510
ILT
8488 {
8489 if (strict_overflow_p)
8490 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8491 return t;
8492 }
e73dbcae
RG
8493
8494 /* Try canonicalization by simplifying arg1 using the swapped
2f8e468b 8495 comparison. */
e73dbcae 8496 code = swap_tree_comparison (code);
6ac01510 8497 strict_overflow_p = false;
db3927fb 8498 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
6ac01510
ILT
8499 &strict_overflow_p);
8500 if (t && strict_overflow_p)
8501 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8502 return t;
e73dbcae
RG
8503}
8504
6e3c5c30
ILT
8505/* Return whether BASE + OFFSET + BITPOS may wrap around the address
8506 space. This is used to avoid issuing overflow warnings for
8507 expressions like &p->x which cannot wrap. */
8508
8509static bool
8510pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8511{
6e3c5c30 8512 unsigned HOST_WIDE_INT offset_low, total_low;
b2f06c39 8513 HOST_WIDE_INT size, offset_high, total_high;
6e3c5c30
ILT
8514
8515 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8516 return true;
8517
8518 if (bitpos < 0)
8519 return true;
8520
6e3c5c30
ILT
8521 if (offset == NULL_TREE)
8522 {
8523 offset_low = 0;
8524 offset_high = 0;
8525 }
8526 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8527 return true;
8528 else
8529 {
8530 offset_low = TREE_INT_CST_LOW (offset);
8531 offset_high = TREE_INT_CST_HIGH (offset);
8532 }
8533
8534 if (add_double_with_sign (offset_low, offset_high,
8535 bitpos / BITS_PER_UNIT, 0,
8536 &total_low, &total_high,
8537 true))
8538 return true;
8539
b2f06c39 8540 if (total_high != 0)
6e3c5c30 8541 return true;
b2f06c39
ILT
8542
8543 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8544 if (size <= 0)
8545 return true;
8546
8547 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8548 array. */
8549 if (TREE_CODE (base) == ADDR_EXPR)
8550 {
8551 HOST_WIDE_INT base_size;
8552
8553 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8554 if (base_size > 0 && size < base_size)
8555 size = base_size;
8556 }
8557
8558 return total_low > (unsigned HOST_WIDE_INT) size;
6e3c5c30
ILT
8559}
8560
e26ec0bb
RS
8561/* Subroutine of fold_binary. This routine performs all of the
8562 transformations that are common to the equality/inequality
8563 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8564 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8565 fold_binary should call fold_binary. Fold a comparison with
8566 tree code CODE and type TYPE with operands OP0 and OP1. Return
8567 the folded comparison or NULL_TREE. */
8568
8569static tree
db3927fb
AH
8570fold_comparison (location_t loc, enum tree_code code, tree type,
8571 tree op0, tree op1)
e26ec0bb
RS
8572{
8573 tree arg0, arg1, tem;
8574
8575 arg0 = op0;
8576 arg1 = op1;
8577
8578 STRIP_SIGN_NOPS (arg0);
8579 STRIP_SIGN_NOPS (arg1);
8580
8581 tem = fold_relational_const (code, type, arg0, arg1);
8582 if (tem != NULL_TREE)
8583 return tem;
8584
8585 /* If one arg is a real or integer constant, put it last. */
8586 if (tree_swap_operands_p (arg0, arg1, true))
db3927fb 8587 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
e26ec0bb 8588
e26ec0bb
RS
8589 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8590 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8591 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8592 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
eeef0e45 8593 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
e26ec0bb
RS
8594 && (TREE_CODE (arg1) == INTEGER_CST
8595 && !TREE_OVERFLOW (arg1)))
8596 {
8597 tree const1 = TREE_OPERAND (arg0, 1);
8598 tree const2 = arg1;
8599 tree variable = TREE_OPERAND (arg0, 0);
8600 tree lhs;
8601 int lhs_add;
8602 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8603
db3927fb 8604 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
e26ec0bb 8605 TREE_TYPE (arg1), const2, const1);
b44e7f07
ZD
8606
8607 /* If the constant operation overflowed this can be
8608 simplified as a comparison against INT_MAX/INT_MIN. */
8609 if (TREE_CODE (lhs) == INTEGER_CST
8610 && TREE_OVERFLOW (lhs))
8611 {
8612 int const1_sgn = tree_int_cst_sgn (const1);
8613 enum tree_code code2 = code;
8614
8615 /* Get the sign of the constant on the lhs if the
8616 operation were VARIABLE + CONST1. */
8617 if (TREE_CODE (arg0) == MINUS_EXPR)
8618 const1_sgn = -const1_sgn;
8619
8620 /* The sign of the constant determines if we overflowed
8621 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8622 Canonicalize to the INT_MIN overflow by swapping the comparison
8623 if necessary. */
8624 if (const1_sgn == -1)
8625 code2 = swap_tree_comparison (code);
8626
8627 /* We now can look at the canonicalized case
8628 VARIABLE + 1 CODE2 INT_MIN
8629 and decide on the result. */
8630 if (code2 == LT_EXPR
8631 || code2 == LE_EXPR
8632 || code2 == EQ_EXPR)
db3927fb 8633 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
b44e7f07
ZD
8634 else if (code2 == NE_EXPR
8635 || code2 == GE_EXPR
8636 || code2 == GT_EXPR)
db3927fb 8637 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
b44e7f07
ZD
8638 }
8639
e26ec0bb
RS
8640 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8641 && (TREE_CODE (lhs) != INTEGER_CST
8642 || !TREE_OVERFLOW (lhs)))
6ac01510 8643 {
49c8958b 8644 fold_overflow_warning ("assuming signed overflow does not occur "
6ac01510 8645 "when changing X +- C1 cmp C2 to "
49c8958b 8646 "X cmp C1 +- C2",
6ac01510 8647 WARN_STRICT_OVERFLOW_COMPARISON);
db3927fb 8648 return fold_build2_loc (loc, code, type, variable, lhs);
6ac01510 8649 }
e26ec0bb
RS
8650 }
8651
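An illustrative standalone check of the X +- C1 CMP C2 transformation above; small values keep signed arithmetic well away from overflow, which is the precondition the fold warns about.

#include <assert.h>

int
main (void)
{
  int x = 7;
  assert ((x + 3 < 12) == (x < 9));	/* X + C1 < C2  ->  X < C2 - C1 */
  assert ((x - 3 >= 2) == (x >= 5));	/* X - C1 >= C2 ->  X >= C2 + C1 */
  return 0;
}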
e015f578
RG
8652 /* For comparisons of pointers we can decompose it to a compile time
8653 comparison of the base objects and the offsets into the object.
3e0de255
RG
8654 This requires at least one operand being an ADDR_EXPR or a
8655 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
e015f578
RG
8656 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8657 && (TREE_CODE (arg0) == ADDR_EXPR
3e0de255
RG
8658 || TREE_CODE (arg1) == ADDR_EXPR
8659 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8660 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
e015f578
RG
8661 {
8662 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8663 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8664 enum machine_mode mode;
8665 int volatilep, unsignedp;
bd03c084 8666 bool indirect_base0 = false, indirect_base1 = false;
e015f578
RG
8667
8668 /* Get base and offset for the access. Strip ADDR_EXPR for
8669 get_inner_reference, but put it back by stripping INDIRECT_REF
bd03c084
RG
8670 off the base object if possible. indirect_baseN will be true
8671 if baseN is not an address but refers to the object itself. */
e015f578
RG
8672 base0 = arg0;
8673 if (TREE_CODE (arg0) == ADDR_EXPR)
8674 {
8675 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8676 &bitsize, &bitpos0, &offset0, &mode,
8677 &unsignedp, &volatilep, false);
8678 if (TREE_CODE (base0) == INDIRECT_REF)
8679 base0 = TREE_OPERAND (base0, 0);
8680 else
8681 indirect_base0 = true;
8682 }
3e0de255
RG
8683 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8684 {
8685 base0 = TREE_OPERAND (arg0, 0);
70f34814
RG
8686 if (TREE_CODE (base0) == ADDR_EXPR)
8687 {
8688 base0 = TREE_OPERAND (base0, 0);
8689 indirect_base0 = true;
8690 }
3e0de255
RG
8691 offset0 = TREE_OPERAND (arg0, 1);
8692 }
e015f578
RG
8693
8694 base1 = arg1;
8695 if (TREE_CODE (arg1) == ADDR_EXPR)
8696 {
8697 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8698 &bitsize, &bitpos1, &offset1, &mode,
8699 &unsignedp, &volatilep, false);
bd03c084 8700 if (TREE_CODE (base1) == INDIRECT_REF)
e015f578 8701 base1 = TREE_OPERAND (base1, 0);
bd03c084
RG
8702 else
8703 indirect_base1 = true;
e015f578 8704 }
3e0de255
RG
8705 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8706 {
8707 base1 = TREE_OPERAND (arg1, 0);
70f34814
RG
8708 if (TREE_CODE (base1) == ADDR_EXPR)
8709 {
8710 base1 = TREE_OPERAND (base1, 0);
8711 indirect_base1 = true;
8712 }
3e0de255
RG
8713 offset1 = TREE_OPERAND (arg1, 1);
8714 }
e015f578 8715
94e85e0a
XDL
8716 /* A local variable can never be pointed to by
8717 the default SSA name of an incoming parameter. */
8718 if ((TREE_CODE (arg0) == ADDR_EXPR
8719 && indirect_base0
8720 && TREE_CODE (base0) == VAR_DECL
8721 && auto_var_in_fn_p (base0, current_function_decl)
8722 && !indirect_base1
8723 && TREE_CODE (base1) == SSA_NAME
8724 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8725 && SSA_NAME_IS_DEFAULT_DEF (base1))
8726 || (TREE_CODE (arg1) == ADDR_EXPR
8727 && indirect_base1
8728 && TREE_CODE (base1) == VAR_DECL
8729 && auto_var_in_fn_p (base1, current_function_decl)
8730 && !indirect_base0
8731 && TREE_CODE (base0) == SSA_NAME
8732 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8733 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8734 {
8735 if (code == NE_EXPR)
8736 return constant_boolean_node (1, type);
8737 else if (code == EQ_EXPR)
8738 return constant_boolean_node (0, type);
8739 }
e015f578 8740 /* If we have equivalent bases we might be able to simplify. */
94e85e0a
XDL
8741 else if (indirect_base0 == indirect_base1
8742 && operand_equal_p (base0, base1, 0))
e015f578
RG
8743 {
8744 /* We can fold this expression to a constant if the non-constant
8745 offset parts are equal. */
6e3c5c30
ILT
8746 if ((offset0 == offset1
8747 || (offset0 && offset1
8748 && operand_equal_p (offset0, offset1, 0)))
8749 && (code == EQ_EXPR
8750 || code == NE_EXPR
8751 || POINTER_TYPE_OVERFLOW_UNDEFINED))
b8698a0f 8752
e015f578 8753 {
6e3c5c30
ILT
8754 if (code != EQ_EXPR
8755 && code != NE_EXPR
8756 && bitpos0 != bitpos1
8757 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8758 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8759 fold_overflow_warning (("assuming pointer wraparound does not "
8760 "occur when comparing P +- C1 with "
8761 "P +- C2"),
8762 WARN_STRICT_OVERFLOW_CONDITIONAL);
8763
e015f578
RG
8764 switch (code)
8765 {
8766 case EQ_EXPR:
b0331ccb 8767 return constant_boolean_node (bitpos0 == bitpos1, type);
e015f578 8768 case NE_EXPR:
b0331ccb 8769 return constant_boolean_node (bitpos0 != bitpos1, type);
e015f578 8770 case LT_EXPR:
b0331ccb 8771 return constant_boolean_node (bitpos0 < bitpos1, type);
e015f578 8772 case LE_EXPR:
b0331ccb 8773 return constant_boolean_node (bitpos0 <= bitpos1, type);
e015f578 8774 case GE_EXPR:
b0331ccb 8775 return constant_boolean_node (bitpos0 >= bitpos1, type);
e015f578 8776 case GT_EXPR:
b0331ccb 8777 return constant_boolean_node (bitpos0 > bitpos1, type);
e015f578
RG
8778 default:;
8779 }
8780 }
8781 /* We can simplify the comparison to a comparison of the variable
8782 offset parts if the constant offset parts are equal.
8783 Be careful to use signed size type here because otherwise we
8784 mess with array offsets in the wrong way. This is possible
8785 because pointer arithmetic is restricted to retain within an
8786 object and overflow on pointer differences is undefined as of
8787 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
4c9db6e0
ILT
8788 else if (bitpos0 == bitpos1
8789 && ((code == EQ_EXPR || code == NE_EXPR)
8790 || POINTER_TYPE_OVERFLOW_UNDEFINED))
e015f578 8791 {
e015f578
RG
8792 /* By converting to signed size type we cover middle-end pointer
8793 arithmetic which operates on unsigned pointer types of size
8794 type size and ARRAY_REF offsets which are properly sign or
8795 zero extended from their type in case it is narrower than
8796 size type. */
8797 if (offset0 == NULL_TREE)
3b9e5d95 8798 offset0 = build_int_cst (ssizetype, 0);
e015f578 8799 else
3b9e5d95 8800 offset0 = fold_convert_loc (loc, ssizetype, offset0);
e015f578 8801 if (offset1 == NULL_TREE)
3b9e5d95 8802 offset1 = build_int_cst (ssizetype, 0);
e015f578 8803 else
3b9e5d95 8804 offset1 = fold_convert_loc (loc, ssizetype, offset1);
e015f578 8805
6e3c5c30
ILT
8806 if (code != EQ_EXPR
8807 && code != NE_EXPR
8808 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8809 || pointer_may_wrap_p (base1, offset1, bitpos1)))
4c9db6e0
ILT
8810 fold_overflow_warning (("assuming pointer wraparound does not "
8811 "occur when comparing P +- C1 with "
8812 "P +- C2"),
8813 WARN_STRICT_OVERFLOW_COMPARISON);
8814
db3927fb 8815 return fold_build2_loc (loc, code, type, offset0, offset1);
e015f578
RG
8816 }
8817 }
bd03c084
RG
8818 /* For non-equal bases we can simplify if they are addresses
8819 of local binding decls or constants. */
8820 else if (indirect_base0 && indirect_base1
8821 /* We know that !operand_equal_p (base0, base1, 0)
ffd837fe
RG
8822 because the if condition was false. But make
8823 sure two decls are not the same. */
8824 && base0 != base1
bd03c084
RG
8825 && TREE_CODE (arg0) == ADDR_EXPR
8826 && TREE_CODE (arg1) == ADDR_EXPR
ffd837fe
RG
8827 && (((TREE_CODE (base0) == VAR_DECL
8828 || TREE_CODE (base0) == PARM_DECL)
bd03c084
RG
8829 && (targetm.binds_local_p (base0)
8830 || CONSTANT_CLASS_P (base1)))
8831 || CONSTANT_CLASS_P (base0))
ffd837fe
RG
8832 && (((TREE_CODE (base1) == VAR_DECL
8833 || TREE_CODE (base1) == PARM_DECL)
bd03c084
RG
8834 && (targetm.binds_local_p (base1)
8835 || CONSTANT_CLASS_P (base0)))
8836 || CONSTANT_CLASS_P (base1)))
8837 {
8838 if (code == EQ_EXPR)
db3927fb
AH
8839 return omit_two_operands_loc (loc, type, boolean_false_node,
8840 arg0, arg1);
bd03c084 8841 else if (code == NE_EXPR)
db3927fb
AH
8842 return omit_two_operands_loc (loc, type, boolean_true_node,
8843 arg0, arg1);
bd03c084
RG
8844 }
8845 /* For equal offsets we can simplify to a comparison of the
8846 base addresses. */
8847 else if (bitpos0 == bitpos1
8848 && (indirect_base0
8849 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8850 && (indirect_base1
8851 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8852 && ((offset0 == offset1)
8853 || (offset0 && offset1
8854 && operand_equal_p (offset0, offset1, 0))))
8855 {
8856 if (indirect_base0)
db3927fb 8857 base0 = build_fold_addr_expr_loc (loc, base0);
bd03c084 8858 if (indirect_base1)
db3927fb
AH
8859 base1 = build_fold_addr_expr_loc (loc, base1);
8860 return fold_build2_loc (loc, code, type, base0, base1);
bd03c084 8861 }
e015f578
RG
8862 }
8863
8a1eca08
RG
8864 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8865 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8866 the resulting offset is smaller in absolute value than the
8867 original one. */
eeef0e45 8868 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8a1eca08
RG
8869 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8870 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8871 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8872 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8873 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8874 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8875 {
8876 tree const1 = TREE_OPERAND (arg0, 1);
8877 tree const2 = TREE_OPERAND (arg1, 1);
8878 tree variable1 = TREE_OPERAND (arg0, 0);
8879 tree variable2 = TREE_OPERAND (arg1, 0);
8880 tree cst;
6ac01510
ILT
8881 const char * const warnmsg = G_("assuming signed overflow does not "
8882 "occur when combining constants around "
8883 "a comparison");
8a1eca08
RG
8884
8885 /* Put the constant on the side where it doesn't overflow and is
8886 of lower absolute value than before. */
8887 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8888 ? MINUS_EXPR : PLUS_EXPR,
8889 const2, const1, 0);
8890 if (!TREE_OVERFLOW (cst)
8891 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
6ac01510
ILT
8892 {
8893 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
db3927fb 8894 return fold_build2_loc (loc, code, type,
6ac01510 8895 variable1,
db3927fb
AH
8896 fold_build2_loc (loc,
8897 TREE_CODE (arg1), TREE_TYPE (arg1),
6ac01510
ILT
8898 variable2, cst));
8899 }
8a1eca08
RG
8900
8901 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8902 ? MINUS_EXPR : PLUS_EXPR,
8903 const1, const2, 0);
8904 if (!TREE_OVERFLOW (cst)
8905 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
6ac01510
ILT
8906 {
8907 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
db3927fb
AH
8908 return fold_build2_loc (loc, code, type,
8909 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
6ac01510
ILT
8910 variable1, cst),
8911 variable2);
8912 }
8a1eca08
RG
8913 }
8914
6b074ef6
RK
8915 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8916 signed arithmetic case. That form is created by the compiler
8917 often enough for folding it to be of value. One example is in
8918 computing loop trip counts after Operator Strength Reduction. */
eeef0e45 8919 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
6b074ef6
RK
8920 && TREE_CODE (arg0) == MULT_EXPR
8921 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8922 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8923 && integer_zerop (arg1))
8924 {
8925 tree const1 = TREE_OPERAND (arg0, 1);
8926 tree const2 = arg1; /* zero */
8927 tree variable1 = TREE_OPERAND (arg0, 0);
8928 enum tree_code cmp_code = code;
8929
eb12d0ae
RG
8930 /* Handle unfolded multiplication by zero. */
8931 if (integer_zerop (const1))
8932 return fold_build2_loc (loc, cmp_code, type, const1, const2);
6b074ef6 8933
6ac01510
ILT
8934 fold_overflow_warning (("assuming signed overflow does not occur when "
8935 "eliminating multiplication in comparison "
8936 "with zero"),
8937 WARN_STRICT_OVERFLOW_COMPARISON);
8938
6b074ef6
RK
8939 /* If const1 is negative we swap the sense of the comparison. */
8940 if (tree_int_cst_sgn (const1) < 0)
8941 cmp_code = swap_tree_comparison (cmp_code);
8942
db3927fb 8943 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
6b074ef6
RK
8944 }
8945
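A standalone check of the multiplication-elimination fold above (illustrative; the fold itself requires signed overflow to be undefined, so small values are used here).

#include <assert.h>

int
main (void)
{
  int x = -4;
  /* X * C1 CMP 0 becomes X CMP 0; the sense swaps when C1 is negative.  */
  assert ((x * 5 < 0) == (x < 0));
  assert ((x * -5 < 0) == (x > 0));
  return 0;
}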
db3927fb 8946 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
e73dbcae
RG
8947 if (tem)
8948 return tem;
8949
e26ec0bb
RS
8950 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8951 {
8952 tree targ0 = strip_float_extensions (arg0);
8953 tree targ1 = strip_float_extensions (arg1);
8954 tree newtype = TREE_TYPE (targ0);
8955
8956 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8957 newtype = TREE_TYPE (targ1);
8958
8959 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8960 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
db3927fb
AH
8961 return fold_build2_loc (loc, code, type,
8962 fold_convert_loc (loc, newtype, targ0),
8963 fold_convert_loc (loc, newtype, targ1));
e26ec0bb
RS
8964
8965 /* (-a) CMP (-b) -> b CMP a */
8966 if (TREE_CODE (arg0) == NEGATE_EXPR
8967 && TREE_CODE (arg1) == NEGATE_EXPR)
db3927fb 8968 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
e26ec0bb
RS
8969 TREE_OPERAND (arg0, 0));
8970
8971 if (TREE_CODE (arg1) == REAL_CST)
8972 {
8973 REAL_VALUE_TYPE cst;
8974 cst = TREE_REAL_CST (arg1);
8975
8976 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8977 if (TREE_CODE (arg0) == NEGATE_EXPR)
db3927fb 8978 return fold_build2_loc (loc, swap_tree_comparison (code), type,
e26ec0bb
RS
8979 TREE_OPERAND (arg0, 0),
8980 build_real (TREE_TYPE (arg1),
d49b6e1e 8981 real_value_negate (&cst)));
e26ec0bb
RS
8982
8983 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8984 /* a CMP (-0) -> a CMP 0 */
8985 if (REAL_VALUE_MINUS_ZERO (cst))
db3927fb 8986 return fold_build2_loc (loc, code, type, arg0,
e26ec0bb
RS
8987 build_real (TREE_TYPE (arg1), dconst0));
8988
8989 /* x != NaN is always true, other ops are always false. */
8990 if (REAL_VALUE_ISNAN (cst)
8991 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8992 {
8993 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
db3927fb 8994 return omit_one_operand_loc (loc, type, tem, arg0);
e26ec0bb
RS
8995 }
8996
8997 /* Fold comparisons against infinity. */
dc215785
UW
8998 if (REAL_VALUE_ISINF (cst)
8999 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
e26ec0bb 9000 {
db3927fb 9001 tem = fold_inf_compare (loc, code, type, arg0, arg1);
e26ec0bb
RS
9002 if (tem != NULL_TREE)
9003 return tem;
9004 }
9005 }
9006
9007 /* If this is a comparison of a real constant with a PLUS_EXPR
9008 or a MINUS_EXPR of a real constant, we can convert it into a
9009 comparison with a revised real constant as long as no overflow
9010 occurs when unsafe_math_optimizations are enabled. */
9011 if (flag_unsafe_math_optimizations
9012 && TREE_CODE (arg1) == REAL_CST
9013 && (TREE_CODE (arg0) == PLUS_EXPR
9014 || TREE_CODE (arg0) == MINUS_EXPR)
9015 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9016 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9017 ? MINUS_EXPR : PLUS_EXPR,
43a5d30b 9018 arg1, TREE_OPERAND (arg0, 1)))
455f14dd 9019 && !TREE_OVERFLOW (tem))
db3927fb 9020 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
e26ec0bb
RS
9021
9022 /* Likewise, we can simplify a comparison of a real constant with
9023 a MINUS_EXPR whose first operand is also a real constant, i.e.
b8698a0f 9024 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
a1a82611
RE
9025 floating-point types only if -fassociative-math is set. */
9026 if (flag_associative_math
e26ec0bb
RS
9027 && TREE_CODE (arg1) == REAL_CST
9028 && TREE_CODE (arg0) == MINUS_EXPR
9029 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9030 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
43a5d30b 9031 arg1))
455f14dd 9032 && !TREE_OVERFLOW (tem))
db3927fb 9033 return fold_build2_loc (loc, swap_tree_comparison (code), type,
e26ec0bb
RS
9034 TREE_OPERAND (arg0, 1), tem);
9035
9036 /* Fold comparisons against built-in math functions. */
9037 if (TREE_CODE (arg1) == REAL_CST
9038 && flag_unsafe_math_optimizations
9039 && ! flag_errno_math)
9040 {
9041 enum built_in_function fcode = builtin_mathfn_code (arg0);
9042
9043 if (fcode != END_BUILTINS)
9044 {
db3927fb 9045 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
e26ec0bb
RS
9046 if (tem != NULL_TREE)
9047 return tem;
9048 }
9049 }
9050 }
9051
e26ec0bb 9052 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
1043771b 9053 && CONVERT_EXPR_P (arg0))
e26ec0bb
RS
9054 {
9055 /* If we are widening one operand of an integer comparison,
9056 see if the other operand is similarly being widened. Perhaps we
9057 can do the comparison in the narrower type. */
db3927fb 9058 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
e26ec0bb
RS
9059 if (tem)
9060 return tem;
9061
9062 /* Or if we are changing signedness. */
db3927fb 9063 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
e26ec0bb
RS
9064 if (tem)
9065 return tem;
9066 }
9067
9068 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9069 constant, we can simplify it. */
9070 if (TREE_CODE (arg1) == INTEGER_CST
9071 && (TREE_CODE (arg0) == MIN_EXPR
9072 || TREE_CODE (arg0) == MAX_EXPR)
9073 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9074 {
db3927fb 9075 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
e26ec0bb
RS
9076 if (tem)
9077 return tem;
9078 }
9079
9080 /* Simplify comparison of something with itself. (For IEEE
9081 floating-point, we can only do some of these simplifications.) */
9082 if (operand_equal_p (arg0, arg1, 0))
9083 {
9084 switch (code)
9085 {
9086 case EQ_EXPR:
9087 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9088 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9089 return constant_boolean_node (1, type);
9090 break;
9091
9092 case GE_EXPR:
9093 case LE_EXPR:
9094 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9095 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9096 return constant_boolean_node (1, type);
db3927fb 9097 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
e26ec0bb
RS
9098
9099 case NE_EXPR:
9100 /* For NE, we can only do this simplification if integer
9101 or we don't honor IEEE floating point NaNs. */
9102 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9103 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9104 break;
9105 /* ... fall through ... */
9106 case GT_EXPR:
9107 case LT_EXPR:
9108 return constant_boolean_node (0, type);
9109 default:
9110 gcc_unreachable ();
9111 }
9112 }
9113
9114 /* If we are comparing an expression that just has comparisons
9115 of two integer values, arithmetic expressions of those comparisons,
9116 and constants, we can simplify it. There are only three cases
9117 to check: the two values can either be equal, the first can be
9118 greater, or the second can be greater. Fold the expression for
9119 those three values. Since each value must be 0 or 1, we have
9120 eight possibilities, each of which corresponds to the constant 0
9121 or 1 or one of the six possible comparisons.
9122
9123 This handles common cases like (a > b) == 0 but also handles
9124 expressions like ((x > y) - (y > x)) > 0, which supposedly
9125 occur in macroized code. */
9126
9127 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9128 {
9129 tree cval1 = 0, cval2 = 0;
9130 int save_p = 0;
9131
9132 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9133 /* Don't handle degenerate cases here; they should already
9134 have been handled anyway. */
9135 && cval1 != 0 && cval2 != 0
9136 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9137 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9138 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9139 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9140 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9141 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9142 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9143 {
9144 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9145 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9146
9147 /* We can't just pass T to eval_subst in case cval1 or cval2
9148 was the same as ARG1. */
9149
9150 tree high_result
db3927fb
AH
9151 = fold_build2_loc (loc, code, type,
9152 eval_subst (loc, arg0, cval1, maxval,
e26ec0bb
RS
9153 cval2, minval),
9154 arg1);
9155 tree equal_result
db3927fb
AH
9156 = fold_build2_loc (loc, code, type,
9157 eval_subst (loc, arg0, cval1, maxval,
e26ec0bb
RS
9158 cval2, maxval),
9159 arg1);
9160 tree low_result
db3927fb
AH
9161 = fold_build2_loc (loc, code, type,
9162 eval_subst (loc, arg0, cval1, minval,
e26ec0bb
RS
9163 cval2, maxval),
9164 arg1);
9165
9166 /* All three of these results should be 0 or 1. Confirm they are.
9167 Then use those values to select the proper code to use. */
9168
9169 if (TREE_CODE (high_result) == INTEGER_CST
9170 && TREE_CODE (equal_result) == INTEGER_CST
9171 && TREE_CODE (low_result) == INTEGER_CST)
9172 {
9173 /* Make a 3-bit mask with the high-order bit being the
9174 value for `>', the next for '=', and the low for '<'. */
9175 switch ((integer_onep (high_result) * 4)
9176 + (integer_onep (equal_result) * 2)
9177 + integer_onep (low_result))
9178 {
9179 case 0:
9180 /* Always false. */
db3927fb 9181 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
e26ec0bb
RS
9182 case 1:
9183 code = LT_EXPR;
9184 break;
9185 case 2:
9186 code = EQ_EXPR;
9187 break;
9188 case 3:
9189 code = LE_EXPR;
9190 break;
9191 case 4:
9192 code = GT_EXPR;
9193 break;
9194 case 5:
9195 code = NE_EXPR;
9196 break;
9197 case 6:
9198 code = GE_EXPR;
9199 break;
9200 case 7:
9201 /* Always true. */
db3927fb 9202 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
e26ec0bb
RS
9203 }
9204
9205 if (save_p)
db3927fb
AH
9206 {
9207 tem = save_expr (build2 (code, type, cval1, cval2));
9208 SET_EXPR_LOCATION (tem, loc);
9209 return tem;
9210 }
9211 return fold_build2_loc (loc, code, type, cval1, cval2);
e26ec0bb
RS
9212 }
9213 }
9214 }
9215
e26ec0bb
RS
9216 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9217 into a single range test. */
9218 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9219 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9220 && TREE_CODE (arg1) == INTEGER_CST
9221 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9222 && !integer_zerop (TREE_OPERAND (arg0, 1))
9223 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9224 && !TREE_OVERFLOW (arg1))
9225 {
db3927fb 9226 tem = fold_div_compare (loc, code, type, arg0, arg1);
e26ec0bb
RS
9227 if (tem != NULL_TREE)
9228 return tem;
9229 }
9230
c159ffe7
RS
9231 /* Fold ~X op ~Y as Y op X. */
9232 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9233 && TREE_CODE (arg1) == BIT_NOT_EXPR)
270d43bf
RS
9234 {
9235 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
db3927fb
AH
9236 return fold_build2_loc (loc, code, type,
9237 fold_convert_loc (loc, cmp_type,
9238 TREE_OPERAND (arg1, 0)),
270d43bf
RS
9239 TREE_OPERAND (arg0, 0));
9240 }
c159ffe7
RS
9241
9242 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9243 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9244 && TREE_CODE (arg1) == INTEGER_CST)
270d43bf
RS
9245 {
9246 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
db3927fb 9247 return fold_build2_loc (loc, swap_tree_comparison (code), type,
270d43bf 9248 TREE_OPERAND (arg0, 0),
db3927fb
AH
9249 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9250 fold_convert_loc (loc, cmp_type, arg1)));
270d43bf 9251 }
c159ffe7 9252
e26ec0bb
RS
9253 return NULL_TREE;
9254}
9255
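A standalone illustration of the last two comparison folds in fold_comparison (~X op ~Y as Y op X, and ~X op C as X op' ~C), using plain ints.

#include <assert.h>

int
main (void)
{
  int x = 5, y = 9, c = 3;
  assert ((~x < ~y) == (y < x));	/* ~X op ~Y  ->  Y op X */
  assert ((~x == c) == (x == ~c));	/* ~X op C   ->  X op' ~C */
  return 0;
}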
99b25753
RS
9256
9257/* Subroutine of fold_binary. Optimize complex multiplications of the
9258 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9259 argument EXPR represents the expression "z" of type TYPE. */
9260
9261static tree
db3927fb 9262fold_mult_zconjz (location_t loc, tree type, tree expr)
99b25753
RS
9263{
9264 tree itype = TREE_TYPE (type);
9265 tree rpart, ipart, tem;
9266
9267 if (TREE_CODE (expr) == COMPLEX_EXPR)
9268 {
9269 rpart = TREE_OPERAND (expr, 0);
9270 ipart = TREE_OPERAND (expr, 1);
9271 }
9272 else if (TREE_CODE (expr) == COMPLEX_CST)
9273 {
9274 rpart = TREE_REALPART (expr);
9275 ipart = TREE_IMAGPART (expr);
9276 }
9277 else
9278 {
9279 expr = save_expr (expr);
db3927fb
AH
9280 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9281 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
99b25753
RS
9282 }
9283
9284 rpart = save_expr (rpart);
9285 ipart = save_expr (ipart);
db3927fb
AH
9286 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9287 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9288 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9289 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9290 fold_convert_loc (loc, itype, integer_zero_node));
99b25753
RS
9291}
9292
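/* An illustrative sketch (not part of fold-const.c): the source-level
   pattern fold_mult_zconjz targets, written with the GNU C complex
   extensions (~z is the complex conjugate, __real__ selects the real
   part).  The helper name is only for illustration.  */

static double
norm_squared (double _Complex z)
{
  /* z * ~z is purely real, so the multiplication can be folded into
     __real__ z * __real__ z + __imag__ z * __imag__ z with a zero
     imaginary part, exactly the COMPLEX_EXPR built above.  */
  return __real__ (z * ~z);
}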
9293
e5901cad
OW
9294/* Subroutine of fold_binary. If P is the value of EXPR, computes
9295 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9296 guarantees that P and N have the same least significant log2(M) bits.
9297 N is not otherwise constrained. In particular, N is not normalized to
9298 0 <= N < M as is common. In general, the precise value of P is unknown.
9299 M is chosen as large as possible such that constant N can be determined.
9300
617f3897
MJ
9301 Returns M and sets *RESIDUE to N.
9302
9303 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9304 account. This is not always possible due to PR 35705.
9305 */
e5901cad
OW
9306
9307static unsigned HOST_WIDE_INT
617f3897
MJ
9308get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9309 bool allow_func_align)
e5901cad
OW
9310{
9311 enum tree_code code;
9312
9313 *residue = 0;
9314
9315 code = TREE_CODE (expr);
9316 if (code == ADDR_EXPR)
9317 {
9318 expr = TREE_OPERAND (expr, 0);
9319 if (handled_component_p (expr))
9320 {
9321 HOST_WIDE_INT bitsize, bitpos;
9322 tree offset;
9323 enum machine_mode mode;
9324 int unsignedp, volatilep;
9325
9326 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9327 &mode, &unsignedp, &volatilep, false);
9328 *residue = bitpos / BITS_PER_UNIT;
9329 if (offset)
9330 {
9331 if (TREE_CODE (offset) == INTEGER_CST)
9332 *residue += TREE_INT_CST_LOW (offset);
9333 else
9334 /* We don't handle more complicated offset expressions. */
9335 return 1;
9336 }
9337 }
9338
617f3897
MJ
9339 if (DECL_P (expr)
9340 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
e5901cad
OW
9341 return DECL_ALIGN_UNIT (expr);
9342 }
9343 else if (code == POINTER_PLUS_EXPR)
9344 {
9345 tree op0, op1;
9346 unsigned HOST_WIDE_INT modulus;
9347 enum tree_code inner_code;
b8698a0f 9348
e5901cad
OW
9349 op0 = TREE_OPERAND (expr, 0);
9350 STRIP_NOPS (op0);
617f3897
MJ
9351 modulus = get_pointer_modulus_and_residue (op0, residue,
9352 allow_func_align);
e5901cad
OW
9353
9354 op1 = TREE_OPERAND (expr, 1);
9355 STRIP_NOPS (op1);
9356 inner_code = TREE_CODE (op1);
9357 if (inner_code == INTEGER_CST)
9358 {
9359 *residue += TREE_INT_CST_LOW (op1);
9360 return modulus;
9361 }
9362 else if (inner_code == MULT_EXPR)
9363 {
9364 op1 = TREE_OPERAND (op1, 1);
9365 if (TREE_CODE (op1) == INTEGER_CST)
9366 {
9367 unsigned HOST_WIDE_INT align;
b8698a0f 9368
e5901cad
OW
9369 /* Compute the greatest power-of-2 divisor of op1. */
9370 align = TREE_INT_CST_LOW (op1);
9371 align &= -align;
9372
9373 /* If align is non-zero and less than *modulus, replace
9374		     *modulus with align.  If align is 0, then either op1 is 0

9375 or the greatest power-of-2 divisor of op1 doesn't fit in an
9376 unsigned HOST_WIDE_INT. In either case, no additional
9377 constraint is imposed. */
9378 if (align)
9379 modulus = MIN (modulus, align);
9380
9381 return modulus;
9382 }
9383 }
9384 }
9385
9386 /* If we get here, we were unable to determine anything useful about the
9387 expression. */
9388 return 1;
9389}
9390
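/* For example, if EXPR is &buf p+ 4 and buf is a declaration whose
   DECL_ALIGN_UNIT is 16, get_pointer_modulus_and_residue returns a modulus
   of 16 and sets *residue to 4: the pointer value is known to be congruent
   to 4 modulo 16.  */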
9391
7107fa7c 9392/* Fold a binary expression of code CODE and type TYPE with operands
db3927fb
AH
9393 OP0 and OP1. LOC is the location of the resulting expression.
9394 Return the folded expression if folding is successful. Otherwise,
9395 return NULL_TREE. */
0aee4751 9396
721425b6 9397tree
db3927fb
AH
9398fold_binary_loc (location_t loc,
9399 enum tree_code code, tree type, tree op0, tree op1)
0aee4751 9400{
0aee4751 9401 enum tree_code_class kind = TREE_CODE_CLASS (code);
e26ec0bb
RS
9402 tree arg0, arg1, tem;
9403 tree t1 = NULL_TREE;
6ac01510 9404 bool strict_overflow_p;
0aee4751 9405
726a989a 9406 gcc_assert (IS_EXPR_CODE_CLASS (kind)
fd6c76f4
RS
9407 && TREE_CODE_LENGTH (code) == 2
9408 && op0 != NULL_TREE
9409 && op1 != NULL_TREE);
0aee4751 9410
fbaa905c
KH
9411 arg0 = op0;
9412 arg1 = op1;
1eaea409 9413
fd6c76f4
RS
9414 /* Strip any conversions that don't change the mode. This is
9415 safe for every expression, except for a comparison expression
9416 because its signedness is derived from its operands. So, in
9417 the latter case, only strip conversions that don't change the
f61edbf6
JJ
9418	     signedness.  MIN_EXPR/MAX_EXPR also need the signedness of their
9419	     arguments preserved.
0aee4751 9420
fd6c76f4
RS
9421 Note that this is done as an internal manipulation within the
9422 constant folder, in order to find the simplest representation
9423 of the arguments so that their form can be studied. In any
9424 cases, the appropriate type conversions should be put back in
9425 the tree that will get out of the constant folder. */
0aee4751 9426
f61edbf6 9427 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
fd6c76f4
RS
9428 {
9429 STRIP_SIGN_NOPS (arg0);
9430 STRIP_SIGN_NOPS (arg1);
1eaea409 9431 }
fd6c76f4 9432 else
1eaea409 9433 {
fd6c76f4
RS
9434 STRIP_NOPS (arg0);
9435 STRIP_NOPS (arg1);
9436 }
0aee4751 9437
fd6c76f4
RS
9438 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9439 constant but we can't do arithmetic on them. */
9440 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9441 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
325217ed
CF
9442 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9443 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
fd6c76f4
RS
9444 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9445 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9446 {
9447 if (kind == tcc_binary)
325217ed
CF
9448 {
9449 /* Make sure type and arg0 have the same saturating flag. */
9450 gcc_assert (TYPE_SATURATING (type)
9451 == TYPE_SATURATING (TREE_TYPE (arg0)));
43a5d30b 9452 tem = const_binop (code, arg0, arg1);
325217ed 9453 }
fd6c76f4
RS
9454 else if (kind == tcc_comparison)
9455 tem = fold_relational_const (code, type, arg0, arg1);
1eaea409 9456 else
fd6c76f4 9457 tem = NULL_TREE;
1eaea409 9458
fd6c76f4
RS
9459 if (tem != NULL_TREE)
9460 {
9461 if (TREE_TYPE (tem) != type)
db3927fb 9462 tem = fold_convert_loc (loc, type, tem);
fd6c76f4
RS
9463 return tem;
9464 }
0aee4751
KH
9465 }
9466
9467 /* If this is a commutative operation, and ARG0 is a constant, move it
9468 to ARG1 to reduce the number of tests below. */
9469 if (commutative_tree_code (code)
9470 && tree_swap_operands_p (arg0, arg1, true))
db3927fb 9471 return fold_build2_loc (loc, code, type, op1, op0);
0aee4751 9472
fd6c76f4 9473 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
0aee4751
KH
9474
9475 First check for cases where an arithmetic operation is applied to a
9476 compound, conditional, or comparison operation. Push the arithmetic
9477 operation inside the compound or conditional to see if any folding
9478 can then be done. Convert comparison to conditional for this purpose.
9479 The also optimizes non-constant cases that used to be done in
9480	     This also optimizes non-constant cases that used to be done in
9481
9482	     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9483 one of the operands is a comparison and the other is a comparison, a
9484 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9485 code below would make the expression more complex. Change it to a
9486 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9487 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9488
9489 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9490 || code == EQ_EXPR || code == NE_EXPR)
9491 && ((truth_value_p (TREE_CODE (arg0))
9492 && (truth_value_p (TREE_CODE (arg1))
9493 || (TREE_CODE (arg1) == BIT_AND_EXPR
9494 && integer_onep (TREE_OPERAND (arg1, 1)))))
9495 || (truth_value_p (TREE_CODE (arg1))
9496 && (truth_value_p (TREE_CODE (arg0))
9497 || (TREE_CODE (arg0) == BIT_AND_EXPR
9498 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9499 {
db3927fb 9500 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7f20a5b7
KH
9501 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9502 : TRUTH_XOR_EXPR,
9503 boolean_type_node,
db3927fb
AH
9504 fold_convert_loc (loc, boolean_type_node, arg0),
9505 fold_convert_loc (loc, boolean_type_node, arg1));
0aee4751
KH
9506
9507 if (code == EQ_EXPR)
db3927fb 9508 tem = invert_truthvalue_loc (loc, tem);
0aee4751 9509
db3927fb 9510 return fold_convert_loc (loc, type, tem);
0aee4751
KH
9511 }
9512
4c17e288
RG
9513 if (TREE_CODE_CLASS (code) == tcc_binary
9514 || TREE_CODE_CLASS (code) == tcc_comparison)
0aee4751
KH
9515 {
9516 if (TREE_CODE (arg0) == COMPOUND_EXPR)
db3927fb
AH
9517 {
9518 tem = fold_build2_loc (loc, code, type,
9519 fold_convert_loc (loc, TREE_TYPE (op0),
9520 TREE_OPERAND (arg0, 1)), op1);
db3927fb
AH
9521 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
9522 goto fold_binary_exit;
9523 }
0aee4751
KH
9524 if (TREE_CODE (arg1) == COMPOUND_EXPR
9525 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
db3927fb
AH
9526 {
9527 tem = fold_build2_loc (loc, code, type, op0,
9528 fold_convert_loc (loc, TREE_TYPE (op1),
9529 TREE_OPERAND (arg1, 1)));
db3927fb
AH
9530 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
9531 goto fold_binary_exit;
9532 }
0aee4751
KH
9533
9534 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9535 {
db3927fb 9536 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
b8698a0f 9537 arg0, arg1,
0aee4751
KH
9538 /*cond_first_p=*/1);
9539 if (tem != NULL_TREE)
9540 return tem;
9541 }
9542
9543 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9544 {
db3927fb 9545 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
b8698a0f 9546 arg1, arg0,
0aee4751
KH
9547 /*cond_first_p=*/0);
9548 if (tem != NULL_TREE)
9549 return tem;
9550 }
9551 }
9552
9553 switch (code)
9554 {
70f34814
RG
9555 case MEM_REF:
9556 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9557 if (TREE_CODE (arg0) == ADDR_EXPR
9558 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9559 {
9560 tree iref = TREE_OPERAND (arg0, 0);
9561 return fold_build2 (MEM_REF, type,
9562 TREE_OPERAND (iref, 0),
9563 int_const_binop (PLUS_EXPR, arg1,
9564 TREE_OPERAND (iref, 1), 0));
9565 }
9566
9567 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9568 if (TREE_CODE (arg0) == ADDR_EXPR
9569 && handled_component_p (TREE_OPERAND (arg0, 0)))
9570 {
9571 tree base;
9572 HOST_WIDE_INT coffset;
9573 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9574 &coffset);
9575 if (!base)
9576 return NULL_TREE;
9577 return fold_build2 (MEM_REF, type,
9578 build_fold_addr_expr (base),
9579 int_const_binop (PLUS_EXPR, arg1,
9580 size_int (coffset), 0));
9581 }
9582
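	  /* For instance, MEM[&MEM[p, 4], 8] becomes MEM[p, 12] by the
	     first fold, and a handled component such as MEM[&a.b, 8]
	     becomes MEM[&a, offsetof (a, b) + 8] by the second.  */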
9583 return NULL_TREE;
9584
5be014d5
AP
9585 case POINTER_PLUS_EXPR:
9586 /* 0 +p index -> (type)index */
9587 if (integer_zerop (arg0))
db3927fb 9588 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5be014d5
AP
9589
9590 /* PTR +p 0 -> PTR */
9591 if (integer_zerop (arg1))
db3927fb 9592 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
5be014d5
AP
9593
9594 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9595 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9596 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
db3927fb
AH
9597 return fold_convert_loc (loc, type,
9598 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9599 fold_convert_loc (loc, sizetype,
9600 arg1),
9601 fold_convert_loc (loc, sizetype,
9602 arg0)));
5be014d5 9603
f7d1e0c6
RG
9604 /* index +p PTR -> PTR +p index */
9605 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9606 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
db3927fb
AH
9607 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9608 fold_convert_loc (loc, type, arg1),
9609 fold_convert_loc (loc, sizetype, arg0));
f7d1e0c6 9610
5be014d5
AP
9611 /* (PTR +p B) +p A -> PTR +p (B + A) */
9612 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9613 {
9614 tree inner;
db3927fb 9615 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
5be014d5 9616 tree arg00 = TREE_OPERAND (arg0, 0);
db3927fb
AH
9617 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9618 arg01, fold_convert_loc (loc, sizetype, arg1));
9619 return fold_convert_loc (loc, type,
9620 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9621 TREE_TYPE (arg00),
9622 arg00, inner));
5be014d5
AP
9623 }
9624
9625 /* PTR_CST +p CST -> CST1 */
9626 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
db3927fb
AH
9627 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9628 fold_convert_loc (loc, type, arg1));
5be014d5
AP
9629
9630	      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9631		 of the array.  The loop optimizer sometimes produces this type of
9632		 expression.  */
9633 if (TREE_CODE (arg0) == ADDR_EXPR)
9634 {
db3927fb
AH
9635 tem = try_move_mult_to_index (loc, arg0,
9636 fold_convert_loc (loc, sizetype, arg1));
5be014d5 9637 if (tem)
db3927fb 9638 return fold_convert_loc (loc, type, tem);
5be014d5
AP
9639 }
9640
9641 return NULL_TREE;
8015455a 9642
0aee4751
KH
9643 case PLUS_EXPR:
9644 /* A + (-B) -> A - B */
9645 if (TREE_CODE (arg1) == NEGATE_EXPR)
db3927fb
AH
9646 return fold_build2_loc (loc, MINUS_EXPR, type,
9647 fold_convert_loc (loc, type, arg0),
9648 fold_convert_loc (loc, type,
9649 TREE_OPERAND (arg1, 0)));
0aee4751
KH
9650 /* (-A) + B -> B - A */
9651 if (TREE_CODE (arg0) == NEGATE_EXPR
9652 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
db3927fb
AH
9653 return fold_build2_loc (loc, MINUS_EXPR, type,
9654 fold_convert_loc (loc, type, arg1),
9655 fold_convert_loc (loc, type,
9656 TREE_OPERAND (arg0, 0)));
0ed9a3e3 9657
c22f6d33 9658 if (INTEGRAL_TYPE_P (type))
0aee4751 9659 {
c22f6d33
UB
9660 /* Convert ~A + 1 to -A. */
9661 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9662 && integer_onep (arg1))
db3927fb
AH
9663 return fold_build1_loc (loc, NEGATE_EXPR, type,
9664 fold_convert_loc (loc, type,
9665 TREE_OPERAND (arg0, 0)));
0aee4751 9666
870aa1eb
RS
9667 /* ~X + X is -1. */
9668 if (TREE_CODE (arg0) == BIT_NOT_EXPR
eeef0e45 9669 && !TYPE_OVERFLOW_TRAPS (type))
870aa1eb 9670 {
a49c5793
SP
9671 tree tem = TREE_OPERAND (arg0, 0);
9672
9673 STRIP_NOPS (tem);
9674 if (operand_equal_p (tem, arg1, 0))
9675 {
9676 t1 = build_int_cst_type (type, -1);
db3927fb 9677 return omit_one_operand_loc (loc, type, t1, arg1);
a49c5793 9678 }
870aa1eb
RS
9679 }
9680
9681 /* X + ~X is -1. */
9682 if (TREE_CODE (arg1) == BIT_NOT_EXPR
eeef0e45 9683 && !TYPE_OVERFLOW_TRAPS (type))
870aa1eb 9684 {
a49c5793
SP
9685 tree tem = TREE_OPERAND (arg1, 0);
9686
9687 STRIP_NOPS (tem);
9688 if (operand_equal_p (arg0, tem, 0))
9689 {
9690 t1 = build_int_cst_type (type, -1);
db3927fb 9691 return omit_one_operand_loc (loc, type, t1, arg0);
a49c5793
SP
9692 }
9693 }
65648dd4
RG
9694
9695 /* X + (X / CST) * -CST is X % CST. */
9696 if (TREE_CODE (arg1) == MULT_EXPR
9697 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9698 && operand_equal_p (arg0,
9699 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9700 {
9701 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9702 tree cst1 = TREE_OPERAND (arg1, 1);
db3927fb
AH
9703 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9704 cst1, cst0);
65648dd4 9705 if (sum && integer_zerop (sum))
db3927fb
AH
9706 return fold_convert_loc (loc, type,
9707 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9708 TREE_TYPE (arg0), arg0,
9709 cst0));
65648dd4 9710 }
c22f6d33
UB
9711 }
9712
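	  /* Examples of the integral folds above: ~a + 1 becomes -a,
	     a + ~a becomes -1 (two's complement), and x + (x / 8) * -8
	     becomes x % 8.  */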
9713 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
a1a82611
RE
9714 same or one. Make sure type is not saturating.
9715 fold_plusminus_mult_expr will re-associate. */
c22f6d33
UB
9716 if ((TREE_CODE (arg0) == MULT_EXPR
9717 || TREE_CODE (arg1) == MULT_EXPR)
325217ed 9718 && !TYPE_SATURATING (type)
a1a82611 9719 && (!FLOAT_TYPE_P (type) || flag_associative_math))
c22f6d33 9720 {
db3927fb 9721 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
c22f6d33
UB
9722 if (tem)
9723 return tem;
9724 }
9725
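      /* For instance, a*3 + a*5 is re-associated to a*8 here; for
	 floating-point types this is done only under -fassociative-math.  */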
9726 if (! FLOAT_TYPE_P (type))
9727 {
9728 if (integer_zerop (arg1))
db3927fb 9729 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
870aa1eb 9730
0aee4751
KH
9731 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9732 with a constant, and the two constants have no bits in common,
9733 we should treat this as a BIT_IOR_EXPR since this may produce more
9734 simplifications. */
9735 if (TREE_CODE (arg0) == BIT_AND_EXPR
9736 && TREE_CODE (arg1) == BIT_AND_EXPR
9737 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9738 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9739 && integer_zerop (const_binop (BIT_AND_EXPR,
9740 TREE_OPERAND (arg0, 1),
43a5d30b 9741 TREE_OPERAND (arg1, 1))))
0aee4751
KH
9742 {
9743 code = BIT_IOR_EXPR;
9744 goto bit_ior;
9745 }
9746
9747 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9748 (plus (plus (mult) (mult)) (foo)) so that we can
9749 take advantage of the factoring cases below. */
9750 if (((TREE_CODE (arg0) == PLUS_EXPR
9751 || TREE_CODE (arg0) == MINUS_EXPR)
9752 && TREE_CODE (arg1) == MULT_EXPR)
9753 || ((TREE_CODE (arg1) == PLUS_EXPR
9754 || TREE_CODE (arg1) == MINUS_EXPR)
9755 && TREE_CODE (arg0) == MULT_EXPR))
9756 {
9757 tree parg0, parg1, parg, marg;
9758 enum tree_code pcode;
9759
9760 if (TREE_CODE (arg1) == MULT_EXPR)
9761 parg = arg0, marg = arg1;
9762 else
9763 parg = arg1, marg = arg0;
9764 pcode = TREE_CODE (parg);
9765 parg0 = TREE_OPERAND (parg, 0);
9766 parg1 = TREE_OPERAND (parg, 1);
9767 STRIP_NOPS (parg0);
9768 STRIP_NOPS (parg1);
9769
9770 if (TREE_CODE (parg0) == MULT_EXPR
9771 && TREE_CODE (parg1) != MULT_EXPR)
db3927fb
AH
9772 return fold_build2_loc (loc, pcode, type,
9773 fold_build2_loc (loc, PLUS_EXPR, type,
9774 fold_convert_loc (loc, type,
9775 parg0),
9776 fold_convert_loc (loc, type,
9777 marg)),
9778 fold_convert_loc (loc, type, parg1));
0aee4751
KH
9779 if (TREE_CODE (parg0) != MULT_EXPR
9780 && TREE_CODE (parg1) == MULT_EXPR)
db3927fb
AH
9781 return
9782 fold_build2_loc (loc, PLUS_EXPR, type,
9783 fold_convert_loc (loc, type, parg0),
9784 fold_build2_loc (loc, pcode, type,
9785 fold_convert_loc (loc, type, marg),
9786 fold_convert_loc (loc, type,
9787 parg1)));
0aee4751 9788 }
0aee4751
KH
9789 }
9790 else
9791 {
9792 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9793 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
db3927fb 9794 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
9795
9796 /* Likewise if the operands are reversed. */
9797 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
db3927fb 9798 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
0aee4751
KH
9799
9800 /* Convert X + -C into X - C. */
9801 if (TREE_CODE (arg1) == REAL_CST
9802 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9803 {
9804 tem = fold_negate_const (arg1, type);
9805 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
db3927fb
AH
9806 return fold_build2_loc (loc, MINUS_EXPR, type,
9807 fold_convert_loc (loc, type, arg0),
9808 fold_convert_loc (loc, type, tem));
0aee4751
KH
9809 }
9810
9f539671
RG
9811 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9812 to __complex__ ( x, y ). This is not the same for SNaNs or
d1ad84c2 9813 if signed zeros are involved. */
9f539671
RG
9814 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9815 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9816 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9817 {
9818 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
db3927fb
AH
9819 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9820 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9f539671
RG
9821 bool arg0rz = false, arg0iz = false;
9822 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9823 || (arg0i && (arg0iz = real_zerop (arg0i))))
9824 {
db3927fb
AH
9825 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9826 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9f539671
RG
9827 if (arg0rz && arg1i && real_zerop (arg1i))
9828 {
9829 tree rp = arg1r ? arg1r
9830 : build1 (REALPART_EXPR, rtype, arg1);
9831 tree ip = arg0i ? arg0i
9832 : build1 (IMAGPART_EXPR, rtype, arg0);
db3927fb 9833 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9f539671
RG
9834 }
9835 else if (arg0iz && arg1r && real_zerop (arg1r))
9836 {
9837 tree rp = arg0r ? arg0r
9838 : build1 (REALPART_EXPR, rtype, arg0);
9839 tree ip = arg1i ? arg1i
9840 : build1 (IMAGPART_EXPR, rtype, arg1);
db3927fb 9841 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9f539671
RG
9842 }
9843 }
9844 }
9845
e0dd989a 9846 if (flag_unsafe_math_optimizations
f8912a55
PB
9847 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9848 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
db3927fb 9849 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
f8912a55
PB
9850 return tem;
9851
0aee4751
KH
9852 /* Convert x+x into x*2.0. */
9853 if (operand_equal_p (arg0, arg1, 0)
9854 && SCALAR_FLOAT_TYPE_P (type))
db3927fb 9855 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
7f20a5b7 9856 build_real (type, dconst2));
0aee4751 9857
b8698a0f 9858 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
a1a82611
RE
9859 We associate floats only if the user has specified
9860 -fassociative-math. */
9861 if (flag_associative_math
0aee4751
KH
9862 && TREE_CODE (arg1) == PLUS_EXPR
9863 && TREE_CODE (arg0) != MULT_EXPR)
9864 {
9865 tree tree10 = TREE_OPERAND (arg1, 0);
9866 tree tree11 = TREE_OPERAND (arg1, 1);
9867 if (TREE_CODE (tree11) == MULT_EXPR
9868 && TREE_CODE (tree10) == MULT_EXPR)
9869 {
9870 tree tree0;
db3927fb
AH
9871 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9872 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
0aee4751
KH
9873 }
9874 }
b8698a0f 9875 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
a1a82611
RE
9876 We associate floats only if the user has specified
9877 -fassociative-math. */
9878 if (flag_associative_math
0aee4751
KH
9879 && TREE_CODE (arg0) == PLUS_EXPR
9880 && TREE_CODE (arg1) != MULT_EXPR)
9881 {
9882 tree tree00 = TREE_OPERAND (arg0, 0);
9883 tree tree01 = TREE_OPERAND (arg0, 1);
9884 if (TREE_CODE (tree01) == MULT_EXPR
9885 && TREE_CODE (tree00) == MULT_EXPR)
9886 {
9887 tree tree0;
db3927fb
AH
9888 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9889 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
0aee4751
KH
9890 }
9891 }
9892 }
9893
9894 bit_rotate:
9895 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9896 is a rotate of A by C1 bits. */
9897 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9898 is a rotate of A by B bits. */
9899 {
9900 enum tree_code code0, code1;
70582b3a 9901 tree rtype;
0aee4751
KH
9902 code0 = TREE_CODE (arg0);
9903 code1 = TREE_CODE (arg1);
9904 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9905 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9906 && operand_equal_p (TREE_OPERAND (arg0, 0),
9907 TREE_OPERAND (arg1, 0), 0)
70582b3a
RG
9908 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9909 TYPE_UNSIGNED (rtype))
9910 /* Only create rotates in complete modes. Other cases are not
9911 expanded properly. */
9912 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
0aee4751
KH
9913 {
9914 tree tree01, tree11;
9915 enum tree_code code01, code11;
9916
9917 tree01 = TREE_OPERAND (arg0, 1);
9918 tree11 = TREE_OPERAND (arg1, 1);
9919 STRIP_NOPS (tree01);
9920 STRIP_NOPS (tree11);
9921 code01 = TREE_CODE (tree01);
9922 code11 = TREE_CODE (tree11);
9923 if (code01 == INTEGER_CST
9924 && code11 == INTEGER_CST
9925 && TREE_INT_CST_HIGH (tree01) == 0
9926 && TREE_INT_CST_HIGH (tree11) == 0
9927 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9928 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
db3927fb
AH
9929 {
9930 tem = build2 (LROTATE_EXPR,
9931 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9932 TREE_OPERAND (arg0, 0),
9933 code0 == LSHIFT_EXPR
9934 ? tree01 : tree11);
9935 SET_EXPR_LOCATION (tem, loc);
9936 return fold_convert_loc (loc, type, tem);
9937 }
0aee4751
KH
9938 else if (code11 == MINUS_EXPR)
9939 {
9940 tree tree110, tree111;
9941 tree110 = TREE_OPERAND (tree11, 0);
9942 tree111 = TREE_OPERAND (tree11, 1);
9943 STRIP_NOPS (tree110);
9944 STRIP_NOPS (tree111);
9945 if (TREE_CODE (tree110) == INTEGER_CST
9946 && 0 == compare_tree_int (tree110,
9947 TYPE_PRECISION
9948 (TREE_TYPE (TREE_OPERAND
9949 (arg0, 0))))
9950 && operand_equal_p (tree01, tree111, 0))
db3927fb
AH
9951 return
9952 fold_convert_loc (loc, type,
9953 build2 ((code0 == LSHIFT_EXPR
9954 ? LROTATE_EXPR
9955 : RROTATE_EXPR),
9956 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9957 TREE_OPERAND (arg0, 0), tree01));
0aee4751
KH
9958 }
9959 else if (code01 == MINUS_EXPR)
9960 {
9961 tree tree010, tree011;
9962 tree010 = TREE_OPERAND (tree01, 0);
9963 tree011 = TREE_OPERAND (tree01, 1);
9964 STRIP_NOPS (tree010);
9965 STRIP_NOPS (tree011);
9966 if (TREE_CODE (tree010) == INTEGER_CST
9967 && 0 == compare_tree_int (tree010,
9968 TYPE_PRECISION
9969 (TREE_TYPE (TREE_OPERAND
9970 (arg0, 0))))
9971 && operand_equal_p (tree11, tree011, 0))
db3927fb
AH
9972 return fold_convert_loc
9973 (loc, type,
9974 build2 ((code0 != LSHIFT_EXPR
9975 ? LROTATE_EXPR
9976 : RROTATE_EXPR),
9977 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9978 TREE_OPERAND (arg0, 0), tree11));
0aee4751
KH
9979 }
9980 }
9981 }
9982
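      /* For a 32-bit unsigned A, (A << 5) + (A >> 27), and likewise the
	 BIT_IOR_EXPR form (A << 5) | (A >> 27) that jumps here, is
	 recognized as a rotate of A left by 5 bits.  */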
9983 associate:
9984	 /* In most languages, we can't associate operations on floats through
9985 parentheses. Rather than remember where the parentheses were, we
9986 don't associate floats at all, unless the user has specified
a1a82611 9987 -fassociative-math.
325217ed 9988 And, we need to make sure type is not saturating. */
0aee4751 9989
a1a82611 9990 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
325217ed 9991 && !TYPE_SATURATING (type))
0aee4751
KH
9992 {
9993 tree var0, con0, lit0, minus_lit0;
9994 tree var1, con1, lit1, minus_lit1;
a6d5f37c 9995 bool ok = true;
0aee4751
KH
9996
9997 /* Split both trees into variables, constants, and literals. Then
9998 associate each group together, the constants with literals,
9999 then the result with variables. This increases the chances of
10000 literals being recombined later and of generating relocatable
10001 expressions for the sum of a constant and literal. */
10002 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10003 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10004 code == MINUS_EXPR);
10005
9e9ef331
EB
10006 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10007 if (code == MINUS_EXPR)
10008 code = PLUS_EXPR;
10009
10010 /* With undefined overflow we can only associate constants with one
10011 variable, and constants whose association doesn't overflow. */
10012 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10013 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
a6d5f37c 10014 {
9e9ef331
EB
10015 if (var0 && var1)
10016 {
10017 tree tmp0 = var0;
10018 tree tmp1 = var1;
10019
10020 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10021 tmp0 = TREE_OPERAND (tmp0, 0);
10022 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10023 tmp1 = TREE_OPERAND (tmp1, 0);
10024 /* The only case we can still associate with two variables
10025 is if they are the same, modulo negation. */
10026 if (!operand_equal_p (tmp0, tmp1, 0))
10027 ok = false;
10028 }
10029
10030 if (ok && lit0 && lit1)
10031 {
10032 tree tmp0 = fold_convert (type, lit0);
10033 tree tmp1 = fold_convert (type, lit1);
10034
10035 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10036 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10037 ok = false;
10038 }
a6d5f37c
RG
10039 }
10040
0aee4751
KH
10041 /* Only do something if we found more than two objects. Otherwise,
10042 nothing has changed and we risk infinite recursion. */
a6d5f37c
RG
10043 if (ok
10044 && (2 < ((var0 != 0) + (var1 != 0)
10045 + (con0 != 0) + (con1 != 0)
10046 + (lit0 != 0) + (lit1 != 0)
10047 + (minus_lit0 != 0) + (minus_lit1 != 0))))
0aee4751 10048 {
db3927fb
AH
10049 var0 = associate_trees (loc, var0, var1, code, type);
10050 con0 = associate_trees (loc, con0, con1, code, type);
10051 lit0 = associate_trees (loc, lit0, lit1, code, type);
10052 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
0aee4751
KH
10053
10054 /* Preserve the MINUS_EXPR if the negative part of the literal is
10055 greater than the positive part. Otherwise, the multiplicative
10056		 folding code (i.e. extract_muldiv) may be fooled when
10057		 unsigned constants are subtracted, as in the following
10058		 example: ((X*2 + 4) - 8U)/2.  */
10059 if (minus_lit0 && lit0)
10060 {
10061 if (TREE_CODE (lit0) == INTEGER_CST
10062 && TREE_CODE (minus_lit0) == INTEGER_CST
10063 && tree_int_cst_lt (lit0, minus_lit0))
10064 {
db3927fb 10065 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
0aee4751
KH
10066 MINUS_EXPR, type);
10067 lit0 = 0;
10068 }
10069 else
10070 {
db3927fb 10071 lit0 = associate_trees (loc, lit0, minus_lit0,
0aee4751
KH
10072 MINUS_EXPR, type);
10073 minus_lit0 = 0;
10074 }
10075 }
10076 if (minus_lit0)
10077 {
10078 if (con0 == 0)
db3927fb
AH
10079 return
10080 fold_convert_loc (loc, type,
10081 associate_trees (loc, var0, minus_lit0,
10082 MINUS_EXPR, type));
0aee4751
KH
10083 else
10084 {
db3927fb 10085 con0 = associate_trees (loc, con0, minus_lit0,
0aee4751 10086 MINUS_EXPR, type);
db3927fb
AH
10087 return
10088 fold_convert_loc (loc, type,
10089 associate_trees (loc, var0, con0,
10090 PLUS_EXPR, type));
0aee4751
KH
10091 }
10092 }
10093
db3927fb
AH
10094 con0 = associate_trees (loc, con0, lit0, code, type);
10095 return
10096 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10097 code, type));
0aee4751
KH
10098 }
10099 }
10100
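	  /* For example, (x + 4) + 8 splits into the variable x and the
	     literals 4 and 8, which are recombined into x + 12; with
	     undefined overflow the literal combination is kept only when
	     it does not itself overflow, per the checks above.  */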
62ab45cc 10101 return NULL_TREE;
0aee4751
KH
10102
10103 case MINUS_EXPR:
5be014d5
AP
10104 /* Pointer simplifications for subtraction, simple reassociations. */
10105 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10106 {
10107 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10108 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10109 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10110 {
db3927fb
AH
10111 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10112 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10113 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10114 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10115 return fold_build2_loc (loc, PLUS_EXPR, type,
10116 fold_build2_loc (loc, MINUS_EXPR, type,
10117 arg00, arg10),
10118 fold_build2_loc (loc, MINUS_EXPR, type,
10119 arg01, arg11));
5be014d5
AP
10120 }
10121 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10122 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10123 {
db3927fb
AH
10124 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10125 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10126 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10127 fold_convert_loc (loc, type, arg1));
5be014d5 10128 if (tmp)
db3927fb 10129 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
5be014d5
AP
10130 }
10131 }
0aee4751
KH
10132 /* A - (-B) -> A + B */
10133 if (TREE_CODE (arg1) == NEGATE_EXPR)
db3927fb
AH
10134 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10135 fold_convert_loc (loc, type,
10136 TREE_OPERAND (arg1, 0)));
0aee4751
KH
10137 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10138 if (TREE_CODE (arg0) == NEGATE_EXPR
10139 && (FLOAT_TYPE_P (type)
b0cd88d2 10140 || INTEGRAL_TYPE_P (type))
0aee4751
KH
10141 && negate_expr_p (arg1)
10142 && reorder_operands_p (arg0, arg1))
db3927fb
AH
10143 return fold_build2_loc (loc, MINUS_EXPR, type,
10144 fold_convert_loc (loc, type,
10145 negate_expr (arg1)),
10146 fold_convert_loc (loc, type,
10147 TREE_OPERAND (arg0, 0)));
cbefb99c
JL
10148 /* Convert -A - 1 to ~A. */
10149 if (INTEGRAL_TYPE_P (type)
10150 && TREE_CODE (arg0) == NEGATE_EXPR
870aa1eb 10151 && integer_onep (arg1)
eeef0e45 10152 && !TYPE_OVERFLOW_TRAPS (type))
db3927fb
AH
10153 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10154 fold_convert_loc (loc, type,
10155 TREE_OPERAND (arg0, 0)));
cbefb99c
JL
10156
10157 /* Convert -1 - A to ~A. */
10158 if (INTEGRAL_TYPE_P (type)
10159 && integer_all_onesp (arg0))
db3927fb 10160 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
0aee4751 10161
65648dd4
RG
10162
10163 /* X - (X / CST) * CST is X % CST. */
10164 if (INTEGRAL_TYPE_P (type)
10165 && TREE_CODE (arg1) == MULT_EXPR
10166 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10167 && operand_equal_p (arg0,
10168 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10169 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10170 TREE_OPERAND (arg1, 1), 0))
db3927fb
AH
10171 return
10172 fold_convert_loc (loc, type,
10173 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10174 arg0, TREE_OPERAND (arg1, 1)));
65648dd4 10175
0aee4751
KH
10176 if (! FLOAT_TYPE_P (type))
10177 {
fd6c76f4 10178 if (integer_zerop (arg0))
db3927fb 10179 return negate_expr (fold_convert_loc (loc, type, arg1));
0aee4751 10180 if (integer_zerop (arg1))
db3927fb 10181 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
10182
10183 /* Fold A - (A & B) into ~B & A. */
10184 if (!TREE_SIDE_EFFECTS (arg0)
10185 && TREE_CODE (arg1) == BIT_AND_EXPR)
10186 {
10187 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
48075623 10188 {
db3927fb
AH
10189 tree arg10 = fold_convert_loc (loc, type,
10190 TREE_OPERAND (arg1, 0));
10191 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10192 fold_build1_loc (loc, BIT_NOT_EXPR,
10193 type, arg10),
10194 fold_convert_loc (loc, type, arg0));
48075623 10195 }
0aee4751 10196 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
48075623 10197 {
db3927fb
AH
10198 tree arg11 = fold_convert_loc (loc,
10199 type, TREE_OPERAND (arg1, 1));
10200 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10201 fold_build1_loc (loc, BIT_NOT_EXPR,
10202 type, arg11),
10203 fold_convert_loc (loc, type, arg0));
48075623 10204 }
0aee4751
KH
10205 }
10206
10207 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10208 any power of 2 minus 1. */
10209 if (TREE_CODE (arg0) == BIT_AND_EXPR
10210 && TREE_CODE (arg1) == BIT_AND_EXPR
10211 && operand_equal_p (TREE_OPERAND (arg0, 0),
10212 TREE_OPERAND (arg1, 0), 0))
10213 {
10214 tree mask0 = TREE_OPERAND (arg0, 1);
10215 tree mask1 = TREE_OPERAND (arg1, 1);
db3927fb 10216 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
0aee4751
KH
10217
10218 if (operand_equal_p (tem, mask1, 0))
10219 {
db3927fb 10220 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
7f20a5b7 10221 TREE_OPERAND (arg0, 0), mask1);
db3927fb 10222 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
0aee4751
KH
10223 }
10224 }
10225 }
10226
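	  /* Examples: a - (a & mask) becomes ~mask & a, and
	     (x & ~1) - (x & 1) becomes (x ^ 1) - 1.  */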
10227 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10228 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
db3927fb 10229 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
10230
10231 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10232 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10233 (-ARG1 + ARG0) reduces to -ARG1. */
fd6c76f4 10234 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
db3927fb 10235 return negate_expr (fold_convert_loc (loc, type, arg1));
0aee4751 10236
d1ad84c2
KG
10237 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10238 __complex__ ( x, -y ). This is not the same for SNaNs or if
10239 signed zeros are involved. */
10240 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10241 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10242 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10243 {
10244 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
db3927fb
AH
10245 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10246 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
d1ad84c2
KG
10247 bool arg0rz = false, arg0iz = false;
10248 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10249 || (arg0i && (arg0iz = real_zerop (arg0i))))
10250 {
db3927fb
AH
10251 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10252 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
d1ad84c2
KG
10253 if (arg0rz && arg1i && real_zerop (arg1i))
10254 {
db3927fb 10255 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
d1ad84c2
KG
10256 arg1r ? arg1r
10257 : build1 (REALPART_EXPR, rtype, arg1));
10258 tree ip = arg0i ? arg0i
10259 : build1 (IMAGPART_EXPR, rtype, arg0);
db3927fb 10260 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
d1ad84c2
KG
10261 }
10262 else if (arg0iz && arg1r && real_zerop (arg1r))
10263 {
10264 tree rp = arg0r ? arg0r
10265 : build1 (REALPART_EXPR, rtype, arg0);
db3927fb 10266 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
d1ad84c2
KG
10267 arg1i ? arg1i
10268 : build1 (IMAGPART_EXPR, rtype, arg1));
db3927fb 10269 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
d1ad84c2
KG
10270 }
10271 }
10272 }
10273
0aee4751
KH
10274 /* Fold &x - &x. This can happen from &x.foo - &x.
10275 This is unsafe for certain floats even in non-IEEE formats.
10276 In IEEE, it is unsafe because it does wrong for NaNs.
10277 Also note that operand_equal_p is always false if an operand
10278 is volatile. */
10279
81d2fb02 10280 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
0aee4751 10281 && operand_equal_p (arg0, arg1, 0))
db3927fb 10282 return fold_convert_loc (loc, type, integer_zero_node);
0aee4751
KH
10283
10284 /* A - B -> A + (-B) if B is easily negatable. */
fd6c76f4 10285 if (negate_expr_p (arg1)
0aee4751
KH
10286 && ((FLOAT_TYPE_P (type)
10287 /* Avoid this transformation if B is a positive REAL_CST. */
10288 && (TREE_CODE (arg1) != REAL_CST
10289 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
b0cd88d2 10290 || INTEGRAL_TYPE_P (type)))
db3927fb
AH
10291 return fold_build2_loc (loc, PLUS_EXPR, type,
10292 fold_convert_loc (loc, type, arg0),
10293 fold_convert_loc (loc, type,
10294 negate_expr (arg1)));
0aee4751
KH
10295
10296 /* Try folding difference of addresses. */
10297 {
10298 HOST_WIDE_INT diff;
10299
10300 if ((TREE_CODE (arg0) == ADDR_EXPR
10301 || TREE_CODE (arg1) == ADDR_EXPR)
10302 && ptr_difference_const (arg0, arg1, &diff))
10303 return build_int_cst_type (type, diff);
10304 }
75cf42cc
RG
10305
10306 /* Fold &a[i] - &a[j] to i-j. */
10307 if (TREE_CODE (arg0) == ADDR_EXPR
10308 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10309 && TREE_CODE (arg1) == ADDR_EXPR
10310 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10311 {
10312 tree aref0 = TREE_OPERAND (arg0, 0);
10313 tree aref1 = TREE_OPERAND (arg1, 0);
10314 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10315 TREE_OPERAND (aref1, 0), 0))
10316 {
db3927fb
AH
10317 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10318 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
75cf42cc
RG
10319 tree esz = array_ref_element_size (aref0);
10320 tree diff = build2 (MINUS_EXPR, type, op0, op1);
db3927fb
AH
10321 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10322 fold_convert_loc (loc, type, esz));
b8698a0f 10323
75cf42cc
RG
10324 }
10325 }
10326
e0dd989a
RG
10327 if (FLOAT_TYPE_P (type)
10328 && flag_unsafe_math_optimizations
f8912a55
PB
10329 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10330 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
db3927fb 10331 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
f8912a55
PB
10332 return tem;
10333
0ed9a3e3 10334 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
a1a82611
RE
10335 same or one. Make sure type is not saturating.
10336 fold_plusminus_mult_expr will re-associate. */
0ed9a3e3
RG
10337 if ((TREE_CODE (arg0) == MULT_EXPR
10338 || TREE_CODE (arg1) == MULT_EXPR)
325217ed 10339 && !TYPE_SATURATING (type)
a1a82611 10340 && (!FLOAT_TYPE_P (type) || flag_associative_math))
0ed9a3e3 10341 {
db3927fb 10342 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
0ed9a3e3
RG
10343 if (tem)
10344 return tem;
0aee4751
KH
10345 }
10346
10347 goto associate;
10348
10349 case MULT_EXPR:
10350 /* (-A) * (-B) -> A * B */
10351 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
db3927fb
AH
10352 return fold_build2_loc (loc, MULT_EXPR, type,
10353 fold_convert_loc (loc, type,
10354 TREE_OPERAND (arg0, 0)),
10355 fold_convert_loc (loc, type,
10356 negate_expr (arg1)));
0aee4751 10357 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
db3927fb
AH
10358 return fold_build2_loc (loc, MULT_EXPR, type,
10359 fold_convert_loc (loc, type,
10360 negate_expr (arg0)),
10361 fold_convert_loc (loc, type,
10362 TREE_OPERAND (arg1, 0)));
0aee4751 10363
0aee4751
KH
10364 if (! FLOAT_TYPE_P (type))
10365 {
10366 if (integer_zerop (arg1))
db3927fb 10367 return omit_one_operand_loc (loc, type, arg1, arg0);
0aee4751 10368 if (integer_onep (arg1))
db3927fb 10369 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
b9e67f8b
RG
10370 /* Transform x * -1 into -x. Make sure to do the negation
10371 on the original operand with conversions not stripped
10372 because we can only strip non-sign-changing conversions. */
694d73e1 10373 if (integer_all_onesp (arg1))
db3927fb 10374 return fold_convert_loc (loc, type, negate_expr (op0));
b0cd88d2
RG
10375 /* Transform x * -C into -x * C if x is easily negatable. */
10376 if (TREE_CODE (arg1) == INTEGER_CST
10377 && tree_int_cst_sgn (arg1) == -1
10378 && negate_expr_p (arg0)
10379 && (tem = negate_expr (arg1)) != arg1
10380 && !TREE_OVERFLOW (tem))
db3927fb
AH
10381 return fold_build2_loc (loc, MULT_EXPR, type,
10382 fold_convert_loc (loc, type,
10383 negate_expr (arg0)),
10384 tem);
0aee4751
KH
10385
10386 /* (a * (1 << b)) is (a << b) */
10387 if (TREE_CODE (arg1) == LSHIFT_EXPR
10388 && integer_onep (TREE_OPERAND (arg1, 0)))
db3927fb 10389 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
7f20a5b7 10390 TREE_OPERAND (arg1, 1));
0aee4751
KH
10391 if (TREE_CODE (arg0) == LSHIFT_EXPR
10392 && integer_onep (TREE_OPERAND (arg0, 0)))
db3927fb 10393 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
7f20a5b7 10394 TREE_OPERAND (arg0, 1));
0aee4751 10395
1447bf05
RG
10396 /* (A + A) * C -> A * 2 * C */
10397 if (TREE_CODE (arg0) == PLUS_EXPR
10398 && TREE_CODE (arg1) == INTEGER_CST
10399 && operand_equal_p (TREE_OPERAND (arg0, 0),
10400 TREE_OPERAND (arg0, 1), 0))
db3927fb
AH
10401 return fold_build2_loc (loc, MULT_EXPR, type,
10402 omit_one_operand_loc (loc, type,
10403 TREE_OPERAND (arg0, 0),
1447bf05 10404 TREE_OPERAND (arg0, 1)),
db3927fb 10405 fold_build2_loc (loc, MULT_EXPR, type,
1447bf05
RG
10406 build_int_cst (type, 2) , arg1));
10407
6ac01510 10408 strict_overflow_p = false;
0aee4751 10409 if (TREE_CODE (arg1) == INTEGER_CST
ac029795 10410 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
6ac01510
ILT
10411 &strict_overflow_p)))
10412 {
10413 if (strict_overflow_p)
10414 fold_overflow_warning (("assuming signed overflow does not "
10415 "occur when simplifying "
10416 "multiplication"),
10417 WARN_STRICT_OVERFLOW_MISC);
db3927fb 10418 return fold_convert_loc (loc, type, tem);
6ac01510 10419 }
0aee4751 10420
99b25753
RS
10421 /* Optimize z * conj(z) for integer complex numbers. */
10422 if (TREE_CODE (arg0) == CONJ_EXPR
10423 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
db3927fb 10424 return fold_mult_zconjz (loc, type, arg1);
99b25753
RS
10425 if (TREE_CODE (arg1) == CONJ_EXPR
10426 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
db3927fb 10427 return fold_mult_zconjz (loc, type, arg0);
0aee4751
KH
10428 }
10429 else
10430 {
10431 /* Maybe fold x * 0 to 0. The expressions aren't the same
10432 when x is NaN, since x * 0 is also NaN. Nor are they the
10433 same in modes with signed zeros, since multiplying a
10434 negative value by 0 gives -0, not +0. */
10435 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10436 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10437 && real_zerop (arg1))
db3927fb 10438 return omit_one_operand_loc (loc, type, arg1, arg0);
c94f9067
JM
10439 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10440 Likewise for complex arithmetic with signed zeros. */
0aee4751 10441 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
c94f9067
JM
10442 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10443 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
0aee4751 10444 && real_onep (arg1))
db3927fb 10445 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
10446
10447 /* Transform x * -1.0 into -x. */
10448 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
c94f9067
JM
10449 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10450 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
0aee4751 10451 && real_minus_onep (arg1))
db3927fb 10452 return fold_convert_loc (loc, type, negate_expr (arg0));
0aee4751 10453
a1a82611
RE
10454 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10455	     the result for floating point types due to rounding, so it is applied
10456	     only if -fassociative-math was specified.  */
10457 if (flag_associative_math
0aee4751
KH
10458 && TREE_CODE (arg0) == RDIV_EXPR
10459 && TREE_CODE (arg1) == REAL_CST
10460 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10461 {
10462 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
43a5d30b 10463 arg1);
0aee4751 10464 if (tem)
db3927fb 10465 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
7f20a5b7 10466 TREE_OPERAND (arg0, 1));
0aee4751
KH
10467 }
10468
10469 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10470 if (operand_equal_p (arg0, arg1, 0))
10471 {
10472 tree tem = fold_strip_sign_ops (arg0);
10473 if (tem != NULL_TREE)
10474 {
db3927fb
AH
10475 tem = fold_convert_loc (loc, type, tem);
10476 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
0aee4751
KH
10477 }
10478 }
10479
9f539671 10480 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
d1ad84c2 10481 This is not the same for NaNs or if signed zeros are
9f539671
RG
10482 involved. */
10483 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10484 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10485 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10486 && TREE_CODE (arg1) == COMPLEX_CST
10487 && real_zerop (TREE_REALPART (arg1)))
10488 {
10489 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10490 if (real_onep (TREE_IMAGPART (arg1)))
db3927fb
AH
10491 return
10492 fold_build2_loc (loc, COMPLEX_EXPR, type,
10493 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10494 rtype, arg0)),
10495 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9f539671 10496 else if (real_minus_onep (TREE_IMAGPART (arg1)))
db3927fb
AH
10497 return
10498 fold_build2_loc (loc, COMPLEX_EXPR, type,
10499 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10500 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10501 rtype, arg0)));
9f539671
RG
10502 }
10503
99b25753
RS
10504 /* Optimize z * conj(z) for floating point complex numbers.
10505 Guarded by flag_unsafe_math_optimizations as non-finite
10506 imaginary components don't produce scalar results. */
10507 if (flag_unsafe_math_optimizations
10508 && TREE_CODE (arg0) == CONJ_EXPR
10509 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
db3927fb 10510 return fold_mult_zconjz (loc, type, arg1);
99b25753
RS
10511 if (flag_unsafe_math_optimizations
10512 && TREE_CODE (arg1) == CONJ_EXPR
10513 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
db3927fb 10514 return fold_mult_zconjz (loc, type, arg0);
99b25753 10515
0aee4751
KH
10516 if (flag_unsafe_math_optimizations)
10517 {
10518 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10519 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10520
10521 /* Optimizations of root(...)*root(...). */
10522 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10523 {
5039610b
SL
10524 tree rootfn, arg;
10525 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10526 tree arg10 = CALL_EXPR_ARG (arg1, 0);
0aee4751
KH
10527
10528 /* Optimize sqrt(x)*sqrt(x) as x. */
10529 if (BUILTIN_SQRT_P (fcode0)
10530 && operand_equal_p (arg00, arg10, 0)
10531 && ! HONOR_SNANS (TYPE_MODE (type)))
10532 return arg00;
10533
10534 /* Optimize root(x)*root(y) as root(x*y). */
5039610b 10535 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
db3927fb
AH
10536 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10537 return build_call_expr_loc (loc, rootfn, 1, arg);
0aee4751
KH
10538 }
10539
10540 /* Optimize expN(x)*expN(y) as expN(x+y). */
10541 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10542 {
5039610b 10543 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
db3927fb 10544 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
5039610b
SL
10545 CALL_EXPR_ARG (arg0, 0),
10546 CALL_EXPR_ARG (arg1, 0));
db3927fb 10547 return build_call_expr_loc (loc, expfn, 1, arg);
0aee4751
KH
10548 }
10549
10550 /* Optimizations of pow(...)*pow(...). */
10551 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10552 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10553 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10554 {
5039610b
SL
10555 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10556 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10557 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10558 tree arg11 = CALL_EXPR_ARG (arg1, 1);
0aee4751
KH
10559
10560 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10561 if (operand_equal_p (arg01, arg11, 0))
10562 {
5039610b 10563 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
db3927fb
AH
10564 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10565 arg00, arg10);
10566 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
0aee4751
KH
10567 }
10568
10569 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10570 if (operand_equal_p (arg00, arg10, 0))
10571 {
5039610b 10572 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
db3927fb
AH
10573 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10574 arg01, arg11);
10575 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
0aee4751
KH
10576 }
10577 }
10578
10579 /* Optimize tan(x)*cos(x) as sin(x). */
10580 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10581 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10582 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10583 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10584 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10585 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
5039610b
SL
10586 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10587 CALL_EXPR_ARG (arg1, 0), 0))
0aee4751
KH
10588 {
10589 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10590
10591 if (sinfn != NULL_TREE)
db3927fb
AH
10592 return build_call_expr_loc (loc, sinfn, 1,
10593 CALL_EXPR_ARG (arg0, 0));
0aee4751
KH
10594 }
10595
10596 /* Optimize x*pow(x,c) as pow(x,c+1). */
10597 if (fcode1 == BUILT_IN_POW
10598 || fcode1 == BUILT_IN_POWF
10599 || fcode1 == BUILT_IN_POWL)
10600 {
5039610b
SL
10601 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10602 tree arg11 = CALL_EXPR_ARG (arg1, 1);
0aee4751 10603 if (TREE_CODE (arg11) == REAL_CST
455f14dd 10604 && !TREE_OVERFLOW (arg11)
0aee4751
KH
10605 && operand_equal_p (arg0, arg10, 0))
10606 {
5039610b 10607 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
0aee4751 10608 REAL_VALUE_TYPE c;
5039610b 10609 tree arg;
0aee4751
KH
10610
10611 c = TREE_REAL_CST (arg11);
10612 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10613 arg = build_real (type, c);
db3927fb 10614 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
0aee4751
KH
10615 }
10616 }
10617
10618 /* Optimize pow(x,c)*x as pow(x,c+1). */
10619 if (fcode0 == BUILT_IN_POW
10620 || fcode0 == BUILT_IN_POWF
10621 || fcode0 == BUILT_IN_POWL)
10622 {
5039610b
SL
10623 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10624 tree arg01 = CALL_EXPR_ARG (arg0, 1);
0aee4751 10625 if (TREE_CODE (arg01) == REAL_CST
455f14dd 10626 && !TREE_OVERFLOW (arg01)
0aee4751
KH
10627 && operand_equal_p (arg1, arg00, 0))
10628 {
5039610b 10629 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
0aee4751 10630 REAL_VALUE_TYPE c;
5039610b 10631 tree arg;
0aee4751
KH
10632
10633 c = TREE_REAL_CST (arg01);
10634 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10635 arg = build_real (type, c);
db3927fb 10636 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
0aee4751
KH
10637 }
10638 }
10639
10640 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
efd8f750 10641 if (optimize_function_for_speed_p (cfun)
0aee4751
KH
10642 && operand_equal_p (arg0, arg1, 0))
10643 {
10644 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10645
10646 if (powfn)
10647 {
10648 tree arg = build_real (type, dconst2);
db3927fb 10649 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
0aee4751
KH
10650 }
10651 }
10652 }
10653 }
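	  /* Subject to the guards coded above, these unsafe-math folds turn,
	     e.g., sqrt (x) * sqrt (x) into x, exp (x) * exp (y) into
	     exp (x + y), pow (x, 2.0) * x into pow (x, 3.0), and
	     tan (x) * cos (x) into sin (x).  */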
10654 goto associate;
10655
10656 case BIT_IOR_EXPR:
10657 bit_ior:
10658 if (integer_all_onesp (arg1))
db3927fb 10659 return omit_one_operand_loc (loc, type, arg1, arg0);
0aee4751 10660 if (integer_zerop (arg1))
db3927fb 10661 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751 10662 if (operand_equal_p (arg0, arg1, 0))
db3927fb 10663 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
10664
10665 /* ~X | X is -1. */
10666 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10667 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10668 {
db3927fb
AH
10669 t1 = fold_convert_loc (loc, type, integer_zero_node);
10670 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10671 return omit_one_operand_loc (loc, type, t1, arg1);
0aee4751
KH
10672 }
10673
10674 /* X | ~X is -1. */
10675 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10676 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10677 {
db3927fb
AH
10678 t1 = fold_convert_loc (loc, type, integer_zero_node);
10679 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10680 return omit_one_operand_loc (loc, type, t1, arg0);
0aee4751
KH
10681 }
10682
840992bd
RS
10683 /* Canonicalize (X & C1) | C2. */
10684 if (TREE_CODE (arg0) == BIT_AND_EXPR
10685 && TREE_CODE (arg1) == INTEGER_CST
10686 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10687 {
517ddae9
JJ
10688 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10689 int width = TYPE_PRECISION (type), w;
840992bd
RS
10690 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10691 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10692 hi2 = TREE_INT_CST_HIGH (arg1);
10693 lo2 = TREE_INT_CST_LOW (arg1);
10694
10695 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10696 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
db3927fb
AH
10697 return omit_one_operand_loc (loc, type, arg1,
10698 TREE_OPERAND (arg0, 0));
840992bd
RS
10699
10700 if (width > HOST_BITS_PER_WIDE_INT)
10701 {
b8698a0f 10702 mhi = (unsigned HOST_WIDE_INT) -1
840992bd
RS
10703 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10704 mlo = -1;
10705 }
10706 else
10707 {
10708 mhi = 0;
10709 mlo = (unsigned HOST_WIDE_INT) -1
10710 >> (HOST_BITS_PER_WIDE_INT - width);
10711 }
10712
10713 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10714 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
db3927fb 10715 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
840992bd
RS
10716 TREE_OPERAND (arg0, 0), arg1);
10717
517ddae9
JJ
10718 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10719 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10720 mode which allows further optimizations. */
840992bd
RS
10721 hi1 &= mhi;
10722 lo1 &= mlo;
517ddae9
JJ
10723 hi2 &= mhi;
10724 lo2 &= mlo;
10725 hi3 = hi1 & ~hi2;
10726 lo3 = lo1 & ~lo2;
10727 for (w = BITS_PER_UNIT;
10728 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10729 w <<= 1)
10730 {
10731 unsigned HOST_WIDE_INT mask
10732 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10733 if (((lo1 | lo2) & mask) == mask
10734 && (lo1 & ~mask) == 0 && hi1 == 0)
10735 {
10736 hi3 = 0;
10737 lo3 = mask;
10738 break;
10739 }
10740 }
10741 if (hi3 != hi1 || lo3 != lo1)
db3927fb
AH
10742 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10743 fold_build2_loc (loc, BIT_AND_EXPR, type,
10744 TREE_OPERAND (arg0, 0),
10745 build_int_cst_wide (type,
517ddae9 10746 lo3, hi3)),
10747 arg1);
10748 }
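	  /* For example (8-bit constants): (X & 0xf0) | 0x0f becomes X | 0x0f
	     because C1 | C2 covers every bit, while (X & 0x0f) | 0x33 becomes
	     (X & 0x0c) | 0x33 because the bits of C1 already present in C2
	     are dropped from C1.  */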
10749
10750 /* (X & Y) | Y is (X, Y). */
10751 if (TREE_CODE (arg0) == BIT_AND_EXPR
10752 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
db3927fb 10753 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10754 /* (X & Y) | X is (Y, X). */
10755 if (TREE_CODE (arg0) == BIT_AND_EXPR
10756 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10757 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
db3927fb 10758 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10759 /* X | (X & Y) is (Y, X). */
10760 if (TREE_CODE (arg1) == BIT_AND_EXPR
10761 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10762 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
db3927fb 10763 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10764 /* X | (Y & X) is (Y, X). */
10765 if (TREE_CODE (arg1) == BIT_AND_EXPR
10766 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10767 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
db3927fb 10768 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
03bebcac 10769
db3927fb 10770 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10771 if (t1 != NULL_TREE)
10772 return t1;
10773
10774 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10775
10776 This results in more efficient code for machines without a NAND
10777 instruction. Combine will canonicalize to the first form
10778 which will allow use of NAND instructions provided by the
10779 backend if they exist. */
10780 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10781 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10782 {
10783 return
10784 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10785 build2 (BIT_AND_EXPR, type,
10786 fold_convert_loc (loc, type,
10787 TREE_OPERAND (arg0, 0)),
10788 fold_convert_loc (loc, type,
10789 TREE_OPERAND (arg1, 0))));
10790 }
10791
10792 /* See if this can be simplified into a rotate first. If that
10793 is unsuccessful continue in the association code. */
10794 goto bit_rotate;
10795
10796 case BIT_XOR_EXPR:
10797 if (integer_zerop (arg1))
db3927fb 10798 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751 10799 if (integer_all_onesp (arg1))
db3927fb 10800 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
0aee4751 10801 if (operand_equal_p (arg0, arg1, 0))
db3927fb 10802 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
0aee4751
KH
10803
10804 /* ~X ^ X is -1. */
10805 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10806 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10807 {
db3927fb
AH
10808 t1 = fold_convert_loc (loc, type, integer_zero_node);
10809 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10810 return omit_one_operand_loc (loc, type, t1, arg1);
0aee4751
KH
10811 }
10812
10813 /* X ^ ~X is -1. */
10814 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10815 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10816 {
db3927fb
AH
10817 t1 = fold_convert_loc (loc, type, integer_zero_node);
10818 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10819 return omit_one_operand_loc (loc, type, t1, arg0);
0aee4751
KH
10820 }
10821
10822 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10823 with a constant, and the two constants have no bits in common,
10824 we should treat this as a BIT_IOR_EXPR since this may produce more
10825 simplifications. */
10826 if (TREE_CODE (arg0) == BIT_AND_EXPR
10827 && TREE_CODE (arg1) == BIT_AND_EXPR
10828 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10829 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10830 && integer_zerop (const_binop (BIT_AND_EXPR,
10831 TREE_OPERAND (arg0, 1),
43a5d30b 10832 TREE_OPERAND (arg1, 1))))
0aee4751
KH
10833 {
10834 code = BIT_IOR_EXPR;
10835 goto bit_ior;
10836 }
10837
9d24eb54
AP
 10838 	      /* (X | Y) ^ X -> Y & ~X.  */
10839 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10840 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10841 {
10842 tree t2 = TREE_OPERAND (arg0, 1);
db3927fb 10843 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
9d24eb54 10844 arg1);
db3927fb
AH
10845 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10846 fold_convert_loc (loc, type, t2),
10847 fold_convert_loc (loc, type, t1));
9d24eb54
AP
10848 return t1;
10849 }
10850
 10851 	      /* (Y | X) ^ X -> Y & ~X.  */
10852 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10853 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10854 {
10855 tree t2 = TREE_OPERAND (arg0, 0);
db3927fb 10856 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
9d24eb54 10857 arg1);
db3927fb
AH
10858 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10859 fold_convert_loc (loc, type, t2),
10860 fold_convert_loc (loc, type, t1));
9d24eb54
AP
10861 return t1;
10862 }
10863
 10864 	      /* X ^ (X | Y) -> Y & ~X.  */
10865 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10866 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10867 {
10868 tree t2 = TREE_OPERAND (arg1, 1);
db3927fb 10869 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
9d24eb54 10870 arg0);
db3927fb
AH
10871 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10872 fold_convert_loc (loc, type, t2),
10873 fold_convert_loc (loc, type, t1));
9d24eb54
AP
10874 return t1;
10875 }
10876
 10877 	      /* X ^ (Y | X) -> Y & ~X.  */
10878 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10879 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10880 {
10881 tree t2 = TREE_OPERAND (arg1, 0);
db3927fb 10882 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
9d24eb54 10883 arg0);
db3927fb
AH
10884 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10885 fold_convert_loc (loc, type, t2),
10886 fold_convert_loc (loc, type, t1));
9d24eb54
AP
10887 return t1;
10888 }
b8698a0f 10889
33ab6245
JM
10890 /* Convert ~X ^ ~Y to X ^ Y. */
10891 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10892 && TREE_CODE (arg1) == BIT_NOT_EXPR)
db3927fb
AH
10893 return fold_build2_loc (loc, code, type,
10894 fold_convert_loc (loc, type,
10895 TREE_OPERAND (arg0, 0)),
10896 fold_convert_loc (loc, type,
10897 TREE_OPERAND (arg1, 0)));
33ab6245 10898
f8ed9a1c
RS
10899 /* Convert ~X ^ C to X ^ ~C. */
10900 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10901 && TREE_CODE (arg1) == INTEGER_CST)
db3927fb
AH
10902 return fold_build2_loc (loc, code, type,
10903 fold_convert_loc (loc, type,
10904 TREE_OPERAND (arg0, 0)),
10905 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
f8ed9a1c 10906
cef65eaa
RS
10907 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10908 if (TREE_CODE (arg0) == BIT_AND_EXPR
10909 && integer_onep (TREE_OPERAND (arg0, 1))
10910 && integer_onep (arg1))
db3927fb 10911 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
cef65eaa
RS
10912 build_int_cst (TREE_TYPE (arg0), 0));
10913
dd2c62dc
RS
10914 /* Fold (X & Y) ^ Y as ~X & Y. */
10915 if (TREE_CODE (arg0) == BIT_AND_EXPR
10916 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10917 {
db3927fb 10918 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
b8698a0f 10919 return fold_build2_loc (loc, BIT_AND_EXPR, type,
db3927fb
AH
10920 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10921 fold_convert_loc (loc, type, arg1));
dd2c62dc
RS
10922 }
10923 /* Fold (X & Y) ^ X as ~Y & X. */
10924 if (TREE_CODE (arg0) == BIT_AND_EXPR
10925 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10926 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10927 {
db3927fb
AH
10928 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10929 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10930 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10931 fold_convert_loc (loc, type, arg1));
dd2c62dc
RS
10932 }
10933 /* Fold X ^ (X & Y) as X & ~Y. */
10934 if (TREE_CODE (arg1) == BIT_AND_EXPR
10935 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10936 {
db3927fb
AH
10937 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10938 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10939 fold_convert_loc (loc, type, arg0),
10940 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
dd2c62dc
RS
10941 }
10942 /* Fold X ^ (Y & X) as ~Y & X. */
10943 if (TREE_CODE (arg1) == BIT_AND_EXPR
10944 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10945 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10946 {
db3927fb
AH
10947 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10948 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10949 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10950 fold_convert_loc (loc, type, arg0));
dd2c62dc
RS
10951 }
10952
0aee4751
KH
10953 /* See if this can be simplified into a rotate first. If that
10954 is unsuccessful continue in the association code. */
10955 goto bit_rotate;
10956
10957 case BIT_AND_EXPR:
10958 if (integer_all_onesp (arg1))
db3927fb 10959 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751 10960 if (integer_zerop (arg1))
db3927fb 10961 return omit_one_operand_loc (loc, type, arg1, arg0);
0aee4751 10962 if (operand_equal_p (arg0, arg1, 0))
db3927fb 10963 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
10964
10965 /* ~X & X is always zero. */
10966 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10967 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
db3927fb 10968 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
0aee4751
KH
10969
10970 /* X & ~X is always zero. */
10971 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10972 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
db3927fb 10973 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
0aee4751 10974
840992bd
RS
10975 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10976 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10977 && TREE_CODE (arg1) == INTEGER_CST
10978 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8174836f 10979 {
db3927fb
AH
10980 tree tmp1 = fold_convert_loc (loc, type, arg1);
10981 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10982 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10983 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10984 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10985 return
10986 fold_convert_loc (loc, type,
10987 fold_build2_loc (loc, BIT_IOR_EXPR,
10988 type, tmp2, tmp3));
8174836f 10989 }
840992bd 10990
03bebcac
RS
10991 /* (X | Y) & Y is (X, Y). */
10992 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10993 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
db3927fb 10994 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
03bebcac
RS
10995 /* (X | Y) & X is (Y, X). */
10996 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10997 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10998 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
db3927fb 10999 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
03bebcac
RS
11000 /* X & (X | Y) is (Y, X). */
11001 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11002 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11003 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
db3927fb 11004 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
03bebcac
RS
11005 /* X & (Y | X) is (Y, X). */
11006 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11007 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11008 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
db3927fb 11009 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
03bebcac 11010
cef65eaa
RS
11011 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11012 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11013 && integer_onep (TREE_OPERAND (arg0, 1))
11014 && integer_onep (arg1))
11015 {
11016 tem = TREE_OPERAND (arg0, 0);
db3927fb
AH
11017 return fold_build2_loc (loc, EQ_EXPR, type,
11018 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
cef65eaa
RS
11019 build_int_cst (TREE_TYPE (tem), 1)),
11020 build_int_cst (TREE_TYPE (tem), 0));
11021 }
11022 /* Fold ~X & 1 as (X & 1) == 0. */
11023 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11024 && integer_onep (arg1))
11025 {
11026 tem = TREE_OPERAND (arg0, 0);
db3927fb
AH
11027 return fold_build2_loc (loc, EQ_EXPR, type,
11028 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
cef65eaa
RS
11029 build_int_cst (TREE_TYPE (tem), 1)),
11030 build_int_cst (TREE_TYPE (tem), 0));
11031 }
11032
dd2c62dc
RS
11033 /* Fold (X ^ Y) & Y as ~X & Y. */
11034 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11035 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11036 {
db3927fb 11037 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
b8698a0f 11038 return fold_build2_loc (loc, BIT_AND_EXPR, type,
db3927fb
AH
11039 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11040 fold_convert_loc (loc, type, arg1));
dd2c62dc
RS
11041 }
11042 /* Fold (X ^ Y) & X as ~Y & X. */
11043 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11044 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11045 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11046 {
db3927fb
AH
11047 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11048 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11049 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11050 fold_convert_loc (loc, type, arg1));
dd2c62dc
RS
11051 }
11052 /* Fold X & (X ^ Y) as X & ~Y. */
11053 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11054 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11055 {
db3927fb
AH
11056 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11057 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11058 fold_convert_loc (loc, type, arg0),
11059 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
dd2c62dc
RS
11060 }
11061 /* Fold X & (Y ^ X) as ~Y & X. */
11062 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11063 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11064 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11065 {
db3927fb
AH
11066 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11067 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11068 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11069 fold_convert_loc (loc, type, arg0));
dd2c62dc
RS
11070 }
11071
db3927fb 11072 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
0aee4751
KH
11073 if (t1 != NULL_TREE)
11074 return t1;
11075 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11076 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11077 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11078 {
11079 unsigned int prec
11080 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11081
11082 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11083 && (~TREE_INT_CST_LOW (arg1)
11084 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
db3927fb
AH
11085 return
11086 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
0aee4751
KH
11087 }
11088
11089 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11090
11091 This results in more efficient code for machines without a NOR
11092 instruction. Combine will canonicalize to the first form
11093 which will allow use of NOR instructions provided by the
11094 backend if they exist. */
11095 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11096 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11097 {
db3927fb 11098 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7f20a5b7 11099 build2 (BIT_IOR_EXPR, type,
db3927fb
AH
11100 fold_convert_loc (loc, type,
11101 TREE_OPERAND (arg0, 0)),
11102 fold_convert_loc (loc, type,
11103 TREE_OPERAND (arg1, 0))));
0aee4751
KH
11104 }
11105
e5901cad
OW
11106 /* If arg0 is derived from the address of an object or function, we may
11107 be able to fold this expression using the object or function's
11108 alignment. */
11109 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11110 {
11111 unsigned HOST_WIDE_INT modulus, residue;
11112 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11113
617f3897
MJ
11114 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11115 integer_onep (arg1));
e5901cad
OW
11116
11117 /* This works because modulus is a power of 2. If this weren't the
11118 case, we'd have to replace it by its greatest power-of-2
11119 divisor: modulus & -modulus. */
11120 if (low < modulus)
11121 return build_int_cst (type, residue & low);
11122 }
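	  /* A worked instance of the alignment folding above: if ARG0 is the
	     address of an object known to be 8-byte aligned (modulus 8,
	     residue 0), then ARG0 & 7 folds to the constant 0.  */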
11123
22164c3d
JJ
11124 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11125 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11126 if the new mask might be further optimized. */
11127 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11128 || TREE_CODE (arg0) == RSHIFT_EXPR)
11129 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11130 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11131 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11132 < TYPE_PRECISION (TREE_TYPE (arg0))
11133 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11134 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11135 {
11136 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11137 unsigned HOST_WIDE_INT mask
11138 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11139 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11140 tree shift_type = TREE_TYPE (arg0);
11141
11142 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11143 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11144 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11145 && TYPE_PRECISION (TREE_TYPE (arg0))
11146 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11147 {
11148 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11149 tree arg00 = TREE_OPERAND (arg0, 0);
11150 /* See if more bits can be proven as zero because of
11151 zero extension. */
11152 if (TREE_CODE (arg00) == NOP_EXPR
11153 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11154 {
11155 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11156 if (TYPE_PRECISION (inner_type)
11157 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11158 && TYPE_PRECISION (inner_type) < prec)
11159 {
11160 prec = TYPE_PRECISION (inner_type);
11161 /* See if we can shorten the right shift. */
11162 if (shiftc < prec)
11163 shift_type = inner_type;
11164 }
11165 }
11166 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11167 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11168 zerobits <<= prec - shiftc;
11169 /* For arithmetic shift if sign bit could be set, zerobits
11170 can contain actually sign bits, so no transformation is
11171 possible, unless MASK masks them all away. In that
11172 case the shift needs to be converted into logical shift. */
11173 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11174 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11175 {
11176 if ((mask & zerobits) == 0)
11177 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11178 else
11179 zerobits = 0;
11180 }
11181 }
11182
11183 /* ((X << 16) & 0xff00) is (X, 0). */
11184 if ((mask & zerobits) == mask)
db3927fb
AH
11185 return omit_one_operand_loc (loc, type,
11186 build_int_cst (type, 0), arg0);
22164c3d
JJ
11187
11188 newmask = mask | zerobits;
11189 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11190 {
11191 unsigned int prec;
11192
11193 /* Only do the transformation if NEWMASK is some integer
11194 mode's mask. */
11195 for (prec = BITS_PER_UNIT;
11196 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11197 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11198 break;
11199 if (prec < HOST_BITS_PER_WIDE_INT
11200 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11201 {
776248b8
JJ
11202 tree newmaskt;
11203
22164c3d
JJ
11204 if (shift_type != TREE_TYPE (arg0))
11205 {
db3927fb
AH
11206 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11207 fold_convert_loc (loc, shift_type,
11208 TREE_OPERAND (arg0, 0)),
22164c3d 11209 TREE_OPERAND (arg0, 1));
db3927fb 11210 tem = fold_convert_loc (loc, type, tem);
22164c3d
JJ
11211 }
11212 else
11213 tem = op0;
776248b8
JJ
11214 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11215 if (!tree_int_cst_equal (newmaskt, arg1))
db3927fb 11216 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
22164c3d
JJ
11217 }
11218 }
11219 }
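	  /* As an illustration: for a 32-bit unsigned X, (X >> 24) & 0xff has
	     its mask widened to 0xffffffff because the discarded bits are
	     known to be zero, and the resulting all-ones AND then folds away,
	     leaving just X >> 24.  */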
11220
0aee4751
KH
11221 goto associate;
11222
11223 case RDIV_EXPR:
11224 /* Don't touch a floating-point divide by zero unless the mode
11225 of the constant can represent infinity. */
11226 if (TREE_CODE (arg1) == REAL_CST
11227 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11228 && real_zerop (arg1))
62ab45cc 11229 return NULL_TREE;
0aee4751 11230
ffbc33cc 11231 /* Optimize A / A to 1.0 if we don't care about
1d8b38a0
UB
11232 NaNs or Infinities. Skip the transformation
11233 for non-real operands. */
11234 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11235 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
ffbc33cc
UB
11236 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11237 && operand_equal_p (arg0, arg1, 0))
11238 {
11239 tree r = build_real (TREE_TYPE (arg0), dconst1);
11240
db3927fb 11241 return omit_two_operands_loc (loc, type, r, arg0, arg1);
ffbc33cc
UB
11242 }
11243
1d8b38a0
UB
11244 /* The complex version of the above A / A optimization. */
11245 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11246 && operand_equal_p (arg0, arg1, 0))
11247 {
11248 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11249 if (! HONOR_NANS (TYPE_MODE (elem_type))
11250 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11251 {
11252 tree r = build_real (elem_type, dconst1);
11253 /* omit_two_operands will call fold_convert for us. */
db3927fb 11254 return omit_two_operands_loc (loc, type, r, arg0, arg1);
1d8b38a0
UB
11255 }
11256 }
11257
0aee4751
KH
11258 /* (-A) / (-B) -> A / B */
11259 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
db3927fb 11260 return fold_build2_loc (loc, RDIV_EXPR, type,
7f20a5b7
KH
11261 TREE_OPERAND (arg0, 0),
11262 negate_expr (arg1));
0aee4751 11263 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
db3927fb 11264 return fold_build2_loc (loc, RDIV_EXPR, type,
7f20a5b7
KH
11265 negate_expr (arg0),
11266 TREE_OPERAND (arg1, 0));
0aee4751
KH
11267
11268 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11269 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11270 && real_onep (arg1))
db3927fb 11271 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
11272
11273 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11274 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11275 && real_minus_onep (arg1))
db3927fb
AH
11276 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11277 negate_expr (arg0)));
0aee4751
KH
11278
11279 /* If ARG1 is a constant, we can convert this to a multiply by the
11280 reciprocal. This does not have the same rounding properties,
a1a82611 11281 so only do this if -freciprocal-math. We can actually
0aee4751
KH
11282 always safely do it if ARG1 is a power of two, but it's hard to
11283 tell if it is or not in a portable manner. */
11284 if (TREE_CODE (arg1) == REAL_CST)
11285 {
a1a82611 11286 if (flag_reciprocal_math
0aee4751 11287 && 0 != (tem = const_binop (code, build_real (type, dconst1),
43a5d30b 11288 arg1)))
db3927fb 11289 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
0aee4751
KH
11290 /* Find the reciprocal if optimizing and the result is exact. */
11291 if (optimize)
11292 {
11293 REAL_VALUE_TYPE r;
11294 r = TREE_REAL_CST (arg1);
11295 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11296 {
11297 tem = build_real (type, r);
db3927fb
AH
11298 return fold_build2_loc (loc, MULT_EXPR, type,
11299 fold_convert_loc (loc, type, arg0), tem);
0aee4751
KH
11300 }
11301 }
11302 }
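      /* For instance, X / 4.0 becomes X * 0.25 whenever we are optimizing,
	 since the reciprocal of 4.0 is exact; X / 3.0 becomes X * (1.0/3.0)
	 only under -freciprocal-math, because the rounded reciprocal changes
	 the result.  */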
b8698a0f 11303 /* Convert A/B/C to A/(B*C). */
a1a82611 11304 if (flag_reciprocal_math
0aee4751 11305 && TREE_CODE (arg0) == RDIV_EXPR)
db3927fb
AH
11306 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11307 fold_build2_loc (loc, MULT_EXPR, type,
7f20a5b7 11308 TREE_OPERAND (arg0, 1), arg1));
0aee4751
KH
11309
11310 /* Convert A/(B/C) to (A/B)*C. */
a1a82611 11311 if (flag_reciprocal_math
0aee4751 11312 && TREE_CODE (arg1) == RDIV_EXPR)
db3927fb
AH
11313 return fold_build2_loc (loc, MULT_EXPR, type,
11314 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
7f20a5b7
KH
11315 TREE_OPERAND (arg1, 0)),
11316 TREE_OPERAND (arg1, 1));
0aee4751
KH
11317
11318 /* Convert C1/(X*C2) into (C1/C2)/X. */
a1a82611 11319 if (flag_reciprocal_math
0aee4751
KH
11320 && TREE_CODE (arg1) == MULT_EXPR
11321 && TREE_CODE (arg0) == REAL_CST
11322 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11323 {
11324 tree tem = const_binop (RDIV_EXPR, arg0,
43a5d30b 11325 TREE_OPERAND (arg1, 1));
0aee4751 11326 if (tem)
db3927fb 11327 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
7f20a5b7 11328 TREE_OPERAND (arg1, 0));
0aee4751
KH
11329 }
11330
0aee4751
KH
11331 if (flag_unsafe_math_optimizations)
11332 {
11333 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11334 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11335
11336 /* Optimize sin(x)/cos(x) as tan(x). */
11337 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11338 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11339 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
5039610b
SL
11340 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11341 CALL_EXPR_ARG (arg1, 0), 0))
0aee4751
KH
11342 {
11343 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11344
11345 if (tanfn != NULL_TREE)
db3927fb 11346 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
0aee4751
KH
11347 }
11348
11349 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11350 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11351 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11352 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
5039610b
SL
11353 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11354 CALL_EXPR_ARG (arg1, 0), 0))
0aee4751
KH
11355 {
11356 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11357
11358 if (tanfn != NULL_TREE)
11359 {
db3927fb
AH
11360 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11361 CALL_EXPR_ARG (arg0, 0));
11362 return fold_build2_loc (loc, RDIV_EXPR, type,
7f20a5b7 11363 build_real (type, dconst1), tmp);
0aee4751
KH
11364 }
11365 }
11366
d531830f
RS
11367 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11368 NaNs or Infinities. */
11369 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11370 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11371 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11372 {
5039610b
SL
11373 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11374 tree arg01 = CALL_EXPR_ARG (arg1, 0);
d531830f
RS
11375
11376 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11377 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11378 && operand_equal_p (arg00, arg01, 0))
11379 {
11380 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11381
11382 if (cosfn != NULL_TREE)
db3927fb 11383 return build_call_expr_loc (loc, cosfn, 1, arg00);
d531830f
RS
11384 }
11385 }
11386
11387 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
6416ae7f 11388 NaNs or Infinities. */
d531830f
RS
11389 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11390 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11391 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11392 {
5039610b
SL
11393 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11394 tree arg01 = CALL_EXPR_ARG (arg1, 0);
d531830f
RS
11395
11396 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11397 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11398 && operand_equal_p (arg00, arg01, 0))
11399 {
11400 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11401
11402 if (cosfn != NULL_TREE)
11403 {
db3927fb
AH
11404 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11405 return fold_build2_loc (loc, RDIV_EXPR, type,
d531830f 11406 build_real (type, dconst1),
b71b8086 11407 tmp);
d531830f
RS
11408 }
11409 }
11410 }
11411
0aee4751
KH
11412 /* Optimize pow(x,c)/x as pow(x,c-1). */
11413 if (fcode0 == BUILT_IN_POW
11414 || fcode0 == BUILT_IN_POWF
11415 || fcode0 == BUILT_IN_POWL)
11416 {
5039610b
SL
11417 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11418 tree arg01 = CALL_EXPR_ARG (arg0, 1);
0aee4751 11419 if (TREE_CODE (arg01) == REAL_CST
455f14dd 11420 && !TREE_OVERFLOW (arg01)
0aee4751
KH
11421 && operand_equal_p (arg1, arg00, 0))
11422 {
5039610b 11423 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
0aee4751 11424 REAL_VALUE_TYPE c;
5039610b 11425 tree arg;
0aee4751
KH
11426
11427 c = TREE_REAL_CST (arg01);
11428 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11429 arg = build_real (type, c);
db3927fb 11430 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
0aee4751
KH
11431 }
11432 }
d531830f 11433
9883e373
UB
11434 /* Optimize a/root(b/c) into a*root(c/b). */
11435 if (BUILTIN_ROOT_P (fcode1))
f1da2df1
UB
11436 {
11437 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11438
11439 if (TREE_CODE (rootarg) == RDIV_EXPR)
11440 {
11441 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11442 tree b = TREE_OPERAND (rootarg, 0);
11443 tree c = TREE_OPERAND (rootarg, 1);
11444
db3927fb 11445 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
f1da2df1 11446
db3927fb
AH
11447 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11448 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
f1da2df1
UB
11449 }
11450 }
11451
d531830f
RS
11452 /* Optimize x/expN(y) into x*expN(-y). */
11453 if (BUILTIN_EXPONENT_P (fcode1))
11454 {
5039610b
SL
11455 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11456 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
db3927fb
AH
11457 arg1 = build_call_expr_loc (loc,
11458 expfn, 1,
11459 fold_convert_loc (loc, type, arg));
11460 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
d531830f
RS
11461 }
11462
11463 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11464 if (fcode1 == BUILT_IN_POW
11465 || fcode1 == BUILT_IN_POWF
11466 || fcode1 == BUILT_IN_POWL)
11467 {
5039610b
SL
11468 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11469 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11470 tree arg11 = CALL_EXPR_ARG (arg1, 1);
db3927fb
AH
11471 tree neg11 = fold_convert_loc (loc, type,
11472 negate_expr (arg11));
11473 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11474 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
d531830f 11475 }
0aee4751 11476 }
fd6c76f4 11477 return NULL_TREE;
0aee4751
KH
11478
11479 case TRUNC_DIV_EXPR:
0aee4751 11480 case FLOOR_DIV_EXPR:
0f35201e
AM
11481 /* Simplify A / (B << N) where A and B are positive and B is
11482 a power of 2, to A >> (N + log2(B)). */
6ac01510 11483 strict_overflow_p = false;
0f35201e 11484 if (TREE_CODE (arg1) == LSHIFT_EXPR
6ac01510 11485 && (TYPE_UNSIGNED (type)
916c75b4 11486 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
0f35201e
AM
11487 {
11488 tree sval = TREE_OPERAND (arg1, 0);
11489 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11490 {
11491 tree sh_cnt = TREE_OPERAND (arg1, 1);
11492 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11493
6ac01510
ILT
11494 if (strict_overflow_p)
11495 fold_overflow_warning (("assuming signed overflow does not "
11496 "occur when simplifying A / (B << N)"),
11497 WARN_STRICT_OVERFLOW_MISC);
11498
db3927fb 11499 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
0f35201e 11500 sh_cnt, build_int_cst (NULL_TREE, pow2));
db3927fb
AH
11501 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11502 fold_convert_loc (loc, type, arg0), sh_cnt);
0f35201e
AM
11503 }
11504 }
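      /* For example, with unsigned A, A / (2 << N) is rewritten as
	 A >> (N + 1), and A / (8 << N) as A >> (N + 3).  */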
65648dd4
RG
11505
11506 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11507 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11508 if (INTEGRAL_TYPE_P (type)
11509 && TYPE_UNSIGNED (type)
11510 && code == FLOOR_DIV_EXPR)
db3927fb 11511 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
65648dd4 11512
0f35201e
AM
11513 /* Fall thru */
11514
11515 case ROUND_DIV_EXPR:
0aee4751
KH
11516 case CEIL_DIV_EXPR:
11517 case EXACT_DIV_EXPR:
11518 if (integer_onep (arg1))
db3927fb 11519 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751 11520 if (integer_zerop (arg1))
62ab45cc 11521 return NULL_TREE;
0aee4751
KH
11522 /* X / -1 is -X. */
11523 if (!TYPE_UNSIGNED (type)
11524 && TREE_CODE (arg1) == INTEGER_CST
11525 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11526 && TREE_INT_CST_HIGH (arg1) == -1)
db3927fb 11527 return fold_convert_loc (loc, type, negate_expr (arg0));
0aee4751 11528
37d3243d
AP
11529 /* Convert -A / -B to A / B when the type is signed and overflow is
11530 undefined. */
eeef0e45 11531 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
37d3243d
AP
11532 && TREE_CODE (arg0) == NEGATE_EXPR
11533 && negate_expr_p (arg1))
6ac01510
ILT
11534 {
11535 if (INTEGRAL_TYPE_P (type))
11536 fold_overflow_warning (("assuming signed overflow does not occur "
11537 "when distributing negation across "
11538 "division"),
11539 WARN_STRICT_OVERFLOW_MISC);
db3927fb
AH
11540 return fold_build2_loc (loc, code, type,
11541 fold_convert_loc (loc, type,
11542 TREE_OPERAND (arg0, 0)),
11543 fold_convert_loc (loc, type,
11544 negate_expr (arg1)));
6ac01510 11545 }
eeef0e45 11546 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
37d3243d
AP
11547 && TREE_CODE (arg1) == NEGATE_EXPR
11548 && negate_expr_p (arg0))
6ac01510
ILT
11549 {
11550 if (INTEGRAL_TYPE_P (type))
11551 fold_overflow_warning (("assuming signed overflow does not occur "
11552 "when distributing negation across "
11553 "division"),
11554 WARN_STRICT_OVERFLOW_MISC);
db3927fb
AH
11555 return fold_build2_loc (loc, code, type,
11556 fold_convert_loc (loc, type,
11557 negate_expr (arg0)),
11558 fold_convert_loc (loc, type,
11559 TREE_OPERAND (arg1, 0)));
6ac01510 11560 }
37d3243d 11561
0aee4751
KH
11562 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11563 operation, EXACT_DIV_EXPR.
11564
11565 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
 11566 	 At one time others generated faster code, but it's not clear if they do
 11567 	 after the last round of changes to the DIV code in expmed.c.  */
11568 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11569 && multiple_of_p (type, arg0, arg1))
db3927fb 11570 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
0aee4751 11571
6ac01510 11572 strict_overflow_p = false;
0aee4751 11573 if (TREE_CODE (arg1) == INTEGER_CST
6ac01510
ILT
11574 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11575 &strict_overflow_p)))
11576 {
11577 if (strict_overflow_p)
11578 fold_overflow_warning (("assuming signed overflow does not occur "
11579 "when simplifying division"),
11580 WARN_STRICT_OVERFLOW_MISC);
db3927fb 11581 return fold_convert_loc (loc, type, tem);
6ac01510 11582 }
0aee4751 11583
fd6c76f4 11584 return NULL_TREE;
0aee4751
KH
11585
11586 case CEIL_MOD_EXPR:
11587 case FLOOR_MOD_EXPR:
11588 case ROUND_MOD_EXPR:
11589 case TRUNC_MOD_EXPR:
11590 /* X % 1 is always zero, but be sure to preserve any side
11591 effects in X. */
11592 if (integer_onep (arg1))
db3927fb 11593 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
0aee4751
KH
11594
11595 /* X % 0, return X % 0 unchanged so that we can get the
11596 proper warnings and errors. */
11597 if (integer_zerop (arg1))
62ab45cc 11598 return NULL_TREE;
0aee4751
KH
11599
11600 /* 0 % X is always zero, but be sure to preserve any side
11601 effects in X. Place this after checking for X == 0. */
11602 if (integer_zerop (arg0))
db3927fb 11603 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
0aee4751
KH
11604
11605 /* X % -1 is zero. */
11606 if (!TYPE_UNSIGNED (type)
11607 && TREE_CODE (arg1) == INTEGER_CST
11608 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11609 && TREE_INT_CST_HIGH (arg1) == -1)
db3927fb 11610 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
0aee4751 11611
0aee4751
KH
11612 /* X % -C is the same as X % C. */
11613 if (code == TRUNC_MOD_EXPR
11614 && !TYPE_UNSIGNED (type)
11615 && TREE_CODE (arg1) == INTEGER_CST
455f14dd 11616 && !TREE_OVERFLOW (arg1)
0aee4751 11617 && TREE_INT_CST_HIGH (arg1) < 0
eeef0e45 11618 && !TYPE_OVERFLOW_TRAPS (type)
0aee4751
KH
11619 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11620 && !sign_bit_p (arg1, arg1))
db3927fb
AH
11621 return fold_build2_loc (loc, code, type,
11622 fold_convert_loc (loc, type, arg0),
11623 fold_convert_loc (loc, type,
11624 negate_expr (arg1)));
0aee4751
KH
11625
11626 /* X % -Y is the same as X % Y. */
11627 if (code == TRUNC_MOD_EXPR
11628 && !TYPE_UNSIGNED (type)
11629 && TREE_CODE (arg1) == NEGATE_EXPR
eeef0e45 11630 && !TYPE_OVERFLOW_TRAPS (type))
db3927fb
AH
11631 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11632 fold_convert_loc (loc, type,
11633 TREE_OPERAND (arg1, 0)));
0aee4751 11634
9e9ef331 11635 strict_overflow_p = false;
0aee4751 11636 if (TREE_CODE (arg1) == INTEGER_CST
6ac01510
ILT
11637 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11638 &strict_overflow_p)))
11639 {
11640 if (strict_overflow_p)
11641 fold_overflow_warning (("assuming signed overflow does not occur "
fa10beec 11642 "when simplifying modulus"),
6ac01510 11643 WARN_STRICT_OVERFLOW_MISC);
db3927fb 11644 return fold_convert_loc (loc, type, tem);
6ac01510 11645 }
0aee4751 11646
9e9ef331
EB
11647 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11648 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11649 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11650 && (TYPE_UNSIGNED (type)
11651 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11652 {
11653 tree c = arg1;
11654 /* Also optimize A % (C << N) where C is a power of 2,
11655 to A & ((C << N) - 1). */
11656 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11657 c = TREE_OPERAND (arg1, 0);
11658
11659 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11660 {
11661 tree mask
11662 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11663 build_int_cst (TREE_TYPE (arg1), 1));
11664 if (strict_overflow_p)
11665 fold_overflow_warning (("assuming signed overflow does not "
11666 "occur when simplifying "
11667 "X % (power of two)"),
11668 WARN_STRICT_OVERFLOW_MISC);
11669 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11670 fold_convert_loc (loc, type, arg0),
11671 fold_convert_loc (loc, type, mask));
11672 }
11673 }
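      /* For example, unsigned X % 16 becomes X & 15, and X % (4 << N)
	 becomes X & ((4 << N) - 1).  */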
11674
fd6c76f4 11675 return NULL_TREE;
0aee4751
KH
11676
11677 case LROTATE_EXPR:
11678 case RROTATE_EXPR:
11679 if (integer_all_onesp (arg0))
db3927fb 11680 return omit_one_operand_loc (loc, type, arg0, arg1);
0aee4751
KH
11681 goto shift;
11682
11683 case RSHIFT_EXPR:
11684 /* Optimize -1 >> x for arithmetic right shifts. */
bd170bbc
RG
11685 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11686 && tree_expr_nonnegative_p (arg1))
db3927fb 11687 return omit_one_operand_loc (loc, type, arg0, arg1);
0aee4751
KH
11688 /* ... fall through ... */
11689
11690 case LSHIFT_EXPR:
11691 shift:
11692 if (integer_zerop (arg1))
db3927fb 11693 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751 11694 if (integer_zerop (arg0))
db3927fb 11695 return omit_one_operand_loc (loc, type, arg0, arg1);
0aee4751
KH
11696
11697 /* Since negative shift count is not well-defined,
11698 don't try to compute it in the compiler. */
11699 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
62ab45cc 11700 return NULL_TREE;
e3d025cb
JM
11701
11702 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
2d60e929 11703 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
e3d025cb
JM
11704 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11705 && host_integerp (TREE_OPERAND (arg0, 1), false)
11706 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11707 {
11708 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11709 + TREE_INT_CST_LOW (arg1));
11710
11711 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11712 being well defined. */
11713 if (low >= TYPE_PRECISION (type))
11714 {
11715 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11716 low = low % TYPE_PRECISION (type);
11717 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
db3927fb 11718 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
2c0eba5a 11719 TREE_OPERAND (arg0, 0));
e3d025cb
JM
11720 else
11721 low = TYPE_PRECISION (type) - 1;
11722 }
11723
db3927fb 11724 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
e3d025cb
JM
11725 build_int_cst (type, low));
11726 }
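      /* For example, (X >> 2) >> 3 becomes X >> 5.  If the combined count
	 reaches the precision of the type, a left or unsigned shift folds to
	 zero and an arithmetic right shift is clamped to precision - 1.  */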
11727
a165e746
JM
11728 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11729 into x & ((unsigned)-1 >> c) for unsigned types. */
11730 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11731 || (TYPE_UNSIGNED (type)
11732 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
e3d025cb
JM
11733 && host_integerp (arg1, false)
11734 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11735 && host_integerp (TREE_OPERAND (arg0, 1), false)
11736 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11737 {
11738 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11739 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
e3d025cb
JM
11740 tree lshift;
11741 tree arg00;
11742
11743 if (low0 == low1)
11744 {
db3927fb 11745 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
e3d025cb 11746
a165e746
JM
11747 lshift = build_int_cst (type, -1);
11748 lshift = int_const_binop (code, lshift, arg1, 0);
e3d025cb 11749
db3927fb 11750 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
e3d025cb
JM
11751 }
11752 }
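      /* For example, with a 32-bit X, (X >> 4) << 4 becomes X & 0xfffffff0,
	 and for unsigned X, (X << 4) >> 4 becomes X & 0x0fffffff.  */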
11753
0aee4751
KH
11754 /* Rewrite an LROTATE_EXPR by a constant into an
11755 RROTATE_EXPR by a new constant. */
11756 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11757 {
000d8d44 11758 tree tem = build_int_cst (TREE_TYPE (arg1),
70582b3a 11759 TYPE_PRECISION (type));
43a5d30b 11760 tem = const_binop (MINUS_EXPR, tem, arg1);
db3927fb 11761 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
0aee4751
KH
11762 }
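      /* For example, a 32-bit left rotate by 8 is rewritten as a right
	 rotate by 24.  */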
11763
11764 /* If we have a rotate of a bit operation with the rotate count and
11765 the second operand of the bit operation both constant,
11766 permute the two operations. */
11767 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11768 && (TREE_CODE (arg0) == BIT_AND_EXPR
11769 || TREE_CODE (arg0) == BIT_IOR_EXPR
11770 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11771 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
db3927fb
AH
11772 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11773 fold_build2_loc (loc, code, type,
7f20a5b7 11774 TREE_OPERAND (arg0, 0), arg1),
db3927fb 11775 fold_build2_loc (loc, code, type,
7f20a5b7 11776 TREE_OPERAND (arg0, 1), arg1));
0aee4751 11777
70582b3a
RG
11778 /* Two consecutive rotates adding up to the precision of the
11779 type can be ignored. */
0aee4751
KH
11780 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11781 && TREE_CODE (arg0) == RROTATE_EXPR
11782 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11783 && TREE_INT_CST_HIGH (arg1) == 0
11784 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11785 && ((TREE_INT_CST_LOW (arg1)
11786 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
70582b3a 11787 == (unsigned int) TYPE_PRECISION (type)))
0aee4751
KH
11788 return TREE_OPERAND (arg0, 0);
11789
22164c3d
JJ
11790 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11791 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11792 if the latter can be further optimized. */
11793 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11794 && TREE_CODE (arg0) == BIT_AND_EXPR
11795 && TREE_CODE (arg1) == INTEGER_CST
11796 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11797 {
db3927fb
AH
11798 tree mask = fold_build2_loc (loc, code, type,
11799 fold_convert_loc (loc, type,
11800 TREE_OPERAND (arg0, 1)),
22164c3d 11801 arg1);
db3927fb
AH
11802 tree shift = fold_build2_loc (loc, code, type,
11803 fold_convert_loc (loc, type,
11804 TREE_OPERAND (arg0, 0)),
22164c3d 11805 arg1);
db3927fb 11806 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
22164c3d
JJ
11807 if (tem)
11808 return tem;
11809 }
11810
fd6c76f4 11811 return NULL_TREE;
0aee4751
KH
11812
11813 case MIN_EXPR:
11814 if (operand_equal_p (arg0, arg1, 0))
db3927fb 11815 return omit_one_operand_loc (loc, type, arg0, arg1);
0aee4751
KH
11816 if (INTEGRAL_TYPE_P (type)
11817 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
db3927fb
AH
11818 return omit_one_operand_loc (loc, type, arg1, arg0);
11819 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
292f30c5
EB
11820 if (tem)
11821 return tem;
0aee4751
KH
11822 goto associate;
11823
11824 case MAX_EXPR:
11825 if (operand_equal_p (arg0, arg1, 0))
db3927fb 11826 return omit_one_operand_loc (loc, type, arg0, arg1);
0aee4751
KH
11827 if (INTEGRAL_TYPE_P (type)
11828 && TYPE_MAX_VALUE (type)
11829 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
db3927fb
AH
11830 return omit_one_operand_loc (loc, type, arg1, arg0);
11831 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
292f30c5
EB
11832 if (tem)
11833 return tem;
0aee4751
KH
11834 goto associate;
11835
11836 case TRUTH_ANDIF_EXPR:
11837 /* Note that the operands of this must be ints
11838 and their values must be 0 or 1.
11839 ("true" is a fixed value perhaps depending on the language.) */
11840 /* If first arg is constant zero, return it. */
11841 if (integer_zerop (arg0))
db3927fb 11842 return fold_convert_loc (loc, type, arg0);
0aee4751
KH
11843 case TRUTH_AND_EXPR:
11844 /* If either arg is constant true, drop it. */
11845 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
db3927fb 11846 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
0aee4751
KH
11847 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11848 /* Preserve sequence points. */
11849 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
db3927fb 11850 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
11851 /* If second arg is constant zero, result is zero, but first arg
11852 must be evaluated. */
11853 if (integer_zerop (arg1))
db3927fb 11854 return omit_one_operand_loc (loc, type, arg1, arg0);
0aee4751
KH
11855 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11856 case will be handled here. */
11857 if (integer_zerop (arg0))
db3927fb 11858 return omit_one_operand_loc (loc, type, arg0, arg1);
0aee4751
KH
11859
11860 /* !X && X is always false. */
11861 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11862 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
db3927fb 11863 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
0aee4751
KH
11864 /* X && !X is always false. */
11865 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11866 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
db3927fb 11867 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
0aee4751
KH
11868
11869 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11870 means A >= Y && A != MAX, but in this case we know that
11871 A < X <= MAX. */
11872
11873 if (!TREE_SIDE_EFFECTS (arg0)
11874 && !TREE_SIDE_EFFECTS (arg1))
11875 {
db3927fb 11876 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
70a9e64b 11877 if (tem && !operand_equal_p (tem, arg0, 0))
db3927fb 11878 return fold_build2_loc (loc, code, type, tem, arg1);
0aee4751 11879
db3927fb 11880 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
70a9e64b 11881 if (tem && !operand_equal_p (tem, arg1, 0))
db3927fb 11882 return fold_build2_loc (loc, code, type, arg0, tem);
0aee4751
KH
11883 }
11884
11885 truth_andor:
11886 /* We only do these simplifications if we are optimizing. */
11887 if (!optimize)
62ab45cc 11888 return NULL_TREE;
0aee4751
KH
11889
11890 /* Check for things like (A || B) && (A || C). We can convert this
11891 to A || (B && C). Note that either operator can be any of the four
11892 truth and/or operations and the transformation will still be
11893 valid. Also note that we only care about order for the
11894 ANDIF and ORIF operators. If B contains side effects, this
11895 might change the truth-value of A. */
11896 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11897 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11898 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11899 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11900 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11901 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11902 {
11903 tree a00 = TREE_OPERAND (arg0, 0);
11904 tree a01 = TREE_OPERAND (arg0, 1);
11905 tree a10 = TREE_OPERAND (arg1, 0);
11906 tree a11 = TREE_OPERAND (arg1, 1);
11907 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11908 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11909 && (code == TRUTH_AND_EXPR
11910 || code == TRUTH_OR_EXPR));
11911
11912 if (operand_equal_p (a00, a10, 0))
db3927fb
AH
11913 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11914 fold_build2_loc (loc, code, type, a01, a11));
0aee4751 11915 else if (commutative && operand_equal_p (a00, a11, 0))
db3927fb
AH
11916 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11917 fold_build2_loc (loc, code, type, a01, a10));
0aee4751 11918 else if (commutative && operand_equal_p (a01, a10, 0))
db3927fb
AH
11919 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
11920 fold_build2_loc (loc, code, type, a00, a11));
0aee4751
KH
11921
 11922 	  /* This case is tricky because we must either have commutative
11923 operators or else A10 must not have side-effects. */
11924
11925 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11926 && operand_equal_p (a01, a11, 0))
db3927fb
AH
11927 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11928 fold_build2_loc (loc, code, type, a00, a10),
7f20a5b7 11929 a01);
0aee4751
KH
11930 }
11931
11932 /* See if we can build a range comparison. */
db3927fb 11933 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
0aee4751
KH
11934 return tem;
11935
27d0d96a
BS
11936 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
11937 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
11938 {
11939 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
11940 if (tem)
11941 return fold_build2_loc (loc, code, type, tem, arg1);
11942 }
11943
11944 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
11945 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
11946 {
11947 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
11948 if (tem)
11949 return fold_build2_loc (loc, code, type, arg0, tem);
11950 }
11951
0aee4751
KH
11952 /* Check for the possibility of merging component references. If our
11953 lhs is another similar operation, try to merge its rhs with our
11954 rhs. Then try to merge our lhs and rhs. */
11955 if (TREE_CODE (arg0) == code
db3927fb 11956 && 0 != (tem = fold_truthop (loc, code, type,
0aee4751 11957 TREE_OPERAND (arg0, 1), arg1)))
db3927fb 11958 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
0aee4751 11959
db3927fb 11960 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
0aee4751
KH
11961 return tem;
11962
62ab45cc 11963 return NULL_TREE;
0aee4751
KH
11964
11965 case TRUTH_ORIF_EXPR:
11966 /* Note that the operands of this must be ints
11967 and their values must be 0 or true.
11968 ("true" is a fixed value perhaps depending on the language.) */
11969 /* If first arg is constant true, return it. */
11970 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
db3927fb 11971 return fold_convert_loc (loc, type, arg0);
0aee4751
KH
11972 case TRUTH_OR_EXPR:
11973 /* If either arg is constant zero, drop it. */
11974 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
db3927fb 11975 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
0aee4751
KH
11976 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11977 /* Preserve sequence points. */
11978 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
db3927fb 11979 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
11980 /* If second arg is constant true, result is true, but we must
11981 evaluate first arg. */
11982 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
db3927fb 11983 return omit_one_operand_loc (loc, type, arg1, arg0);
0aee4751
KH
11984 /* Likewise for first arg, but note this only occurs here for
11985 TRUTH_OR_EXPR. */
11986 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
db3927fb 11987 return omit_one_operand_loc (loc, type, arg0, arg1);
0aee4751
KH
11988
11989 /* !X || X is always true. */
11990 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11991 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
db3927fb 11992 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
0aee4751
KH
11993 /* X || !X is always true. */
11994 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11995 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
db3927fb 11996 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
0aee4751
KH
11997
11998 goto truth_andor;
11999
12000 case TRUTH_XOR_EXPR:
12001 /* If the second arg is constant zero, drop it. */
12002 if (integer_zerop (arg1))
db3927fb 12003 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
12004 /* If the second arg is constant true, this is a logical inversion. */
12005 if (integer_onep (arg1))
90ec750d
RS
12006 {
12007 /* Only call invert_truthvalue if operand is a truth value. */
12008 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
db3927fb 12009 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
90ec750d 12010 else
db3927fb
AH
12011 tem = invert_truthvalue_loc (loc, arg0);
12012 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
90ec750d 12013 }
0aee4751
KH
12014 /* Identical arguments cancel to zero. */
12015 if (operand_equal_p (arg0, arg1, 0))
db3927fb 12016 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
0aee4751
KH
12017
12018 /* !X ^ X is always true. */
12019 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12020 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
db3927fb 12021 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
0aee4751
KH
12022
12023 /* X ^ !X is always true. */
12024 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12025 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
db3927fb 12026 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
0aee4751 12027
62ab45cc 12028 return NULL_TREE;
0aee4751
KH
12029
12030 case EQ_EXPR:
12031 case NE_EXPR:
db3927fb 12032 tem = fold_comparison (loc, code, type, op0, op1);
e26ec0bb
RS
12033 if (tem != NULL_TREE)
12034 return tem;
210dfe6e 12035
a7e1c928
AP
12036 /* bool_var != 0 becomes bool_var. */
12037 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12038 && code == NE_EXPR)
db3927fb 12039 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
e26ec0bb 12040
a7e1c928
AP
12041 /* bool_var == 1 becomes bool_var. */
12042 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12043 && code == EQ_EXPR)
db3927fb 12044 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751 12045
7934558d
AP
12046 /* bool_var != 1 becomes !bool_var. */
12047 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12048 && code == NE_EXPR)
db3927fb
AH
12049 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12050 fold_convert_loc (loc, type, arg0));
7934558d
AP
12051
12052 /* bool_var == 0 becomes !bool_var. */
12053 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12054 && code == EQ_EXPR)
db3927fb
AH
12055 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12056 fold_convert_loc (loc, type, arg0));
7934558d 12057
44e10129
MM
12058 /* !exp != 0 becomes !exp */
12059 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12060 && code == NE_EXPR)
12061 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12062
0aee4751
KH
12063 /* If this is an equality comparison of the address of two non-weak,
12064 unaliased symbols neither of which are extern (since we do not
12065 have access to attributes for externs), then we know the result. */
e26ec0bb 12066 if (TREE_CODE (arg0) == ADDR_EXPR
820cc88f 12067 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
0aee4751
KH
12068 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12069 && ! lookup_attribute ("alias",
12070 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12071 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12072 && TREE_CODE (arg1) == ADDR_EXPR
820cc88f 12073 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
0aee4751
KH
12074 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12075 && ! lookup_attribute ("alias",
12076 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12077 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
59f7a202
JL
12078 {
12079 /* We know that we're looking at the address of two
12080 non-weak, unaliased, static _DECL nodes.
12081
12082 It is both wasteful and incorrect to call operand_equal_p
12083 to compare the two ADDR_EXPR nodes. It is wasteful in that
12084 all we need to do is test pointer equality for the arguments
12085 to the two ADDR_EXPR nodes. It is incorrect to use
12086 operand_equal_p as that function is NOT equivalent to a
12087 C equality test. It can in fact return false for two
12088 objects which would test as equal using the C equality
12089 operator. */
12090 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12091 return constant_boolean_node (equal
12092 ? code == EQ_EXPR : code != EQ_EXPR,
12093 type);
12094 }
0aee4751 12095
e26ec0bb
RS
12096 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12097 a MINUS_EXPR of a constant, we can convert it into a comparison with
12098 a revised constant as long as no overflow occurs. */
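	 /* For instance, X + 3 == 10 folds to X == 7 and X - 3 == 10 folds
	    to X == 13, provided the adjusted constant does not overflow.  */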
12099 if (TREE_CODE (arg1) == INTEGER_CST
12100 && (TREE_CODE (arg0) == PLUS_EXPR
12101 || TREE_CODE (arg0) == MINUS_EXPR)
12102 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12103 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12104 ? MINUS_EXPR : PLUS_EXPR,
db3927fb
AH
12105 fold_convert_loc (loc, TREE_TYPE (arg0),
12106 arg1),
43a5d30b 12107 TREE_OPERAND (arg0, 1)))
455f14dd 12108 && !TREE_OVERFLOW (tem))
db3927fb 12109 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
0eeb03e6 12110
e26ec0bb
RS
12111 /* Similarly for a NEGATE_EXPR. */
12112 if (TREE_CODE (arg0) == NEGATE_EXPR
12113 && TREE_CODE (arg1) == INTEGER_CST
12114 && 0 != (tem = negate_expr (arg1))
12115 && TREE_CODE (tem) == INTEGER_CST
455f14dd 12116 && !TREE_OVERFLOW (tem))
db3927fb 12117 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
0eeb03e6 12118
cf06e5c1
RS
12119 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12120 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12121 && TREE_CODE (arg1) == INTEGER_CST
12122 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
db3927fb
AH
12123 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12124 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12125 fold_convert_loc (loc,
12126 TREE_TYPE (arg0),
12127 arg1),
cf06e5c1
RS
12128 TREE_OPERAND (arg0, 1)));
12129
6b12efe9
RG
12130 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
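      /* E.g. X + Y == X folds to Y == 0 and X - Y != X folds to Y != 0 for
	 integral and pointer types; the omitted operands are still kept for
	 their side effects.  */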
12131 if ((TREE_CODE (arg0) == PLUS_EXPR
12132 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12133 || TREE_CODE (arg0) == MINUS_EXPR)
a31498d2 12134 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
a31498d2
RG
12135 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12136 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12137 {
6b12efe9 12138 tree val = TREE_OPERAND (arg0, 1);
db3927fb
AH
12139 return omit_two_operands_loc (loc, type,
12140 fold_build2_loc (loc, code, type,
6b12efe9
RG
12141 val,
12142 build_int_cst (TREE_TYPE (val),
12143 0)),
12144 TREE_OPERAND (arg0, 0), arg1);
12145 }
12146
12147 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
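      /* With an odd constant C, C - X == X would require 2*X == C, which no
	 integer X can satisfy, so C - X == X folds to false and C - X != X
	 folds to true.  */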
12148 if (TREE_CODE (arg0) == MINUS_EXPR
12149 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12150 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12151 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12152 {
db3927fb 12153 return omit_two_operands_loc (loc, type,
6b12efe9
RG
12154 code == NE_EXPR
12155 ? boolean_true_node : boolean_false_node,
12156 TREE_OPERAND (arg0, 1), arg1);
a31498d2
RG
12157 }
12158
e26ec0bb
RS
12159 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12160 for !=. Don't do this for ordered comparisons due to overflow. */
12161 if (TREE_CODE (arg0) == MINUS_EXPR
12162 && integer_zerop (arg1))
db3927fb 12163 return fold_build2_loc (loc, code, type,
e26ec0bb 12164 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
0eeb03e6 12165
e26ec0bb
RS
12166 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12167 if (TREE_CODE (arg0) == ABS_EXPR
12168 && (integer_zerop (arg1) || real_zerop (arg1)))
db3927fb 12169 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
0eeb03e6 12170
e26ec0bb
RS
12171 /* If this is an EQ or NE comparison with zero and ARG0 is
12172 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12173 two operations, but the latter can be done in one less insn
12174 on machines that have only two-operand insns or on which a
12175 constant cannot be the first operand. */
12176 if (TREE_CODE (arg0) == BIT_AND_EXPR
12177 && integer_zerop (arg1))
12178 {
12179 tree arg00 = TREE_OPERAND (arg0, 0);
12180 tree arg01 = TREE_OPERAND (arg0, 1);
12181 if (TREE_CODE (arg00) == LSHIFT_EXPR
12182 && integer_onep (TREE_OPERAND (arg00, 0)))
5abe9685 12183 {
db3927fb 12184 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
5abe9685 12185 arg01, TREE_OPERAND (arg00, 1));
db3927fb 12186 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
5abe9685 12187 build_int_cst (TREE_TYPE (arg0), 1));
db3927fb
AH
12188 return fold_build2_loc (loc, code, type,
12189 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12190 arg1);
5abe9685
RG
12191 }
12192 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12193 && integer_onep (TREE_OPERAND (arg01, 0)))
12194 {
db3927fb 12195 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
5abe9685 12196 arg00, TREE_OPERAND (arg01, 1));
db3927fb 12197 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
5abe9685 12198 build_int_cst (TREE_TYPE (arg0), 1));
db3927fb
AH
12199 return fold_build2_loc (loc, code, type,
12200 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12201 arg1);
5abe9685 12202 }
e26ec0bb
RS
12203 }
12204
12205 /* If this is an NE or EQ comparison of zero against the result of a
12206 signed MOD operation whose second operand is a power of 2, make
12207 the MOD operation unsigned since it is simpler and equivalent. */
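      /* E.g. for signed X, X % 8 == 0 becomes (unsigned) X % 8 == 0, which
	 reduces to a simple test of the low-order bits.  */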
12208 if (integer_zerop (arg1)
12209 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12210 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12211 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12212 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12213 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12214 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12215 {
ca5ba2a3 12216 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
db3927fb
AH
12217 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12218 fold_convert_loc (loc, newtype,
12219 TREE_OPERAND (arg0, 0)),
12220 fold_convert_loc (loc, newtype,
12221 TREE_OPERAND (arg0, 1)));
e26ec0bb 12222
db3927fb
AH
12223 return fold_build2_loc (loc, code, type, newmod,
12224 fold_convert_loc (loc, newtype, arg1));
e26ec0bb
RS
12225 }
12226
a861485c
RS
12227 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12228 C1 is a valid shift constant, and C2 is a power of two, i.e.
12229 a single bit. */
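      /* For example, with C1 == 3 and C2 == 4, ((X >> 3) & 4) != 0 becomes
	 (X & 32) != 0, as long as 4 << 3 still fits in the precision of X.  */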
12230 if (TREE_CODE (arg0) == BIT_AND_EXPR
12231 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12232 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12233 == INTEGER_CST
12234 && integer_pow2p (TREE_OPERAND (arg0, 1))
12235 && integer_zerop (arg1))
12236 {
12237 tree itype = TREE_TYPE (arg0);
12238 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12239 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12240
12241 /* Check for a valid shift count. */
12242 if (TREE_INT_CST_HIGH (arg001) == 0
12243 && TREE_INT_CST_LOW (arg001) < prec)
12244 {
12245 tree arg01 = TREE_OPERAND (arg0, 1);
12246 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12247 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12248 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12249 can be rewritten as (X & (C2 << C1)) != 0. */
0ad12cd3 12250 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
a861485c 12251 {
db3927fb
AH
12252 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12253 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12254 return fold_build2_loc (loc, code, type, tem, arg1);
a861485c
RS
12255 }
12256 /* Otherwise, for signed (arithmetic) shifts,
12257 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12258 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12259 else if (!TYPE_UNSIGNED (itype))
db3927fb 12260 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
a861485c
RS
12261 arg000, build_int_cst (itype, 0));
12262		    /* Otherwise, for unsigned (logical) shifts,
12263 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12264 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12265 else
db3927fb 12266 return omit_one_operand_loc (loc, type,
a861485c
RS
12267 code == EQ_EXPR ? integer_one_node
12268 : integer_zero_node,
12269 arg000);
12270 }
12271 }
12272
e26ec0bb
RS
12273 /* If this is an NE comparison of zero with an AND of one, remove the
12274 comparison since the AND will give the correct value. */
12275 if (code == NE_EXPR
12276 && integer_zerop (arg1)
12277 && TREE_CODE (arg0) == BIT_AND_EXPR
12278 && integer_onep (TREE_OPERAND (arg0, 1)))
db3927fb 12279 return fold_convert_loc (loc, type, arg0);
e26ec0bb
RS
12280
12281 /* If we have (A & C) == C where C is a power of 2, convert this into
12282 (A & C) != 0. Similarly for NE_EXPR. */
12283 if (TREE_CODE (arg0) == BIT_AND_EXPR
12284 && integer_pow2p (TREE_OPERAND (arg0, 1))
12285 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
db3927fb
AH
12286 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12287 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12288 integer_zero_node));
e26ec0bb
RS
12289
12290 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12291 bit, then fold the expression into A < 0 or A >= 0. */
db3927fb 12292 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
e26ec0bb
RS
12293 if (tem)
12294 return tem;
12295
12296 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12297 Similarly for NE_EXPR. */
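      /* E.g. (A & 4) == 3 can never be true, since the bits outside the mask
	 are always zero in A & 4, so it folds to 0 (and (A & 4) != 3 to 1).  */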
12298 if (TREE_CODE (arg0) == BIT_AND_EXPR
12299 && TREE_CODE (arg1) == INTEGER_CST
12300 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12301 {
db3927fb 12302 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
e26ec0bb
RS
12303 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12304 TREE_OPERAND (arg0, 1));
db3927fb 12305 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
e26ec0bb
RS
12306 arg1, notc);
12307 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12308 if (integer_nonzerop (dandnotc))
db3927fb 12309 return omit_one_operand_loc (loc, type, rslt, arg0);
e26ec0bb
RS
12310 }
12311
12312 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12313 Similarly for NE_EXPR. */
12314 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12315 && TREE_CODE (arg1) == INTEGER_CST
12316 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12317 {
db3927fb
AH
12318 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12319 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
e26ec0bb
RS
12320 TREE_OPERAND (arg0, 1), notd);
12321 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12322 if (integer_nonzerop (candnotd))
db3927fb 12323 return omit_one_operand_loc (loc, type, rslt, arg0);
e26ec0bb
RS
12324 }
12325
45dc13b9
JJ
12326 /* If this is a comparison of a field, we may be able to simplify it. */
12327 if ((TREE_CODE (arg0) == COMPONENT_REF
12328 || TREE_CODE (arg0) == BIT_FIELD_REF)
12329 /* Handle the constant case even without -O
12330 to make sure the warnings are given. */
12331 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12332 {
db3927fb 12333 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
45dc13b9
JJ
12334 if (t1)
12335 return t1;
12336 }
12337
e26ec0bb
RS
12338 /* Optimize comparisons of strlen vs zero to a compare of the
12339 first character of the string vs zero. To wit,
12340 strlen(ptr) == 0 => *ptr == 0
12341 strlen(ptr) != 0 => *ptr != 0
12342 Other cases should reduce to one of these two (or a constant)
12343 due to the return value of strlen being unsigned. */
12344 if (TREE_CODE (arg0) == CALL_EXPR
12345 && integer_zerop (arg1))
12346 {
12347 tree fndecl = get_callee_fndecl (arg0);
e26ec0bb
RS
12348
12349 if (fndecl
12350 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12351 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
5039610b
SL
12352 && call_expr_nargs (arg0) == 1
12353 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
e26ec0bb 12354 {
db3927fb
AH
12355 tree iref = build_fold_indirect_ref_loc (loc,
12356 CALL_EXPR_ARG (arg0, 0));
12357 return fold_build2_loc (loc, code, type, iref,
e26ec0bb
RS
12358 build_int_cst (TREE_TYPE (iref), 0));
12359 }
12360 }
12361
12362 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12363 of X. Similarly fold (X >> C) == 0 into X >= 0. */
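      /* E.g. for 32-bit X, (X >> 31) != 0 becomes X < 0 and (X >> 31) == 0
	 becomes X >= 0; an unsigned X is first converted to the corresponding
	 signed type.  */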
12364 if (TREE_CODE (arg0) == RSHIFT_EXPR
12365 && integer_zerop (arg1)
12366 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12367 {
12368 tree arg00 = TREE_OPERAND (arg0, 0);
12369 tree arg01 = TREE_OPERAND (arg0, 1);
12370 tree itype = TREE_TYPE (arg00);
12371 if (TREE_INT_CST_HIGH (arg01) == 0
12372 && TREE_INT_CST_LOW (arg01)
12373 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12374 {
12375 if (TYPE_UNSIGNED (itype))
12376 {
12753674 12377 itype = signed_type_for (itype);
db3927fb 12378 arg00 = fold_convert_loc (loc, itype, arg00);
e26ec0bb 12379 }
db3927fb 12380 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
e26ec0bb
RS
12381 type, arg00, build_int_cst (itype, 0));
12382 }
12383 }
12384
eb8dffe0
RS
12385 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12386 if (integer_zerop (arg1)
12387 && TREE_CODE (arg0) == BIT_XOR_EXPR)
db3927fb 12388 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
eb8dffe0
RS
12389 TREE_OPERAND (arg0, 1));
12390
12391 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12392 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12393 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
db3927fb 12394 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
eb8dffe0
RS
12395 build_int_cst (TREE_TYPE (arg1), 0));
12396 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12397 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12398 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12399 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
db3927fb 12400 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
eb8dffe0
RS
12401 build_int_cst (TREE_TYPE (arg1), 0));
12402
12403 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
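      /* For instance, X ^ 5 == 3 folds to X == 6, since 5 ^ 3 == 6.  */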
12404 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12405 && TREE_CODE (arg1) == INTEGER_CST
12406 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
db3927fb
AH
12407 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12408 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
eb8dffe0
RS
12409 TREE_OPERAND (arg0, 1), arg1));
12410
5881ad5d
RS
12411 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12412 (X & C) == 0 when C is a single bit. */
12413 if (TREE_CODE (arg0) == BIT_AND_EXPR
12414 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12415 && integer_zerop (arg1)
12416 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12417 {
db3927fb 12418 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
5881ad5d
RS
12419 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12420 TREE_OPERAND (arg0, 1));
db3927fb 12421 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
5881ad5d
RS
12422 type, tem, arg1);
12423 }
12424
12425 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12426 constant C is a power of two, i.e. a single bit. */
12427 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12428 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12429 && integer_zerop (arg1)
12430 && integer_pow2p (TREE_OPERAND (arg0, 1))
12431 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12432 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12433 {
12434 tree arg00 = TREE_OPERAND (arg0, 0);
db3927fb 12435 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
5881ad5d
RS
12436 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12437 }
12438
12439 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12440	 when C is a power of two, i.e. a single bit.  */
12441 if (TREE_CODE (arg0) == BIT_AND_EXPR
12442 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12443 && integer_zerop (arg1)
12444 && integer_pow2p (TREE_OPERAND (arg0, 1))
12445 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12446 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12447 {
12448 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
db3927fb 12449 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
5881ad5d 12450 arg000, TREE_OPERAND (arg0, 1));
db3927fb 12451 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
5881ad5d
RS
12452 tem, build_int_cst (TREE_TYPE (tem), 0));
12453 }
12454
e26ec0bb
RS
12455 if (integer_zerop (arg1)
12456 && tree_expr_nonzero_p (arg0))
12457 {
12458 tree res = constant_boolean_node (code==NE_EXPR, type);
db3927fb 12459 return omit_one_operand_loc (loc, type, res, arg0);
e26ec0bb 12460 }
c159ffe7
RS
12461
12462 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12463 if (TREE_CODE (arg0) == NEGATE_EXPR
12464 && TREE_CODE (arg1) == NEGATE_EXPR)
db3927fb 12465 return fold_build2_loc (loc, code, type,
c159ffe7
RS
12466 TREE_OPERAND (arg0, 0),
12467 TREE_OPERAND (arg1, 0));
12468
015e23f4
RS
12469      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
12470 if (TREE_CODE (arg0) == BIT_AND_EXPR
12471 && TREE_CODE (arg1) == BIT_AND_EXPR)
12472 {
12473 tree arg00 = TREE_OPERAND (arg0, 0);
12474 tree arg01 = TREE_OPERAND (arg0, 1);
12475 tree arg10 = TREE_OPERAND (arg1, 0);
12476 tree arg11 = TREE_OPERAND (arg1, 1);
12477 tree itype = TREE_TYPE (arg0);
12478
12479 if (operand_equal_p (arg01, arg11, 0))
db3927fb
AH
12480 return fold_build2_loc (loc, code, type,
12481 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12482 fold_build2_loc (loc,
12483 BIT_XOR_EXPR, itype,
015e23f4
RS
12484 arg00, arg10),
12485 arg01),
12486 build_int_cst (itype, 0));
12487
12488 if (operand_equal_p (arg01, arg10, 0))
db3927fb
AH
12489 return fold_build2_loc (loc, code, type,
12490 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12491 fold_build2_loc (loc,
12492 BIT_XOR_EXPR, itype,
015e23f4
RS
12493 arg00, arg11),
12494 arg01),
12495 build_int_cst (itype, 0));
12496
12497 if (operand_equal_p (arg00, arg11, 0))
db3927fb
AH
12498 return fold_build2_loc (loc, code, type,
12499 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12500 fold_build2_loc (loc,
12501 BIT_XOR_EXPR, itype,
015e23f4
RS
12502 arg01, arg10),
12503 arg00),
12504 build_int_cst (itype, 0));
12505
12506 if (operand_equal_p (arg00, arg10, 0))
db3927fb
AH
12507 return fold_build2_loc (loc, code, type,
12508 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12509 fold_build2_loc (loc,
12510 BIT_XOR_EXPR, itype,
015e23f4
RS
12511 arg01, arg11),
12512 arg00),
12513 build_int_cst (itype, 0));
12514 }
12515
cf06e5c1
RS
12516 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12517 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12518 {
12519 tree arg00 = TREE_OPERAND (arg0, 0);
12520 tree arg01 = TREE_OPERAND (arg0, 1);
12521 tree arg10 = TREE_OPERAND (arg1, 0);
12522 tree arg11 = TREE_OPERAND (arg1, 1);
12523 tree itype = TREE_TYPE (arg0);
12524
12525 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12526 operand_equal_p guarantees no side-effects so we don't need
12527 to use omit_one_operand on Z. */
12528 if (operand_equal_p (arg01, arg11, 0))
db3927fb 12529 return fold_build2_loc (loc, code, type, arg00, arg10);
cf06e5c1 12530 if (operand_equal_p (arg01, arg10, 0))
db3927fb 12531 return fold_build2_loc (loc, code, type, arg00, arg11);
cf06e5c1 12532 if (operand_equal_p (arg00, arg11, 0))
db3927fb 12533 return fold_build2_loc (loc, code, type, arg01, arg10);
cf06e5c1 12534 if (operand_equal_p (arg00, arg10, 0))
db3927fb 12535 return fold_build2_loc (loc, code, type, arg01, arg11);
cf06e5c1
RS
12536
12537 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12538 if (TREE_CODE (arg01) == INTEGER_CST
12539 && TREE_CODE (arg11) == INTEGER_CST)
db3927fb
AH
12540 return fold_build2_loc (loc, code, type,
12541 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
12542 fold_build2_loc (loc,
12543 BIT_XOR_EXPR, itype,
cf06e5c1
RS
12544 arg01, arg11)),
12545 arg10);
12546 }
23b9463b
RS
12547
12548 /* Attempt to simplify equality/inequality comparisons of complex
12549 values. Only lower the comparison if the result is known or
12550 can be simplified to a single scalar comparison. */
12551 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12552 || TREE_CODE (arg0) == COMPLEX_CST)
12553 && (TREE_CODE (arg1) == COMPLEX_EXPR
12554 || TREE_CODE (arg1) == COMPLEX_CST))
12555 {
12556 tree real0, imag0, real1, imag1;
12557 tree rcond, icond;
12558
12559 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12560 {
12561 real0 = TREE_OPERAND (arg0, 0);
12562 imag0 = TREE_OPERAND (arg0, 1);
12563 }
12564 else
12565 {
12566 real0 = TREE_REALPART (arg0);
12567 imag0 = TREE_IMAGPART (arg0);
12568 }
12569
12570 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12571 {
12572 real1 = TREE_OPERAND (arg1, 0);
12573 imag1 = TREE_OPERAND (arg1, 1);
12574 }
12575 else
12576 {
12577 real1 = TREE_REALPART (arg1);
12578 imag1 = TREE_IMAGPART (arg1);
12579 }
12580
db3927fb 12581 rcond = fold_binary_loc (loc, code, type, real0, real1);
23b9463b
RS
12582 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12583 {
12584 if (integer_zerop (rcond))
12585 {
12586 if (code == EQ_EXPR)
db3927fb 12587 return omit_two_operands_loc (loc, type, boolean_false_node,
23b9463b 12588 imag0, imag1);
db3927fb 12589 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
23b9463b
RS
12590 }
12591 else
12592 {
12593 if (code == NE_EXPR)
db3927fb 12594 return omit_two_operands_loc (loc, type, boolean_true_node,
23b9463b 12595 imag0, imag1);
db3927fb 12596 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
23b9463b
RS
12597 }
12598 }
12599
db3927fb 12600 icond = fold_binary_loc (loc, code, type, imag0, imag1);
23b9463b
RS
12601 if (icond && TREE_CODE (icond) == INTEGER_CST)
12602 {
12603 if (integer_zerop (icond))
12604 {
12605 if (code == EQ_EXPR)
db3927fb 12606 return omit_two_operands_loc (loc, type, boolean_false_node,
23b9463b 12607 real0, real1);
db3927fb 12608 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
23b9463b
RS
12609 }
12610 else
12611 {
12612 if (code == NE_EXPR)
db3927fb 12613 return omit_two_operands_loc (loc, type, boolean_true_node,
23b9463b 12614 real0, real1);
db3927fb 12615 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
23b9463b
RS
12616 }
12617 }
12618 }
12619
e26ec0bb
RS
12620 return NULL_TREE;
12621
12622 case LT_EXPR:
12623 case GT_EXPR:
12624 case LE_EXPR:
12625 case GE_EXPR:
db3927fb 12626 tem = fold_comparison (loc, code, type, op0, op1);
e26ec0bb
RS
12627 if (tem != NULL_TREE)
12628 return tem;
12629
12630 /* Transform comparisons of the form X +- C CMP X. */
12631 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12632 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12633 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12634 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12635 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
eeef0e45 12636 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
e26ec0bb
RS
12637 {
12638 tree arg01 = TREE_OPERAND (arg0, 1);
12639 enum tree_code code0 = TREE_CODE (arg0);
12640 int is_positive;
12641
12642 if (TREE_CODE (arg01) == REAL_CST)
12643 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12644 else
12645 is_positive = tree_int_cst_sgn (arg01);
12646
12647 /* (X - c) > X becomes false. */
12648 if (code == GT_EXPR
12649 && ((code0 == MINUS_EXPR && is_positive >= 0)
12650 || (code0 == PLUS_EXPR && is_positive <= 0)))
6ac01510
ILT
12651 {
12652 if (TREE_CODE (arg01) == INTEGER_CST
12653 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12654 fold_overflow_warning (("assuming signed overflow does not "
12655 "occur when assuming that (X - c) > X "
12656 "is always false"),
12657 WARN_STRICT_OVERFLOW_ALL);
12658 return constant_boolean_node (0, type);
12659 }
e26ec0bb
RS
12660
12661 /* Likewise (X + c) < X becomes false. */
12662 if (code == LT_EXPR
12663 && ((code0 == PLUS_EXPR && is_positive >= 0)
12664 || (code0 == MINUS_EXPR && is_positive <= 0)))
6ac01510
ILT
12665 {
12666 if (TREE_CODE (arg01) == INTEGER_CST
12667 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12668 fold_overflow_warning (("assuming signed overflow does not "
12669 "occur when assuming that "
12670 "(X + c) < X is always false"),
12671 WARN_STRICT_OVERFLOW_ALL);
12672 return constant_boolean_node (0, type);
12673 }
e26ec0bb
RS
12674
12675 /* Convert (X - c) <= X to true. */
12676 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12677 && code == LE_EXPR
0eeb03e6
JM
12678 && ((code0 == MINUS_EXPR && is_positive >= 0)
12679 || (code0 == PLUS_EXPR && is_positive <= 0)))
6ac01510
ILT
12680 {
12681 if (TREE_CODE (arg01) == INTEGER_CST
12682 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12683 fold_overflow_warning (("assuming signed overflow does not "
12684 "occur when assuming that "
12685 "(X - c) <= X is always true"),
12686 WARN_STRICT_OVERFLOW_ALL);
12687 return constant_boolean_node (1, type);
12688 }
0eeb03e6
JM
12689
12690 /* Convert (X + c) >= X to true. */
12691 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12692 && code == GE_EXPR
12693 && ((code0 == PLUS_EXPR && is_positive >= 0)
12694 || (code0 == MINUS_EXPR && is_positive <= 0)))
6ac01510
ILT
12695 {
12696 if (TREE_CODE (arg01) == INTEGER_CST
12697 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12698 fold_overflow_warning (("assuming signed overflow does not "
12699 "occur when assuming that "
12700 "(X + c) >= X is always true"),
12701 WARN_STRICT_OVERFLOW_ALL);
12702 return constant_boolean_node (1, type);
12703 }
0eeb03e6
JM
12704
12705 if (TREE_CODE (arg01) == INTEGER_CST)
12706 {
12707 /* Convert X + c > X and X - c < X to true for integers. */
12708 if (code == GT_EXPR
12709 && ((code0 == PLUS_EXPR && is_positive > 0)
12710 || (code0 == MINUS_EXPR && is_positive < 0)))
6ac01510
ILT
12711 {
12712 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12713 fold_overflow_warning (("assuming signed overflow does "
12714 "not occur when assuming that "
12715 "(X + c) > X is always true"),
12716 WARN_STRICT_OVERFLOW_ALL);
12717 return constant_boolean_node (1, type);
12718 }
0eeb03e6
JM
12719
12720 if (code == LT_EXPR
12721 && ((code0 == MINUS_EXPR && is_positive > 0)
12722 || (code0 == PLUS_EXPR && is_positive < 0)))
6ac01510
ILT
12723 {
12724 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12725 fold_overflow_warning (("assuming signed overflow does "
12726 "not occur when assuming that "
12727 "(X - c) < X is always true"),
12728 WARN_STRICT_OVERFLOW_ALL);
12729 return constant_boolean_node (1, type);
12730 }
0eeb03e6
JM
12731
12732 /* Convert X + c <= X and X - c >= X to false for integers. */
12733 if (code == LE_EXPR
12734 && ((code0 == PLUS_EXPR && is_positive > 0)
12735 || (code0 == MINUS_EXPR && is_positive < 0)))
6ac01510
ILT
12736 {
12737 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12738 fold_overflow_warning (("assuming signed overflow does "
12739 "not occur when assuming that "
12740 "(X + c) <= X is always false"),
12741 WARN_STRICT_OVERFLOW_ALL);
12742 return constant_boolean_node (0, type);
12743 }
0eeb03e6
JM
12744
12745 if (code == GE_EXPR
12746 && ((code0 == MINUS_EXPR && is_positive > 0)
12747 || (code0 == PLUS_EXPR && is_positive < 0)))
6ac01510
ILT
12748 {
12749 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12750 fold_overflow_warning (("assuming signed overflow does "
12751 "not occur when assuming that "
f870ab63 12752 "(X - c) >= X is always false"),
6ac01510
ILT
12753 WARN_STRICT_OVERFLOW_ALL);
12754 return constant_boolean_node (0, type);
12755 }
0eeb03e6
JM
12756 }
12757 }
12758
0aee4751 12759 /* Comparisons with the highest or lowest possible integer of
f0dbdfbb 12760 the specified precision will have known values. */
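    /* For a 32-bit signed X, e.g., X > INT_MAX folds to 0, X <= INT_MAX folds
       to 1, X > INT_MAX-1 folds to X == INT_MAX, and X < INT_MIN folds to 0.  */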
0aee4751 12761 {
f0dbdfbb
EB
12762 tree arg1_type = TREE_TYPE (arg1);
12763 unsigned int width = TYPE_PRECISION (arg1_type);
0aee4751
KH
12764
12765 if (TREE_CODE (arg1) == INTEGER_CST
0aee4751 12766 && width <= 2 * HOST_BITS_PER_WIDE_INT
f0dbdfbb 12767 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
0aee4751
KH
12768 {
12769 HOST_WIDE_INT signed_max_hi;
12770 unsigned HOST_WIDE_INT signed_max_lo;
12771 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12772
12773 if (width <= HOST_BITS_PER_WIDE_INT)
12774 {
12775 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12776 - 1;
12777 signed_max_hi = 0;
12778 max_hi = 0;
12779
f0dbdfbb 12780 if (TYPE_UNSIGNED (arg1_type))
0aee4751
KH
12781 {
12782 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12783 min_lo = 0;
12784 min_hi = 0;
12785 }
12786 else
12787 {
12788 max_lo = signed_max_lo;
12789 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12790 min_hi = -1;
12791 }
12792 }
12793 else
12794 {
12795 width -= HOST_BITS_PER_WIDE_INT;
12796 signed_max_lo = -1;
12797 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12798 - 1;
12799 max_lo = -1;
12800 min_lo = 0;
12801
f0dbdfbb 12802 if (TYPE_UNSIGNED (arg1_type))
0aee4751
KH
12803 {
12804 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12805 min_hi = 0;
12806 }
12807 else
12808 {
12809 max_hi = signed_max_hi;
12810 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12811 }
12812 }
12813
12814 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12815 && TREE_INT_CST_LOW (arg1) == max_lo)
12816 switch (code)
12817 {
12818 case GT_EXPR:
db3927fb 12819 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
0aee4751
KH
12820
12821 case GE_EXPR:
db3927fb 12822 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
0aee4751
KH
12823
12824 case LE_EXPR:
db3927fb 12825 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
0aee4751
KH
12826
12827 case LT_EXPR:
db3927fb 12828 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
0aee4751
KH
12829
12830 /* The GE_EXPR and LT_EXPR cases above are not normally
12831 reached because of previous transformations. */
12832
12833 default:
12834 break;
12835 }
12836 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12837 == max_hi
12838 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12839 switch (code)
12840 {
12841 case GT_EXPR:
000d8d44 12842 arg1 = const_binop (PLUS_EXPR, arg1,
43a5d30b 12843 build_int_cst (TREE_TYPE (arg1), 1));
db3927fb
AH
12844 return fold_build2_loc (loc, EQ_EXPR, type,
12845 fold_convert_loc (loc,
12846 TREE_TYPE (arg1), arg0),
86122f72 12847 arg1);
0aee4751 12848 case LE_EXPR:
000d8d44 12849 arg1 = const_binop (PLUS_EXPR, arg1,
43a5d30b 12850 build_int_cst (TREE_TYPE (arg1), 1));
db3927fb
AH
12851 return fold_build2_loc (loc, NE_EXPR, type,
12852 fold_convert_loc (loc, TREE_TYPE (arg1),
12853 arg0),
86122f72 12854 arg1);
0aee4751
KH
12855 default:
12856 break;
12857 }
12858 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12859 == min_hi
12860 && TREE_INT_CST_LOW (arg1) == min_lo)
12861 switch (code)
12862 {
12863 case LT_EXPR:
db3927fb 12864 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
0aee4751
KH
12865
12866 case LE_EXPR:
db3927fb 12867 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
0aee4751
KH
12868
12869 case GE_EXPR:
db3927fb 12870 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
0aee4751
KH
12871
12872 case GT_EXPR:
db3927fb 12873 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
0aee4751
KH
12874
12875 default:
12876 break;
12877 }
12878 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12879 == min_hi
12880 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12881 switch (code)
12882 {
12883 case GE_EXPR:
43a5d30b 12884 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
db3927fb
AH
12885 return fold_build2_loc (loc, NE_EXPR, type,
12886 fold_convert_loc (loc,
12887 TREE_TYPE (arg1), arg0),
86122f72 12888 arg1);
0aee4751 12889 case LT_EXPR:
43a5d30b 12890 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
db3927fb
AH
12891 return fold_build2_loc (loc, EQ_EXPR, type,
12892 fold_convert_loc (loc, TREE_TYPE (arg1),
12893 arg0),
86122f72 12894 arg1);
0aee4751
KH
12895 default:
12896 break;
12897 }
12898
5cdc4a26 12899 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
0aee4751 12900 && TREE_INT_CST_LOW (arg1) == signed_max_lo
f0dbdfbb
EB
12901 && TYPE_UNSIGNED (arg1_type)
12902 /* We will flip the signedness of the comparison operator
12903 associated with the mode of arg1, so the sign bit is
12904 specified by this mode. Check that arg1 is the signed
12905 max associated with this sign bit. */
12906 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
0aee4751 12907 /* signed_type does not work on pointer types. */
f0dbdfbb 12908 && INTEGRAL_TYPE_P (arg1_type))
0aee4751
KH
12909 {
12910 /* The following case also applies to X < signed_max+1
12911	       and X >= signed_max+1 because of previous transformations.  */
12912 if (code == LE_EXPR || code == GT_EXPR)
12913 {
86122f72 12914 tree st;
12753674 12915 st = signed_type_for (TREE_TYPE (arg1));
db3927fb
AH
12916 return fold_build2_loc (loc,
12917 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12918 type, fold_convert_loc (loc, st, arg0),
86122f72 12919 build_int_cst (st, 0));
0aee4751
KH
12920 }
12921 }
12922 }
12923 }
12924
0aee4751
KH
12925 /* If we are comparing an ABS_EXPR with a constant, we can
12926 convert all the cases into explicit comparisons, but they may
12927 well not be faster than doing the ABS and one comparison.
12928 But ABS (X) <= C is a range comparison, which becomes a subtraction
12929 and a comparison, and is probably faster. */
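    /* E.g. ABS (X) <= 5 becomes X >= -5 && X <= 5 (a TRUTH_ANDIF of a GE and
       an LE comparison), assuming X has no side effects and negating the
       constant does not overflow.  */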
e26ec0bb
RS
12930 if (code == LE_EXPR
12931 && TREE_CODE (arg1) == INTEGER_CST
12932 && TREE_CODE (arg0) == ABS_EXPR
12933 && ! TREE_SIDE_EFFECTS (arg0)
12934 && (0 != (tem = negate_expr (arg1)))
12935 && TREE_CODE (tem) == INTEGER_CST
455f14dd 12936 && !TREE_OVERFLOW (tem))
db3927fb 12937 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
7f20a5b7
KH
12938 build2 (GE_EXPR, type,
12939 TREE_OPERAND (arg0, 0), tem),
12940 build2 (LE_EXPR, type,
12941 TREE_OPERAND (arg0, 0), arg1));
0aee4751
KH
12942
12943 /* Convert ABS_EXPR<x> >= 0 to true. */
6ac01510 12944 strict_overflow_p = false;
e26ec0bb 12945 if (code == GE_EXPR
e26ec0bb
RS
12946 && (integer_zerop (arg1)
12947 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6ac01510
ILT
12948 && real_zerop (arg1)))
12949 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12950 {
12951 if (strict_overflow_p)
12952 fold_overflow_warning (("assuming signed overflow does not occur "
12953 "when simplifying comparison of "
12954 "absolute value and zero"),
12955 WARN_STRICT_OVERFLOW_CONDITIONAL);
db3927fb 12956 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6ac01510 12957 }
0aee4751
KH
12958
12959 /* Convert ABS_EXPR<x> < 0 to false. */
6ac01510 12960 strict_overflow_p = false;
e26ec0bb 12961 if (code == LT_EXPR
6ac01510
ILT
12962 && (integer_zerop (arg1) || real_zerop (arg1))
12963 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12964 {
12965 if (strict_overflow_p)
12966 fold_overflow_warning (("assuming signed overflow does not occur "
12967 "when simplifying comparison of "
12968 "absolute value and zero"),
12969 WARN_STRICT_OVERFLOW_CONDITIONAL);
db3927fb 12970 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6ac01510 12971 }
0aee4751 12972
0aee4751
KH
12973 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12974 and similarly for >= into !=. */
12975 if ((code == LT_EXPR || code == GE_EXPR)
12976 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12977 && TREE_CODE (arg1) == LSHIFT_EXPR
12978 && integer_onep (TREE_OPERAND (arg1, 0)))
db3927fb
AH
12979 {
12980 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12981 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12982 TREE_OPERAND (arg1, 1)),
12983 build_int_cst (TREE_TYPE (arg0), 0));
12984 goto fold_binary_exit;
12985 }
0aee4751 12986
e26ec0bb
RS
12987 if ((code == LT_EXPR || code == GE_EXPR)
12988 && TYPE_UNSIGNED (TREE_TYPE (arg0))
1043771b 12989 && CONVERT_EXPR_P (arg1)
e26ec0bb
RS
12990 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12991 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
db3927fb
AH
12992 {
12993 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12994 fold_convert_loc (loc, TREE_TYPE (arg0),
12995 build2 (RSHIFT_EXPR,
12996 TREE_TYPE (arg0), arg0,
12997 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12998 1))),
12999 build_int_cst (TREE_TYPE (arg0), 0));
13000 goto fold_binary_exit;
13001 }
0aee4751 13002
e26ec0bb 13003 return NULL_TREE;
0aee4751
KH
13004
13005 case UNORDERED_EXPR:
13006 case ORDERED_EXPR:
13007 case UNLT_EXPR:
13008 case UNLE_EXPR:
13009 case UNGT_EXPR:
13010 case UNGE_EXPR:
13011 case UNEQ_EXPR:
13012 case LTGT_EXPR:
13013 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13014 {
13015 t1 = fold_relational_const (code, type, arg0, arg1);
13016 if (t1 != NULL_TREE)
13017 return t1;
13018 }
13019
13020 /* If the first operand is NaN, the result is constant. */
13021 if (TREE_CODE (arg0) == REAL_CST
13022 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13023 && (code != LTGT_EXPR || ! flag_trapping_math))
13024 {
13025 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13026 ? integer_zero_node
13027 : integer_one_node;
db3927fb 13028 return omit_one_operand_loc (loc, type, t1, arg1);
0aee4751
KH
13029 }
13030
13031 /* If the second operand is NaN, the result is constant. */
13032 if (TREE_CODE (arg1) == REAL_CST
13033 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13034 && (code != LTGT_EXPR || ! flag_trapping_math))
13035 {
13036 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13037 ? integer_zero_node
13038 : integer_one_node;
db3927fb 13039 return omit_one_operand_loc (loc, type, t1, arg0);
0aee4751
KH
13040 }
13041
13042 /* Simplify unordered comparison of something with itself. */
13043 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13044 && operand_equal_p (arg0, arg1, 0))
13045 return constant_boolean_node (1, type);
13046
13047 if (code == LTGT_EXPR
13048 && !flag_trapping_math
13049 && operand_equal_p (arg0, arg1, 0))
13050 return constant_boolean_node (0, type);
13051
13052 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13053 {
13054 tree targ0 = strip_float_extensions (arg0);
13055 tree targ1 = strip_float_extensions (arg1);
13056 tree newtype = TREE_TYPE (targ0);
13057
13058 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13059 newtype = TREE_TYPE (targ1);
13060
13061 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
db3927fb
AH
13062 return fold_build2_loc (loc, code, type,
13063 fold_convert_loc (loc, newtype, targ0),
13064 fold_convert_loc (loc, newtype, targ1));
0aee4751
KH
13065 }
13066
62ab45cc 13067 return NULL_TREE;
0aee4751
KH
13068
13069 case COMPOUND_EXPR:
13070 /* When pedantic, a compound expression can be neither an lvalue
13071 nor an integer constant expression. */
13072 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
62ab45cc 13073 return NULL_TREE;
0aee4751
KH
13074      /* Don't let (0, 0) be a null pointer constant.  */
13075 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
db3927fb
AH
13076 : fold_convert_loc (loc, type, arg1);
13077 return pedantic_non_lvalue_loc (loc, tem);
0aee4751
KH
13078
13079 case COMPLEX_EXPR:
fd6c76f4
RS
13080 if ((TREE_CODE (arg0) == REAL_CST
13081 && TREE_CODE (arg1) == REAL_CST)
13082 || (TREE_CODE (arg0) == INTEGER_CST
13083 && TREE_CODE (arg1) == INTEGER_CST))
0aee4751 13084 return build_complex (type, arg0, arg1);
62ab45cc 13085 return NULL_TREE;
0aee4751 13086
cb4819f0
KH
13087 case ASSERT_EXPR:
13088 /* An ASSERT_EXPR should never be passed to fold_binary. */
13089 gcc_unreachable ();
13090
0aee4751 13091 default:
62ab45cc 13092 return NULL_TREE;
0aee4751 13093 } /* switch (code) */
db3927fb
AH
13094 fold_binary_exit:
13095 protected_set_expr_location (tem, loc);
13096 return tem;
0aee4751
KH
13097}
13098
c703e618
EB
13099/* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13100 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13101 of GOTO_EXPR. */
8c900457
GL
13102
13103static tree
c703e618 13104contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
8c900457
GL
13105{
13106 switch (TREE_CODE (*tp))
13107 {
13108 case LABEL_EXPR:
13109 return *tp;
c703e618 13110
8c900457
GL
13111 case GOTO_EXPR:
13112 *walk_subtrees = 0;
c703e618
EB
13113
13114 /* ... fall through ... */
13115
8c900457
GL
13116 default:
13117 return NULL_TREE;
13118 }
13119}
13120
c703e618
EB
13121/* Return whether the sub-tree ST contains a label which is accessible from
13122 outside the sub-tree. */
8c900457
GL
13123
13124static bool
13125contains_label_p (tree st)
13126{
c703e618
EB
13127 return
13128 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
8c900457
GL
13129}
13130
7cf57259
KH
13131/* Fold a ternary expression of code CODE and type TYPE with operands
13132 OP0, OP1, and OP2. Return the folded expression if folding is
13133 successful. Otherwise, return NULL_TREE. */
9bdae6af 13134
721425b6 13135tree
db3927fb
AH
13136fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13137 tree op0, tree op1, tree op2)
9bdae6af 13138{
9bdae6af
KH
13139 tree tem;
13140 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9bdae6af 13141 enum tree_code_class kind = TREE_CODE_CLASS (code);
9bdae6af
KH
13142
13143 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13144 && TREE_CODE_LENGTH (code) == 3);
13145
3ea2c264
KH
13146 /* Strip any conversions that don't change the mode. This is safe
13147 for every expression, except for a comparison expression because
13148 its signedness is derived from its operands. So, in the latter
13149 case, only strip conversions that don't change the signedness.
9bdae6af 13150
3ea2c264
KH
13151 Note that this is done as an internal manipulation within the
13152 constant folder, in order to find the simplest representation of
13153 the arguments so that their form can be studied. In any cases,
13154 the appropriate type conversions should be put back in the tree
13155 that will get out of the constant folder. */
13156 if (op0)
13157 {
13158 arg0 = op0;
13159 STRIP_NOPS (arg0);
13160 }
9bdae6af 13161
3ea2c264
KH
13162 if (op1)
13163 {
13164 arg1 = op1;
13165 STRIP_NOPS (arg1);
9bdae6af
KH
13166 }
13167
13168 switch (code)
13169 {
13170 case COMPONENT_REF:
13171 if (TREE_CODE (arg0) == CONSTRUCTOR
13172 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13173 {
4038c495
GB
13174 unsigned HOST_WIDE_INT idx;
13175 tree field, value;
13176 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13177 if (field == arg1)
13178 return value;
9bdae6af 13179 }
62ab45cc 13180 return NULL_TREE;
9bdae6af
KH
13181
13182 case COND_EXPR:
13183 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13184 so all simple results must be passed through pedantic_non_lvalue. */
13185 if (TREE_CODE (arg0) == INTEGER_CST)
13186 {
8c900457 13187 tree unused_op = integer_zerop (arg0) ? op1 : op2;
3ea2c264 13188 tem = integer_zerop (arg0) ? op2 : op1;
9bdae6af
KH
13189 /* Only optimize constant conditions when the selected branch
13190 has the same type as the COND_EXPR. This avoids optimizing
8c900457
GL
13191 away "c ? x : throw", where the throw has a void type.
13192	     Avoid throwing away that operand which contains a label.  */
13193 if ((!TREE_SIDE_EFFECTS (unused_op)
13194 || !contains_label_p (unused_op))
13195 && (! VOID_TYPE_P (TREE_TYPE (tem))
13196 || VOID_TYPE_P (type)))
db3927fb 13197 return pedantic_non_lvalue_loc (loc, tem);
62ab45cc 13198 return NULL_TREE;
9bdae6af 13199 }
3ea2c264 13200 if (operand_equal_p (arg1, op2, 0))
db3927fb 13201 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
9bdae6af
KH
13202
13203 /* If we have A op B ? A : C, we may be able to convert this to a
13204 simpler expression, depending on the operation and the values
13205 of B and C. Signed zeros prevent all of these transformations,
13206 for reasons given above each one.
13207
13208 Also try swapping the arguments and inverting the conditional. */
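      /* For instance, x > y ? x : y can typically be simplified to a MAX_EXPR
	 by fold_cond_expr_with_comparison, provided signed zeros need not be
	 honored.  */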
13209 if (COMPARISON_CLASS_P (arg0)
13210 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13211 arg1, TREE_OPERAND (arg0, 1))
13212 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13213 {
db3927fb 13214 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
9bdae6af
KH
13215 if (tem)
13216 return tem;
13217 }
13218
13219 if (COMPARISON_CLASS_P (arg0)
13220 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
3ea2c264 13221 op2,
9bdae6af 13222 TREE_OPERAND (arg0, 1))
3ea2c264 13223 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
9bdae6af 13224 {
db3927fb 13225 tem = fold_truth_not_expr (loc, arg0);
d817ed3b 13226 if (tem && COMPARISON_CLASS_P (tem))
9bdae6af 13227 {
db3927fb 13228 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
9bdae6af
KH
13229 if (tem)
13230 return tem;
13231 }
13232 }
13233
13234 /* If the second operand is simpler than the third, swap them
13235 since that produces better jump optimization results. */
3dac16bd
RG
13236 if (truth_value_p (TREE_CODE (arg0))
13237 && tree_swap_operands_p (op1, op2, false))
9bdae6af
KH
13238 {
13239 /* See if this can be inverted. If it can't, possibly because
13240 it was a floating-point inequality comparison, don't do
13241 anything. */
db3927fb 13242 tem = fold_truth_not_expr (loc, arg0);
d817ed3b 13243 if (tem)
db3927fb 13244 return fold_build3_loc (loc, code, type, tem, op2, op1);
9bdae6af
KH
13245 }
13246
13247 /* Convert A ? 1 : 0 to simply A. */
3ea2c264
KH
13248 if (integer_onep (op1)
13249 && integer_zerop (op2)
13250 /* If we try to convert OP0 to our type, the
9bdae6af
KH
13251 call to fold will try to move the conversion inside
13252 a COND, which will recurse. In that case, the COND_EXPR
13253 is probably the best choice, so leave it alone. */
13254 && type == TREE_TYPE (arg0))
db3927fb 13255 return pedantic_non_lvalue_loc (loc, arg0);
9bdae6af
KH
13256
13257 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13258 over COND_EXPR in cases such as floating point comparisons. */
3ea2c264
KH
13259 if (integer_zerop (op1)
13260 && integer_onep (op2)
9bdae6af 13261 && truth_value_p (TREE_CODE (arg0)))
db3927fb
AH
13262 return pedantic_non_lvalue_loc (loc,
13263 fold_convert_loc (loc, type,
13264 invert_truthvalue_loc (loc,
13265 arg0)));
9bdae6af
KH
13266
13267 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13268 if (TREE_CODE (arg0) == LT_EXPR
789e604d
JJ
13269 && integer_zerop (TREE_OPERAND (arg0, 1))
13270 && integer_zerop (op2)
13271 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13272 {
13273 /* sign_bit_p only checks ARG1 bits within A's precision.
13274 If <sign bit of A> has wider type than A, bits outside
13275 of A's precision in <sign bit of A> need to be checked.
13276 If they are all 0, this optimization needs to be done
13277	     If they are all 0, this optimization needs to be done
13278	     in unsigned A's type; if they are all 1, in signed A's type;
	     otherwise this can't be done.  */
13279 if (TYPE_PRECISION (TREE_TYPE (tem))
13280 < TYPE_PRECISION (TREE_TYPE (arg1))
13281 && TYPE_PRECISION (TREE_TYPE (tem))
13282 < TYPE_PRECISION (type))
13283 {
13284 unsigned HOST_WIDE_INT mask_lo;
13285 HOST_WIDE_INT mask_hi;
13286 int inner_width, outer_width;
13287 tree tem_type;
13288
13289 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13290 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13291 if (outer_width > TYPE_PRECISION (type))
13292 outer_width = TYPE_PRECISION (type);
13293
13294 if (outer_width > HOST_BITS_PER_WIDE_INT)
13295 {
13296 mask_hi = ((unsigned HOST_WIDE_INT) -1
13297 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13298 mask_lo = -1;
13299 }
13300 else
13301 {
13302 mask_hi = 0;
13303 mask_lo = ((unsigned HOST_WIDE_INT) -1
13304 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13305 }
13306 if (inner_width > HOST_BITS_PER_WIDE_INT)
13307 {
13308 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13309 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13310 mask_lo = 0;
13311 }
13312 else
13313 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13314 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13315
13316 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13317 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13318 {
12753674 13319 tem_type = signed_type_for (TREE_TYPE (tem));
db3927fb 13320 tem = fold_convert_loc (loc, tem_type, tem);
789e604d
JJ
13321 }
13322 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13323 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13324 {
ca5ba2a3 13325 tem_type = unsigned_type_for (TREE_TYPE (tem));
db3927fb 13326 tem = fold_convert_loc (loc, tem_type, tem);
789e604d
JJ
13327 }
13328 else
13329 tem = NULL;
13330 }
13331
13332 if (tem)
db3927fb
AH
13333 return
13334 fold_convert_loc (loc, type,
13335 fold_build2_loc (loc, BIT_AND_EXPR,
13336 TREE_TYPE (tem), tem,
13337 fold_convert_loc (loc,
13338 TREE_TYPE (tem),
13339 arg1)));
789e604d 13340 }
9bdae6af
KH
13341
13342 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13343 already handled above. */
13344 if (TREE_CODE (arg0) == BIT_AND_EXPR
13345 && integer_onep (TREE_OPERAND (arg0, 1))
3ea2c264 13346 && integer_zerop (op2)
9bdae6af
KH
13347 && integer_pow2p (arg1))
13348 {
13349 tree tem = TREE_OPERAND (arg0, 0);
13350 STRIP_NOPS (tem);
13351 if (TREE_CODE (tem) == RSHIFT_EXPR
13352 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13353 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13354 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
db3927fb 13355 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7f20a5b7 13356 TREE_OPERAND (tem, 0), arg1);
9bdae6af
KH
13357 }
13358
13359 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13360 is probably obsolete because the first operand should be a
13361 truth value (that's why we have the two cases above), but let's
13362 leave it in until we can confirm this for all front-ends. */
3ea2c264 13363 if (integer_zerop (op2)
9bdae6af
KH
13364 && TREE_CODE (arg0) == NE_EXPR
13365 && integer_zerop (TREE_OPERAND (arg0, 1))
13366 && integer_pow2p (arg1)
13367 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13368 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13369 arg1, OEP_ONLY_CONST))
db3927fb
AH
13370 return pedantic_non_lvalue_loc (loc,
13371 fold_convert_loc (loc, type,
13372 TREE_OPERAND (arg0, 0)));
9bdae6af
KH
13373
13374 /* Convert A ? B : 0 into A && B if A and B are truth values. */
3ea2c264 13375 if (integer_zerop (op2)
9bdae6af
KH
13376 && truth_value_p (TREE_CODE (arg0))
13377 && truth_value_p (TREE_CODE (arg1)))
db3927fb
AH
13378 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13379 fold_convert_loc (loc, type, arg0),
726ac11e 13380 arg1);
9bdae6af
KH
13381
13382 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
3ea2c264 13383 if (integer_onep (op2)
9bdae6af
KH
13384 && truth_value_p (TREE_CODE (arg0))
13385 && truth_value_p (TREE_CODE (arg1)))
13386 {
13387 /* Only perform transformation if ARG0 is easily inverted. */
db3927fb 13388 tem = fold_truth_not_expr (loc, arg0);
d817ed3b 13389 if (tem)
db3927fb
AH
13390 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13391 fold_convert_loc (loc, type, tem),
726ac11e 13392 arg1);
9bdae6af
KH
13393 }
13394
13395 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13396 if (integer_zerop (arg1)
13397 && truth_value_p (TREE_CODE (arg0))
3ea2c264 13398 && truth_value_p (TREE_CODE (op2)))
9bdae6af
KH
13399 {
13400 /* Only perform transformation if ARG0 is easily inverted. */
db3927fb 13401 tem = fold_truth_not_expr (loc, arg0);
d817ed3b 13402 if (tem)
db3927fb
AH
13403 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13404 fold_convert_loc (loc, type, tem),
726ac11e 13405 op2);
9bdae6af
KH
13406 }
13407
13408 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13409 if (integer_onep (arg1)
13410 && truth_value_p (TREE_CODE (arg0))
3ea2c264 13411 && truth_value_p (TREE_CODE (op2)))
db3927fb
AH
13412 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13413 fold_convert_loc (loc, type, arg0),
726ac11e 13414 op2);
9bdae6af 13415
62ab45cc 13416 return NULL_TREE;
9bdae6af
KH
13417
13418 case CALL_EXPR:
5039610b
SL
13419 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13420 of fold_ternary on them. */
13421 gcc_unreachable ();
9bdae6af 13422
dcd25113 13423 case BIT_FIELD_REF:
5773afc5
DN
13424 if ((TREE_CODE (arg0) == VECTOR_CST
13425 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
e55f42fb 13426 && type == TREE_TYPE (TREE_TYPE (arg0)))
dcd25113
JJ
13427 {
13428 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13429 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13430
13431 if (width != 0
13432 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13433 && (idx % width) == 0
13434 && (idx = idx / width)
13435 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13436 {
5773afc5
DN
13437 tree elements = NULL_TREE;
13438
13439 if (TREE_CODE (arg0) == VECTOR_CST)
13440 elements = TREE_VECTOR_CST_ELTS (arg0);
13441 else
13442 {
13443 unsigned HOST_WIDE_INT idx;
13444 tree value;
13445
13446 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13447 elements = tree_cons (NULL_TREE, value, elements);
13448 }
40182dbf 13449 while (idx-- > 0 && elements)
dcd25113 13450 elements = TREE_CHAIN (elements);
40182dbf
JJ
13451 if (elements)
13452 return TREE_VALUE (elements);
13453 else
db3927fb 13454 return fold_convert_loc (loc, type, integer_zero_node);
dcd25113
JJ
13455 }
13456 }
ee1f1270
RG
13457
13458 /* A bit-field-ref that referenced the full argument can be stripped. */
13459 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13460 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13461 && integer_zerop (op2))
db3927fb 13462 return fold_convert_loc (loc, type, arg0);
ee1f1270 13463
dcd25113
JJ
13464 return NULL_TREE;
13465
9bdae6af 13466 default:
62ab45cc 13467 return NULL_TREE;
9bdae6af
KH
13468 } /* switch (code) */
13469}
13470
6d716ca8
RS
13471/* Perform constant folding and related simplification of EXPR.
13472 The related simplifications include x*1 => x, x*0 => 0, etc.,
13473 and application of the associative law.
13474 NOP_EXPR conversions may be removed freely (as long as we
af5bdf6a 13475 are careful not to change the type of the overall expression).
6d716ca8
RS
13476 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13477 but we can constant-fold them if they have constant operands. */
13478
5dfa45d0
JJ
13479#ifdef ENABLE_FOLD_CHECKING
13480# define fold(x) fold_1 (x)
13481static tree fold_1 (tree);
13482static
13483#endif
6d716ca8 13484tree
fa8db1f7 13485fold (tree expr)
6d716ca8 13486{
ea993805 13487 const tree t = expr;
b3694847 13488 enum tree_code code = TREE_CODE (t);
6615c446 13489 enum tree_code_class kind = TREE_CODE_CLASS (code);
62ab45cc 13490 tree tem;
db3927fb 13491 location_t loc = EXPR_LOCATION (expr);
6de9cd9a 13492
1796dff4 13493 /* Return right away if a constant. */
6615c446 13494 if (kind == tcc_constant)
1796dff4 13495 return t;
b6cc0a72 13496
5039610b
SL
13497 /* CALL_EXPR-like objects with variable numbers of operands are
13498 treated specially. */
13499 if (kind == tcc_vl_exp)
13500 {
13501 if (code == CALL_EXPR)
13502 {
db3927fb 13503 tem = fold_call_expr (loc, expr, false);
5039610b
SL
13504 return tem ? tem : expr;
13505 }
13506 return expr;
13507 }
13508
726a989a 13509 if (IS_EXPR_CODE_CLASS (kind))
659d8efa 13510 {
fbaa905c 13511 tree type = TREE_TYPE (t);
7cf57259 13512 tree op0, op1, op2;
fbaa905c 13513
659d8efa
KH
13514 switch (TREE_CODE_LENGTH (code))
13515 {
13516 case 1:
fbaa905c 13517 op0 = TREE_OPERAND (t, 0);
db3927fb 13518 tem = fold_unary_loc (loc, code, type, op0);
62ab45cc 13519 return tem ? tem : expr;
0aee4751 13520 case 2:
fbaa905c
KH
13521 op0 = TREE_OPERAND (t, 0);
13522 op1 = TREE_OPERAND (t, 1);
db3927fb 13523 tem = fold_binary_loc (loc, code, type, op0, op1);
62ab45cc 13524 return tem ? tem : expr;
9bdae6af 13525 case 3:
7cf57259
KH
13526 op0 = TREE_OPERAND (t, 0);
13527 op1 = TREE_OPERAND (t, 1);
13528 op2 = TREE_OPERAND (t, 2);
db3927fb 13529 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
62ab45cc 13530 return tem ? tem : expr;
659d8efa
KH
13531 default:
13532 break;
13533 }
13534 }
13535
6d716ca8
RS
13536 switch (code)
13537 {
39fcde8f
EB
13538 case ARRAY_REF:
13539 {
13540 tree op0 = TREE_OPERAND (t, 0);
13541 tree op1 = TREE_OPERAND (t, 1);
13542
13543 if (TREE_CODE (op1) == INTEGER_CST
13544 && TREE_CODE (op0) == CONSTRUCTOR
13545 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13546 {
13547 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13548 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13549 unsigned HOST_WIDE_INT begin = 0;
13550
13551 /* Find a matching index by means of a binary search. */
13552 while (begin != end)
13553 {
13554 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13555 tree index = VEC_index (constructor_elt, elts, middle)->index;
13556
13557 if (TREE_CODE (index) == INTEGER_CST
13558 && tree_int_cst_lt (index, op1))
13559 begin = middle + 1;
13560 else if (TREE_CODE (index) == INTEGER_CST
13561 && tree_int_cst_lt (op1, index))
13562 end = middle;
13563 else if (TREE_CODE (index) == RANGE_EXPR
13564 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13565 begin = middle + 1;
13566 else if (TREE_CODE (index) == RANGE_EXPR
13567 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13568 end = middle;
13569 else
13570 return VEC_index (constructor_elt, elts, middle)->value;
13571 }
13572 }
13573
13574 return t;
13575 }
13576
6d716ca8
RS
13577 case CONST_DECL:
13578 return fold (DECL_INITIAL (t));
13579
6d716ca8
RS
13580 default:
13581 return t;
13582 } /* switch (code) */
13583}
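/* A minimal usage sketch (editor's illustration, not part of the original
   source): a caller that has just built a tree hands it to fold () and uses
   whatever comes back, e.g.

     tree two = build_int_cst (integer_type_node, 2);
     tree sum = build2 (PLUS_EXPR, integer_type_node, two, two);
     tree folded = fold (sum);      -- an INTEGER_CST with value 4

   fold () is expected never to modify its argument in place; the
   ENABLE_FOLD_CHECKING machinery below verifies exactly that.  */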
39dfb55a 13584
5dfa45d0
JJ
13585#ifdef ENABLE_FOLD_CHECKING
13586#undef fold
13587
ac545c64
KG
13588static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13589static void fold_check_failed (const_tree, const_tree);
13590void print_fold_checksum (const_tree);
5dfa45d0
JJ
13591
13592/* When --enable-checking=fold, compute a digest of expr before
13593 and after the actual fold call to verify that fold did not
13594 accidentally change the original expr. */
13595
13596tree
13597fold (tree expr)
13598{
13599 tree ret;
13600 struct md5_ctx ctx;
13601 unsigned char checksum_before[16], checksum_after[16];
13602 htab_t ht;
13603
13604 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13605 md5_init_ctx (&ctx);
13606 fold_checksum_tree (expr, &ctx, ht);
13607 md5_finish_ctx (&ctx, checksum_before);
13608 htab_empty (ht);
13609
13610 ret = fold_1 (expr);
13611
13612 md5_init_ctx (&ctx);
13613 fold_checksum_tree (expr, &ctx, ht);
13614 md5_finish_ctx (&ctx, checksum_after);
13615 htab_delete (ht);
13616
13617 if (memcmp (checksum_before, checksum_after, 16))
13618 fold_check_failed (expr, ret);
13619
13620 return ret;
13621}
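/* Configuration sketch (editor's note): this checking wrapper is only
   compiled when GCC itself is configured with fold checking enabled,
   roughly

     ../gcc/configure --enable-checking=yes,fold

   which defines ENABLE_FOLD_CHECKING; every call to fold () then hashes
   the input tree before and after folding and aborts through
   fold_check_failed () if the two MD5 digests differ.  */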
13622
13623void
ac545c64 13624print_fold_checksum (const_tree expr)
5dfa45d0
JJ
13625{
13626 struct md5_ctx ctx;
13627 unsigned char checksum[16], cnt;
13628 htab_t ht;
13629
13630 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13631 md5_init_ctx (&ctx);
13632 fold_checksum_tree (expr, &ctx, ht);
13633 md5_finish_ctx (&ctx, checksum);
13634 htab_delete (ht);
13635 for (cnt = 0; cnt < 16; ++cnt)
13636 fprintf (stderr, "%02x", checksum[cnt]);
13637 putc ('\n', stderr);
13638}
13639
13640static void
ac545c64 13641fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
5dfa45d0
JJ
13642{
13643 internal_error ("fold check: original tree changed by fold");
13644}
13645
13646static void
ac545c64 13647fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
5dfa45d0 13648{
ac545c64 13649 const void **slot;
5dfa45d0 13650 enum tree_code code;
ea6dafb0 13651 union tree_node buf;
5dfa45d0 13652 int i, len;
b8698a0f 13653
d763bb10 13654recursive_label:
5dfa45d0 13655
0bccc606 13656 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
46c5394b
DB
13657 <= sizeof (struct tree_function_decl))
13658 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
5dfa45d0
JJ
13659 if (expr == NULL)
13660 return;
ac545c64 13661 slot = (const void **) htab_find_slot (ht, expr, INSERT);
5dfa45d0
JJ
13662 if (*slot != NULL)
13663 return;
13664 *slot = expr;
13665 code = TREE_CODE (expr);
6615c446
JO
13666 if (TREE_CODE_CLASS (code) == tcc_declaration
13667 && DECL_ASSEMBLER_NAME_SET_P (expr))
5dfa45d0
JJ
13668 {
13669 /* Allow DECL_ASSEMBLER_NAME to be modified. */
3f7f53c7 13670 memcpy ((char *) &buf, expr, tree_size (expr));
ac545c64 13671 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
3f7f53c7 13672 expr = (tree) &buf;
5dfa45d0 13673 }
6615c446 13674 else if (TREE_CODE_CLASS (code) == tcc_type
5cf96841
JJ
13675 && (TYPE_POINTER_TO (expr)
13676 || TYPE_REFERENCE_TO (expr)
d763bb10 13677 || TYPE_CACHED_VALUES_P (expr)
5cf96841
JJ
13678 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13679 || TYPE_NEXT_VARIANT (expr)))
5dfa45d0 13680 {
b9193259 13681 /* Allow these fields to be modified. */
ac545c64 13682 tree tmp;
3f7f53c7 13683 memcpy ((char *) &buf, expr, tree_size (expr));
ac545c64
KG
13684 expr = tmp = (tree) &buf;
13685 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13686 TYPE_POINTER_TO (tmp) = NULL;
13687 TYPE_REFERENCE_TO (tmp) = NULL;
5cf96841 13688 TYPE_NEXT_VARIANT (tmp) = NULL;
ac545c64 13689 if (TYPE_CACHED_VALUES_P (tmp))
0ebfd2c9 13690 {
ac545c64
KG
13691 TYPE_CACHED_VALUES_P (tmp) = 0;
13692 TYPE_CACHED_VALUES (tmp) = NULL;
0ebfd2c9 13693 }
5dfa45d0
JJ
13694 }
13695 md5_process_bytes (expr, tree_size (expr), ctx);
13696 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
6615c446 13697 if (TREE_CODE_CLASS (code) != tcc_type
d763bb10 13698 && TREE_CODE_CLASS (code) != tcc_declaration
70826cbb
SP
13699 && code != TREE_LIST
13700 && code != SSA_NAME)
5dfa45d0 13701 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
5dfa45d0
JJ
13702 switch (TREE_CODE_CLASS (code))
13703 {
6615c446 13704 case tcc_constant:
5dfa45d0
JJ
13705 switch (code)
13706 {
13707 case STRING_CST:
13708 md5_process_bytes (TREE_STRING_POINTER (expr),
13709 TREE_STRING_LENGTH (expr), ctx);
13710 break;
13711 case COMPLEX_CST:
13712 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13713 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13714 break;
13715 case VECTOR_CST:
13716 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13717 break;
13718 default:
13719 break;
13720 }
13721 break;
6615c446 13722 case tcc_exceptional:
5dfa45d0
JJ
13723 switch (code)
13724 {
13725 case TREE_LIST:
13726 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13727 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
d763bb10
AP
13728 expr = TREE_CHAIN (expr);
13729 goto recursive_label;
5dfa45d0
JJ
13730 break;
13731 case TREE_VEC:
13732 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13733 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13734 break;
13735 default:
13736 break;
13737 }
13738 break;
6615c446
JO
13739 case tcc_expression:
13740 case tcc_reference:
13741 case tcc_comparison:
13742 case tcc_unary:
13743 case tcc_binary:
13744 case tcc_statement:
5039610b
SL
13745 case tcc_vl_exp:
13746 len = TREE_OPERAND_LENGTH (expr);
5dfa45d0
JJ
13747 for (i = 0; i < len; ++i)
13748 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13749 break;
6615c446 13750 case tcc_declaration:
5dfa45d0
JJ
13751 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13752 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
3eb04608
DB
13753 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13754 {
13755 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13756 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13757 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13758 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13759 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13760 }
46c5394b
DB
13761 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13762 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
b8698a0f 13763
46c5394b
DB
13764 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13765 {
13766 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13767 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13768 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13769 }
5dfa45d0 13770 break;
6615c446 13771 case tcc_type:
a40de696
AP
13772 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13773 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
5dfa45d0
JJ
13774 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13775 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13776 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13777 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
a40de696
AP
13778 if (INTEGRAL_TYPE_P (expr)
13779 || SCALAR_FLOAT_TYPE_P (expr))
13780 {
13781 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13782 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13783 }
5dfa45d0 13784 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
b9193259
DJ
13785 if (TREE_CODE (expr) == RECORD_TYPE
13786 || TREE_CODE (expr) == UNION_TYPE
13787 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13788 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
5dfa45d0
JJ
13789 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13790 break;
13791 default:
13792 break;
13793 }
13794}
13795
f1b42630
AN
13796/* Helper function for outputting the checksum of a tree T. When
13797 debugging with gdb, you can "define mynext" to be "next" followed
13798 by "call debug_fold_checksum (op0)", then just trace down till the
13799 outputs differ. */
13800
24e47c76 13801DEBUG_FUNCTION void
ac545c64 13802debug_fold_checksum (const_tree t)
f1b42630
AN
13803{
13804 int i;
13805 unsigned char checksum[16];
13806 struct md5_ctx ctx;
13807 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
b8698a0f 13808
f1b42630
AN
13809 md5_init_ctx (&ctx);
13810 fold_checksum_tree (t, &ctx, ht);
13811 md5_finish_ctx (&ctx, checksum);
13812 htab_empty (ht);
13813
13814 for (i = 0; i < 16; i++)
13815 fprintf (stderr, "%d ", checksum[i]);
13816
13817 fprintf (stderr, "\n");
13818}
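/* Debugging sketch (editor's illustration) of the recipe described in the
   comment above debug_fold_checksum:

     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end

   Stepping with "mynext" prints op0's digest after every statement, so the
   first step at which the output changes is the one that modified the
   operand.  */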
13819
5dfa45d0
JJ
13820#endif
13821
ba199a53 13822/* Fold a unary tree expression with code CODE of type TYPE with an
db3927fb
AH
13823 operand OP0. LOC is the location of the resulting expression.
13824 Return a folded expression if successful. Otherwise, return a tree
13825 expression with code CODE of type TYPE with an operand OP0. */
ba199a53
KH
13826
13827tree
db3927fb
AH
13828fold_build1_stat_loc (location_t loc,
13829 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
ba199a53 13830{
e2fe73f6
AP
13831 tree tem;
13832#ifdef ENABLE_FOLD_CHECKING
13833 unsigned char checksum_before[16], checksum_after[16];
13834 struct md5_ctx ctx;
13835 htab_t ht;
13836
13837 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13838 md5_init_ctx (&ctx);
13839 fold_checksum_tree (op0, &ctx, ht);
13840 md5_finish_ctx (&ctx, checksum_before);
13841 htab_empty (ht);
13842#endif
b8698a0f 13843
db3927fb 13844 tem = fold_unary_loc (loc, code, type, op0);
e2fe73f6 13845 if (!tem)
db3927fb
AH
13846 {
13847 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13848 SET_EXPR_LOCATION (tem, loc);
13849 }
b8698a0f 13850
e2fe73f6
AP
13851#ifdef ENABLE_FOLD_CHECKING
13852 md5_init_ctx (&ctx);
13853 fold_checksum_tree (op0, &ctx, ht);
13854 md5_finish_ctx (&ctx, checksum_after);
13855 htab_delete (ht);
ba199a53 13856
e2fe73f6
AP
13857 if (memcmp (checksum_before, checksum_after, 16))
13858 fold_check_failed (op0, tem);
13859#endif
13860 return tem;
ba199a53
KH
13861}
13862
13863/* Fold a binary tree expression with code CODE of type TYPE with
db3927fb
AH
13864 operands OP0 and OP1. LOC is the location of the resulting
13865 expression. Return a folded expression if successful. Otherwise,
13866 return a tree expression with code CODE of type TYPE with operands
13867 OP0 and OP1. */
ba199a53
KH
13868
13869tree
db3927fb
AH
13870fold_build2_stat_loc (location_t loc,
13871 enum tree_code code, tree type, tree op0, tree op1
13872 MEM_STAT_DECL)
ba199a53 13873{
e2fe73f6
AP
13874 tree tem;
13875#ifdef ENABLE_FOLD_CHECKING
13876 unsigned char checksum_before_op0[16],
13877 checksum_before_op1[16],
13878 checksum_after_op0[16],
13879 checksum_after_op1[16];
13880 struct md5_ctx ctx;
13881 htab_t ht;
13882
13883 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13884 md5_init_ctx (&ctx);
13885 fold_checksum_tree (op0, &ctx, ht);
13886 md5_finish_ctx (&ctx, checksum_before_op0);
13887 htab_empty (ht);
13888
13889 md5_init_ctx (&ctx);
13890 fold_checksum_tree (op1, &ctx, ht);
13891 md5_finish_ctx (&ctx, checksum_before_op1);
13892 htab_empty (ht);
13893#endif
13894
db3927fb 13895 tem = fold_binary_loc (loc, code, type, op0, op1);
e2fe73f6 13896 if (!tem)
db3927fb
AH
13897 {
13898 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13899 SET_EXPR_LOCATION (tem, loc);
13900 }
b8698a0f 13901
e2fe73f6
AP
13902#ifdef ENABLE_FOLD_CHECKING
13903 md5_init_ctx (&ctx);
13904 fold_checksum_tree (op0, &ctx, ht);
13905 md5_finish_ctx (&ctx, checksum_after_op0);
13906 htab_empty (ht);
13907
13908 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13909 fold_check_failed (op0, tem);
b8698a0f 13910
e2fe73f6
AP
13911 md5_init_ctx (&ctx);
13912 fold_checksum_tree (op1, &ctx, ht);
13913 md5_finish_ctx (&ctx, checksum_after_op1);
13914 htab_delete (ht);
ba199a53 13915
e2fe73f6
AP
13916 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13917 fold_check_failed (op1, tem);
13918#endif
13919 return tem;
ba199a53
KH
13920}
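/* Usage sketch (editor's illustration): callers normally reach this
   function through the fold_build2 convenience macro rather than calling
   the _stat_loc variant directly, e.g.

     tree sum = fold_build2 (PLUS_EXPR, sizetype, size_int (4), size_int (8));

   which yields the INTEGER_CST 12 instead of a PLUS_EXPR node, while the
   ENABLE_FOLD_CHECKING code above verifies that neither operand was
   altered in the process.  */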
13921
13922/* Fold a ternary tree expression with code CODE of type TYPE with
830113fd 13923 operands OP0, OP1, and OP2. Return a folded expression if
ba199a53
KH
13924 successful. Otherwise, return a tree expression with code CODE of
13925 type TYPE with operands OP0, OP1, and OP2. */
13926
13927tree
db3927fb
AH
13928fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
13929 tree op0, tree op1, tree op2 MEM_STAT_DECL)
5808968e
AP
13930{
13931 tree tem;
e2fe73f6
AP
13932#ifdef ENABLE_FOLD_CHECKING
13933 unsigned char checksum_before_op0[16],
13934 checksum_before_op1[16],
13935 checksum_before_op2[16],
13936 checksum_after_op0[16],
13937 checksum_after_op1[16],
13938 checksum_after_op2[16];
13939 struct md5_ctx ctx;
13940 htab_t ht;
13941
13942 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13943 md5_init_ctx (&ctx);
13944 fold_checksum_tree (op0, &ctx, ht);
13945 md5_finish_ctx (&ctx, checksum_before_op0);
13946 htab_empty (ht);
ba199a53 13947
e2fe73f6
AP
13948 md5_init_ctx (&ctx);
13949 fold_checksum_tree (op1, &ctx, ht);
13950 md5_finish_ctx (&ctx, checksum_before_op1);
13951 htab_empty (ht);
13952
13953 md5_init_ctx (&ctx);
13954 fold_checksum_tree (op2, &ctx, ht);
13955 md5_finish_ctx (&ctx, checksum_before_op2);
13956 htab_empty (ht);
13957#endif
5039610b
SL
13958
13959 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
db3927fb 13960 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
e2fe73f6 13961 if (!tem)
db3927fb
AH
13962 {
13963 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13964 SET_EXPR_LOCATION (tem, loc);
13965 }
b8698a0f 13966
e2fe73f6
AP
13967#ifdef ENABLE_FOLD_CHECKING
13968 md5_init_ctx (&ctx);
13969 fold_checksum_tree (op0, &ctx, ht);
13970 md5_finish_ctx (&ctx, checksum_after_op0);
13971 htab_empty (ht);
13972
13973 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13974 fold_check_failed (op0, tem);
b8698a0f 13975
e2fe73f6
AP
13976 md5_init_ctx (&ctx);
13977 fold_checksum_tree (op1, &ctx, ht);
13978 md5_finish_ctx (&ctx, checksum_after_op1);
13979 htab_empty (ht);
13980
13981 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13982 fold_check_failed (op1, tem);
b8698a0f 13983
e2fe73f6
AP
13984 md5_init_ctx (&ctx);
13985 fold_checksum_tree (op2, &ctx, ht);
13986 md5_finish_ctx (&ctx, checksum_after_op2);
13987 htab_delete (ht);
13988
13989 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13990 fold_check_failed (op2, tem);
13991#endif
13992 return tem;
ba199a53
KH
13993}
13994
94a0dd7b
SL
13995/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13996 arguments in ARGARRAY, and a null static chain.
5039610b 13997 Return a folded expression if successful. Otherwise, return a CALL_EXPR
94a0dd7b 13998 of type TYPE from the given operands as constructed by build_call_array. */
5039610b
SL
13999
14000tree
db3927fb
AH
14001fold_build_call_array_loc (location_t loc, tree type, tree fn,
14002 int nargs, tree *argarray)
5039610b
SL
14003{
14004 tree tem;
14005#ifdef ENABLE_FOLD_CHECKING
14006 unsigned char checksum_before_fn[16],
14007 checksum_before_arglist[16],
14008 checksum_after_fn[16],
14009 checksum_after_arglist[16];
14010 struct md5_ctx ctx;
14011 htab_t ht;
94a0dd7b 14012 int i;
5039610b
SL
14013
14014 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14015 md5_init_ctx (&ctx);
14016 fold_checksum_tree (fn, &ctx, ht);
14017 md5_finish_ctx (&ctx, checksum_before_fn);
14018 htab_empty (ht);
14019
14020 md5_init_ctx (&ctx);
94a0dd7b
SL
14021 for (i = 0; i < nargs; i++)
14022 fold_checksum_tree (argarray[i], &ctx, ht);
5039610b
SL
14023 md5_finish_ctx (&ctx, checksum_before_arglist);
14024 htab_empty (ht);
14025#endif
14026
db3927fb 14027 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
b8698a0f 14028
5039610b
SL
14029#ifdef ENABLE_FOLD_CHECKING
14030 md5_init_ctx (&ctx);
14031 fold_checksum_tree (fn, &ctx, ht);
14032 md5_finish_ctx (&ctx, checksum_after_fn);
14033 htab_empty (ht);
14034
14035 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14036 fold_check_failed (fn, tem);
b8698a0f 14037
5039610b 14038 md5_init_ctx (&ctx);
94a0dd7b
SL
14039 for (i = 0; i < nargs; i++)
14040 fold_checksum_tree (argarray[i], &ctx, ht);
5039610b
SL
14041 md5_finish_ctx (&ctx, checksum_after_arglist);
14042 htab_delete (ht);
14043
14044 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
94a0dd7b 14045 fold_check_failed (NULL_TREE, tem);
5039610b
SL
14046#endif
14047 return tem;
14048}
14049
a98ebe2e 14050/* Perform constant folding and related simplification of an initializer
00d1b1d6 14051 expression. These functions behave identically to "fold_buildN" but ignore
3e4093b6
RS
14052 potential run-time traps and exceptions that fold must preserve. */
14053
00d1b1d6
JM
14054#define START_FOLD_INIT \
14055 int saved_signaling_nans = flag_signaling_nans;\
14056 int saved_trapping_math = flag_trapping_math;\
14057 int saved_rounding_math = flag_rounding_math;\
14058 int saved_trapv = flag_trapv;\
63b48197 14059 int saved_folding_initializer = folding_initializer;\
00d1b1d6
JM
14060 flag_signaling_nans = 0;\
14061 flag_trapping_math = 0;\
14062 flag_rounding_math = 0;\
63b48197
MS
14063 flag_trapv = 0;\
14064 folding_initializer = 1;
00d1b1d6
JM
14065
14066#define END_FOLD_INIT \
14067 flag_signaling_nans = saved_signaling_nans;\
14068 flag_trapping_math = saved_trapping_math;\
14069 flag_rounding_math = saved_rounding_math;\
63b48197
MS
14070 flag_trapv = saved_trapv;\
14071 folding_initializer = saved_folding_initializer;
00d1b1d6
JM
14072
14073tree
db3927fb
AH
14074fold_build1_initializer_loc (location_t loc, enum tree_code code,
14075 tree type, tree op)
00d1b1d6
JM
14076{
14077 tree result;
14078 START_FOLD_INIT;
14079
db3927fb 14080 result = fold_build1_loc (loc, code, type, op);
00d1b1d6
JM
14081
14082 END_FOLD_INIT;
14083 return result;
14084}
14085
3e4093b6 14086tree
db3927fb
AH
14087fold_build2_initializer_loc (location_t loc, enum tree_code code,
14088 tree type, tree op0, tree op1)
3e4093b6 14089{
3e4093b6 14090 tree result;
00d1b1d6
JM
14091 START_FOLD_INIT;
14092
db3927fb 14093 result = fold_build2_loc (loc, code, type, op0, op1);
3e4093b6 14094
00d1b1d6
JM
14095 END_FOLD_INIT;
14096 return result;
14097}
3e4093b6 14098
00d1b1d6 14099tree
db3927fb
AH
14100fold_build3_initializer_loc (location_t loc, enum tree_code code,
14101 tree type, tree op0, tree op1, tree op2)
00d1b1d6
JM
14102{
14103 tree result;
14104 START_FOLD_INIT;
3e4093b6 14105
db3927fb 14106 result = fold_build3_loc (loc, code, type, op0, op1, op2);
3e4093b6 14107
00d1b1d6 14108 END_FOLD_INIT;
3e4093b6
RS
14109 return result;
14110}
14111
5039610b 14112tree
db3927fb
AH
14113fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14114 int nargs, tree *argarray)
5039610b
SL
14115{
14116 tree result;
14117 START_FOLD_INIT;
14118
db3927fb 14119 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
5039610b
SL
14120
14121 END_FOLD_INIT;
14122 return result;
14123}
14124
00d1b1d6
JM
14125#undef START_FOLD_INIT
14126#undef END_FOLD_INIT
14127
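/* Behavioral sketch (editor's illustration): with -ftrapping-math in
   effect, plain fold must keep a possibly-trapping division, but a static
   initializer is evaluated at compile time anyway, so the _initializer
   variants may fold it:

     tree one = build_real (double_type_node, dconst1);
     tree zero = build_real (double_type_node, dconst0);
     tree t = fold_build2_initializer_loc (input_location, RDIV_EXPR,
                                           double_type_node, one, zero);

   Inside the call, flag_trapping_math and friends are temporarily cleared
   by START_FOLD_INIT and restored by END_FOLD_INIT.  */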
c5c76735
JL
14128/* Determine if first argument is a multiple of second argument. Return 0 if
14129 it is not, or we cannot easily determine it to be.
39dfb55a 14130
c5c76735
JL
14131 An example of the sort of thing we care about (at this point; this routine
14132 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14133 fold cases do now) is discovering that
39dfb55a
JL
14134
14135 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14136
14137 is a multiple of
14138
14139 SAVE_EXPR (J * 8)
14140
c5c76735 14141 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
39dfb55a
JL
14142
14143 This code also handles discovering that
14144
14145 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14146
c5c76735 14147 is a multiple of 8 so we don't have to worry about dealing with a
39dfb55a
JL
14148 possible remainder.
14149
c5c76735
JL
14150 Note that we *look* inside a SAVE_EXPR only to determine how it was
14151 calculated; it is not safe for fold to do much of anything else with the
14152 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14153 at run time. For example, the latter example above *cannot* be implemented
14154 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14155 evaluation time of the original SAVE_EXPR is not necessarily the same at
14156 the time the new expression is evaluated. The only optimization of this
39dfb55a
JL
14157 sort that would be valid is changing
14158
14159 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
39dfb55a 14160
c5c76735 14161 divided by 8 to
39dfb55a
JL
14162
14163 SAVE_EXPR (I) * SAVE_EXPR (J)
14164
14165 (where the same SAVE_EXPR (J) is used in the original and the
14166 transformed version). */
14167
d4e70294 14168int
ac545c64 14169multiple_of_p (tree type, const_tree top, const_tree bottom)
39dfb55a
JL
14170{
14171 if (operand_equal_p (top, bottom, 0))
14172 return 1;
14173
14174 if (TREE_CODE (type) != INTEGER_TYPE)
14175 return 0;
14176
14177 switch (TREE_CODE (top))
14178 {
29317008
RH
14179 case BIT_AND_EXPR:
14180 /* Bitwise and provides a power of two multiple. If the mask is
14181 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14182 if (!integer_pow2p (bottom))
14183 return 0;
14184 /* FALLTHRU */
14185
39dfb55a
JL
14186 case MULT_EXPR:
14187 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14188 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14189
14190 case PLUS_EXPR:
14191 case MINUS_EXPR:
14192 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14193 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14194
fba2c0cd
JJ
14195 case LSHIFT_EXPR:
14196 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14197 {
14198 tree op1, t1;
14199
14200 op1 = TREE_OPERAND (top, 1);
14201 /* const_binop may not detect overflow correctly,
14202 so check for it explicitly here. */
14203 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14204 > TREE_INT_CST_LOW (op1)
14205 && TREE_INT_CST_HIGH (op1) == 0
088414c1
RS
14206 && 0 != (t1 = fold_convert (type,
14207 const_binop (LSHIFT_EXPR,
14208 size_one_node,
43a5d30b 14209 op1)))
455f14dd 14210 && !TREE_OVERFLOW (t1))
fba2c0cd
JJ
14211 return multiple_of_p (type, t1, bottom);
14212 }
14213 return 0;
14214
39dfb55a 14215 case NOP_EXPR:
c5c76735 14216 /* Can't handle conversions from non-integral or wider integral type. */
39dfb55a
JL
14217 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14218 || (TYPE_PRECISION (type)
14219 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14220 return 0;
c5c76735 14221
30f7a378 14222 /* .. fall through ... */
c5c76735 14223
39dfb55a
JL
14224 case SAVE_EXPR:
14225 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14226
9e9ef331
EB
14227 case COND_EXPR:
14228 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14229 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14230
39dfb55a 14231 case INTEGER_CST:
fba2c0cd 14232 if (TREE_CODE (bottom) != INTEGER_CST
81737468 14233 || integer_zerop (bottom)
8df83eae 14234 || (TYPE_UNSIGNED (type)
fba2c0cd
JJ
14235 && (tree_int_cst_sgn (top) < 0
14236 || tree_int_cst_sgn (bottom) < 0)))
39dfb55a 14237 return 0;
b73a6056
RS
14238 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14239 top, bottom, 0));
39dfb55a
JL
14240
14241 default:
14242 return 0;
14243 }
14244}
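/* Usage sketch (editor's illustration): a caller that has proved
   divisibility can safely emit an exact division, e.g.

     if (multiple_of_p (type, top, bottom))
       return fold_build2 (EXACT_DIV_EXPR, type, top, bottom);

   With TOP == SAVE_EXPR (I) * SAVE_EXPR (J * 8) and BOTTOM ==
   SAVE_EXPR (J * 8), the MULT_EXPR case above matches the second operand
   of the product, so the whole product is known to be a multiple.  */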
a36556a8 14245
e918a58a
RAE
14246/* Return true if CODE or TYPE is known to be non-negative. */
14247
14248static bool
14249tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14250{
14251 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14252 && truth_value_p (code))
14253 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14254 have a signed:1 type (where the values are -1 and 0). */
14255 return true;
14256 return false;
14257}
14258
14259/* Return true if (CODE OP0) is known to be non-negative. If the return
6ac01510
ILT
14260 value is based on the assumption that signed overflow is undefined,
14261 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14262 *STRICT_OVERFLOW_P. */
a36556a8 14263
2d3cd5d5 14264bool
e918a58a
RAE
14265tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14266 bool *strict_overflow_p)
a36556a8 14267{
e918a58a 14268 if (TYPE_UNSIGNED (type))
682d0395 14269 return true;
b49ceb45 14270
e918a58a 14271 switch (code)
a36556a8 14272 {
88e3805d 14273 case ABS_EXPR:
1ade5842
JM
14274 /* We can't return 1 if flag_wrapv is set because
14275 ABS_EXPR<INT_MIN> = INT_MIN. */
e918a58a 14276 if (!INTEGRAL_TYPE_P (type))
eeef0e45 14277 return true;
e918a58a 14278 if (TYPE_OVERFLOW_UNDEFINED (type))
6ac01510
ILT
14279 {
14280 *strict_overflow_p = true;
14281 return true;
14282 }
1ade5842 14283 break;
7dba8395 14284
e918a58a
RAE
14285 case NON_LVALUE_EXPR:
14286 case FLOAT_EXPR:
14287 case FIX_TRUNC_EXPR:
14288 return tree_expr_nonnegative_warnv_p (op0,
14289 strict_overflow_p);
f7df23be 14290
e918a58a
RAE
14291 case NOP_EXPR:
14292 {
14293 tree inner_type = TREE_TYPE (op0);
14294 tree outer_type = type;
f7df23be 14295
e918a58a
RAE
14296 if (TREE_CODE (outer_type) == REAL_TYPE)
14297 {
14298 if (TREE_CODE (inner_type) == REAL_TYPE)
14299 return tree_expr_nonnegative_warnv_p (op0,
14300 strict_overflow_p);
14301 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14302 {
14303 if (TYPE_UNSIGNED (inner_type))
14304 return true;
14305 return tree_expr_nonnegative_warnv_p (op0,
14306 strict_overflow_p);
14307 }
14308 }
14309 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14310 {
14311 if (TREE_CODE (inner_type) == REAL_TYPE)
14312 return tree_expr_nonnegative_warnv_p (op0,
14313 strict_overflow_p);
14314 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14315 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14316 && TYPE_UNSIGNED (inner_type);
14317 }
14318 }
14319 break;
14320
14321 default:
14322 return tree_simple_nonnegative_warnv_p (code, type);
14323 }
14324
14325 /* We don't know the sign, so be conservative and return false. */
14326 return false;
14327}
325217ed 14328
e918a58a
RAE
14329/* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14330 value is based on the assumption that signed overflow is undefined,
14331 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14332 *STRICT_OVERFLOW_P. */
14333
2d3cd5d5 14334bool
e918a58a
RAE
14335tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14336 tree op1, bool *strict_overflow_p)
14337{
14338 if (TYPE_UNSIGNED (type))
14339 return true;
14340
14341 switch (code)
14342 {
5be014d5 14343 case POINTER_PLUS_EXPR:
f7df23be 14344 case PLUS_EXPR:
e918a58a
RAE
14345 if (FLOAT_TYPE_P (type))
14346 return (tree_expr_nonnegative_warnv_p (op0,
6ac01510 14347 strict_overflow_p)
e918a58a 14348 && tree_expr_nonnegative_warnv_p (op1,
6ac01510 14349 strict_overflow_p));
96f26e41 14350
e15bb5c6 14351 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
e2cca9be 14352 both unsigned and at least 2 bits shorter than the result. */
e918a58a
RAE
14353 if (TREE_CODE (type) == INTEGER_TYPE
14354 && TREE_CODE (op0) == NOP_EXPR
14355 && TREE_CODE (op1) == NOP_EXPR)
96f26e41 14356 {
e918a58a
RAE
14357 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14358 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
8df83eae
RK
14359 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14360 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
96f26e41
RS
14361 {
14362 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14363 TYPE_PRECISION (inner2)) + 1;
e918a58a 14364 return prec < TYPE_PRECISION (type);
96f26e41
RS
14365 }
14366 }
14367 break;
f7df23be
RS
14368
14369 case MULT_EXPR:
e918a58a 14370 if (FLOAT_TYPE_P (type))
f7df23be
RS
14371 {
14372 /* x * x for floating point x is always non-negative. */
e918a58a 14373 if (operand_equal_p (op0, op1, 0))
682d0395 14374 return true;
e918a58a 14375 return (tree_expr_nonnegative_warnv_p (op0,
6ac01510 14376 strict_overflow_p)
e918a58a 14377 && tree_expr_nonnegative_warnv_p (op1,
6ac01510 14378 strict_overflow_p));
f7df23be 14379 }
96f26e41 14380
e15bb5c6 14381 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
96f26e41 14382 both unsigned and their combined precision is less than that of the result. */
e918a58a 14383 if (TREE_CODE (type) == INTEGER_TYPE
cdd6a337
MLI
14384 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14385 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
96f26e41 14386 {
b8698a0f 14387 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
cdd6a337
MLI
14388 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14389 : TREE_TYPE (op0);
b8698a0f 14390 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
cdd6a337
MLI
14391 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14392 : TREE_TYPE (op1);
14393
14394 bool unsigned0 = TYPE_UNSIGNED (inner0);
14395 bool unsigned1 = TYPE_UNSIGNED (inner1);
14396
14397 if (TREE_CODE (op0) == INTEGER_CST)
14398 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14399
14400 if (TREE_CODE (op1) == INTEGER_CST)
14401 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14402
14403 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14404 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14405 {
14406 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14407 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14408 : TYPE_PRECISION (inner0);
14409
14410 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14411 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14412 : TYPE_PRECISION (inner1);
14413
14414 return precision0 + precision1 < TYPE_PRECISION (type);
14415 }
96f26e41 14416 }
682d0395 14417 return false;
f7df23be 14418
196f5a8d
VR
14419 case BIT_AND_EXPR:
14420 case MAX_EXPR:
e918a58a 14421 return (tree_expr_nonnegative_warnv_p (op0,
6ac01510 14422 strict_overflow_p)
e918a58a 14423 || tree_expr_nonnegative_warnv_p (op1,
6ac01510 14424 strict_overflow_p));
196f5a8d
VR
14425
14426 case BIT_IOR_EXPR:
14427 case BIT_XOR_EXPR:
14428 case MIN_EXPR:
14429 case RDIV_EXPR:
ada11335
KG
14430 case TRUNC_DIV_EXPR:
14431 case CEIL_DIV_EXPR:
14432 case FLOOR_DIV_EXPR:
14433 case ROUND_DIV_EXPR:
e918a58a 14434 return (tree_expr_nonnegative_warnv_p (op0,
6ac01510 14435 strict_overflow_p)
e918a58a 14436 && tree_expr_nonnegative_warnv_p (op1,
6ac01510 14437 strict_overflow_p));
96f26e41 14438
ada11335
KG
14439 case TRUNC_MOD_EXPR:
14440 case CEIL_MOD_EXPR:
14441 case FLOOR_MOD_EXPR:
14442 case ROUND_MOD_EXPR:
e918a58a 14443 return tree_expr_nonnegative_warnv_p (op0,
6ac01510 14444 strict_overflow_p);
e918a58a
RAE
14445 default:
14446 return tree_simple_nonnegative_warnv_p (code, type);
14447 }
96f26e41 14448
e918a58a
RAE
14449 /* We don't know the sign, so be conservative and return false. */
14450 return false;
14451}
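/* Worked example (editor's note) for the zero-extension cases above:
   adding two zero-extended 8-bit values in a 32-bit signed int needs at
   most 9 value bits (255 + 255 = 510), and multiplying them needs at most
   16 (255 * 255 = 65025), so in both cases the sign bit of the wider type
   can never be set and the result is provably non-negative.  */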
96f26e41 14452
e918a58a
RAE
14453/* Return true if T is known to be non-negative. If the return
14454 value is based on the assumption that signed overflow is undefined,
14455 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14456 *STRICT_OVERFLOW_P. */
14457
2d3cd5d5 14458bool
e918a58a
RAE
14459tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14460{
14461 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14462 return true;
14463
07c40d0b 14464 switch (TREE_CODE (t))
e918a58a 14465 {
e918a58a
RAE
14466 case INTEGER_CST:
14467 return tree_int_cst_sgn (t) >= 0;
14468
14469 case REAL_CST:
14470 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14471
14472 case FIXED_CST:
14473 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
196f5a8d
VR
14474
14475 case COND_EXPR:
6ac01510
ILT
14476 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14477 strict_overflow_p)
14478 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14479 strict_overflow_p));
e918a58a
RAE
14480 default:
14481 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14482 TREE_TYPE (t));
14483 }
14484 /* We don't know the sign of `t', so be conservative and return false. */
14485 return false;
14486}
b1500d00 14487
a1a6e271
RAE
14488/* Return true if T is known to be non-negative. If the return
14489 value is based on the assumption that signed overflow is undefined,
14490 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14491 *STRICT_OVERFLOW_P. */
14492
14493bool
726a989a 14494tree_call_nonnegative_warnv_p (tree type, tree fndecl,
a1a6e271
RAE
14495 tree arg0, tree arg1, bool *strict_overflow_p)
14496{
14497 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14498 switch (DECL_FUNCTION_CODE (fndecl))
14499 {
14500 CASE_FLT_FN (BUILT_IN_ACOS):
14501 CASE_FLT_FN (BUILT_IN_ACOSH):
14502 CASE_FLT_FN (BUILT_IN_CABS):
14503 CASE_FLT_FN (BUILT_IN_COSH):
14504 CASE_FLT_FN (BUILT_IN_ERFC):
14505 CASE_FLT_FN (BUILT_IN_EXP):
14506 CASE_FLT_FN (BUILT_IN_EXP10):
14507 CASE_FLT_FN (BUILT_IN_EXP2):
14508 CASE_FLT_FN (BUILT_IN_FABS):
14509 CASE_FLT_FN (BUILT_IN_FDIM):
14510 CASE_FLT_FN (BUILT_IN_HYPOT):
14511 CASE_FLT_FN (BUILT_IN_POW10):
14512 CASE_INT_FN (BUILT_IN_FFS):
14513 CASE_INT_FN (BUILT_IN_PARITY):
14514 CASE_INT_FN (BUILT_IN_POPCOUNT):
14515 case BUILT_IN_BSWAP32:
14516 case BUILT_IN_BSWAP64:
14517 /* Always true. */
14518 return true;
14519
14520 CASE_FLT_FN (BUILT_IN_SQRT):
14521 /* sqrt(-0.0) is -0.0. */
14522 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14523 return true;
14524 return tree_expr_nonnegative_warnv_p (arg0,
14525 strict_overflow_p);
14526
14527 CASE_FLT_FN (BUILT_IN_ASINH):
14528 CASE_FLT_FN (BUILT_IN_ATAN):
14529 CASE_FLT_FN (BUILT_IN_ATANH):
14530 CASE_FLT_FN (BUILT_IN_CBRT):
14531 CASE_FLT_FN (BUILT_IN_CEIL):
14532 CASE_FLT_FN (BUILT_IN_ERF):
14533 CASE_FLT_FN (BUILT_IN_EXPM1):
14534 CASE_FLT_FN (BUILT_IN_FLOOR):
14535 CASE_FLT_FN (BUILT_IN_FMOD):
14536 CASE_FLT_FN (BUILT_IN_FREXP):
14537 CASE_FLT_FN (BUILT_IN_LCEIL):
14538 CASE_FLT_FN (BUILT_IN_LDEXP):
14539 CASE_FLT_FN (BUILT_IN_LFLOOR):
14540 CASE_FLT_FN (BUILT_IN_LLCEIL):
14541 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14542 CASE_FLT_FN (BUILT_IN_LLRINT):
14543 CASE_FLT_FN (BUILT_IN_LLROUND):
14544 CASE_FLT_FN (BUILT_IN_LRINT):
14545 CASE_FLT_FN (BUILT_IN_LROUND):
14546 CASE_FLT_FN (BUILT_IN_MODF):
14547 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14548 CASE_FLT_FN (BUILT_IN_RINT):
14549 CASE_FLT_FN (BUILT_IN_ROUND):
14550 CASE_FLT_FN (BUILT_IN_SCALB):
14551 CASE_FLT_FN (BUILT_IN_SCALBLN):
14552 CASE_FLT_FN (BUILT_IN_SCALBN):
14553 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14554 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14555 CASE_FLT_FN (BUILT_IN_SINH):
14556 CASE_FLT_FN (BUILT_IN_TANH):
14557 CASE_FLT_FN (BUILT_IN_TRUNC):
14558 /* True if the 1st argument is nonnegative. */
14559 return tree_expr_nonnegative_warnv_p (arg0,
14560 strict_overflow_p);
14561
14562 CASE_FLT_FN (BUILT_IN_FMAX):
14563 /* True if the 1st OR 2nd arguments are nonnegative. */
14564 return (tree_expr_nonnegative_warnv_p (arg0,
14565 strict_overflow_p)
14566 || (tree_expr_nonnegative_warnv_p (arg1,
14567 strict_overflow_p)));
14568
14569 CASE_FLT_FN (BUILT_IN_FMIN):
14570 /* True if the 1st AND 2nd arguments are nonnegative. */
14571 return (tree_expr_nonnegative_warnv_p (arg0,
14572 strict_overflow_p)
14573 && (tree_expr_nonnegative_warnv_p (arg1,
14574 strict_overflow_p)));
14575
14576 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14577 /* True if the 2nd argument is nonnegative. */
14578 return tree_expr_nonnegative_warnv_p (arg1,
14579 strict_overflow_p);
14580
14581 CASE_FLT_FN (BUILT_IN_POWI):
14582 /* True if the 1st argument is nonnegative or the second
14583 argument is an even integer. */
d0599470
RAE
14584 if (TREE_CODE (arg1) == INTEGER_CST
14585 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14586 return true;
a1a6e271
RAE
14587 return tree_expr_nonnegative_warnv_p (arg0,
14588 strict_overflow_p);
14589
14590 CASE_FLT_FN (BUILT_IN_POW):
14591 /* True if the 1st argument is nonnegative or the second
14592 argument is an even integer valued real. */
14593 if (TREE_CODE (arg1) == REAL_CST)
14594 {
14595 REAL_VALUE_TYPE c;
14596 HOST_WIDE_INT n;
14597
14598 c = TREE_REAL_CST (arg1);
14599 n = real_to_integer (&c);
14600 if ((n & 1) == 0)
14601 {
14602 REAL_VALUE_TYPE cint;
14603 real_from_integer (&cint, VOIDmode, n,
14604 n < 0 ? -1 : 0, 0);
14605 if (real_identical (&c, &cint))
14606 return true;
14607 }
14608 }
14609 return tree_expr_nonnegative_warnv_p (arg0,
14610 strict_overflow_p);
14611
14612 default:
14613 break;
14614 }
726a989a 14615 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
a1a6e271
RAE
14616 type);
14617}
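/* Worked example (editor's note) for the POW/POWI cases above:
   pow (x, 2.0) is treated as non-negative for any x, because the exponent
   is an even integer valued real, whereas pow (x, 3.0) is known
   non-negative only when x itself can be shown non-negative.  */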
14618
e918a58a
RAE
14619/* Return true if T is known to be non-negative. If the return
14620 value is based on the assumption that signed overflow is undefined,
14621 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14622 *STRICT_OVERFLOW_P. */
96f26e41 14623
2d3cd5d5 14624bool
e918a58a
RAE
14625tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14626{
07c40d0b 14627 enum tree_code code = TREE_CODE (t);
e918a58a
RAE
14628 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14629 return true;
96f26e41 14630
e918a58a
RAE
14631 switch (code)
14632 {
3a5b9284
RH
14633 case TARGET_EXPR:
14634 {
14635 tree temp = TARGET_EXPR_SLOT (t);
14636 t = TARGET_EXPR_INITIAL (t);
14637
14638 /* If the initializer is non-void, then it's a normal expression
14639 that will be assigned to the slot. */
14640 if (!VOID_TYPE_P (t))
6ac01510 14641 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
3a5b9284
RH
14642
14643 /* Otherwise, the initializer sets the slot in some way. One common
14644 way is an assignment statement at the end of the initializer. */
14645 while (1)
14646 {
14647 if (TREE_CODE (t) == BIND_EXPR)
14648 t = expr_last (BIND_EXPR_BODY (t));
14649 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14650 || TREE_CODE (t) == TRY_CATCH_EXPR)
14651 t = expr_last (TREE_OPERAND (t, 0));
14652 else if (TREE_CODE (t) == STATEMENT_LIST)
14653 t = expr_last (t);
14654 else
14655 break;
14656 }
726a989a
RB
14657 if (TREE_CODE (t) == MODIFY_EXPR
14658 && TREE_OPERAND (t, 0) == temp)
14659 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
6ac01510 14660 strict_overflow_p);
3a5b9284 14661
682d0395 14662 return false;
3a5b9284
RH
14663 }
14664
07bae5ad 14665 case CALL_EXPR:
2f503025 14666 {
a1a6e271
RAE
14667 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14668 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14669
726a989a 14670 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
a1a6e271
RAE
14671 get_callee_fndecl (t),
14672 arg0,
14673 arg1,
14674 strict_overflow_p);
2f503025 14675 }
e918a58a
RAE
14676 case COMPOUND_EXPR:
14677 case MODIFY_EXPR:
726a989a 14678 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
e918a58a
RAE
14679 strict_overflow_p);
14680 case BIND_EXPR:
14681 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14682 strict_overflow_p);
14683 case SAVE_EXPR:
14684 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14685 strict_overflow_p);
07bae5ad 14686
a36556a8 14687 default:
e918a58a
RAE
14688 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14689 TREE_TYPE (t));
a36556a8 14690 }
96f26e41
RS
14691
14692 /* We don't know the sign of `t', so be conservative and return false. */
682d0395 14693 return false;
a36556a8
ZW
14694}
14695
e918a58a
RAE
14696/* Return true if T is known to be non-negative. If the return
14697 value is based on the assumption that signed overflow is undefined,
14698 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14699 *STRICT_OVERFLOW_P. */
14700
14701bool
14702tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14703{
14704 enum tree_code code;
14705 if (t == error_mark_node)
14706 return false;
14707
14708 code = TREE_CODE (t);
14709 switch (TREE_CODE_CLASS (code))
14710 {
14711 case tcc_binary:
14712 case tcc_comparison:
14713 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14714 TREE_TYPE (t),
14715 TREE_OPERAND (t, 0),
14716 TREE_OPERAND (t, 1),
14717 strict_overflow_p);
14718
14719 case tcc_unary:
14720 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14721 TREE_TYPE (t),
14722 TREE_OPERAND (t, 0),
14723 strict_overflow_p);
14724
14725 case tcc_constant:
14726 case tcc_declaration:
14727 case tcc_reference:
14728 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14729
14730 default:
14731 break;
14732 }
14733
14734 switch (code)
14735 {
14736 case TRUTH_AND_EXPR:
14737 case TRUTH_OR_EXPR:
14738 case TRUTH_XOR_EXPR:
14739 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14740 TREE_TYPE (t),
14741 TREE_OPERAND (t, 0),
14742 TREE_OPERAND (t, 1),
14743 strict_overflow_p);
14744 case TRUTH_NOT_EXPR:
14745 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14746 TREE_TYPE (t),
14747 TREE_OPERAND (t, 0),
14748 strict_overflow_p);
14749
14750 case COND_EXPR:
14751 case CONSTRUCTOR:
14752 case OBJ_TYPE_REF:
14753 case ASSERT_EXPR:
14754 case ADDR_EXPR:
14755 case WITH_SIZE_EXPR:
e918a58a 14756 case SSA_NAME:
e918a58a
RAE
14757 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14758
14759 default:
14760 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14761 }
14762}
14763
6ac01510
ILT
14764/* Return true if `t' is known to be non-negative. Handle warnings
14765 about undefined signed overflow. */
14766
14767bool
14768tree_expr_nonnegative_p (tree t)
14769{
14770 bool ret, strict_overflow_p;
14771
14772 strict_overflow_p = false;
14773 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14774 if (strict_overflow_p)
14775 fold_overflow_warning (("assuming signed overflow does not occur when "
14776 "determining that expression is always "
14777 "non-negative"),
14778 WARN_STRICT_OVERFLOW_MISC);
14779 return ret;
14780}
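/* Usage sketch (editor's illustration): a typical caller drops an
   ABS_EXPR whose operand is already provably non-negative:

     if (tree_expr_nonnegative_p (arg0))
       return fold_convert (type, arg0);

   The fold_overflow_warning call above fires only when that proof relied
   on signed overflow being undefined.  */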
14781
74dd418c
RAE
14782
14783/* Return true when (CODE OP0) is an address and is known to be nonzero.
8e7b3a43 14784 For floating point we further ensure that T is not denormal.
6ac01510
ILT
14785 Similar logic is present in nonzero_address in rtlanal.c.
14786
14787 If the return value is based on the assumption that signed overflow
14788 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14789 change *STRICT_OVERFLOW_P. */
8e7b3a43 14790
2d3cd5d5 14791bool
74dd418c
RAE
14792tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14793 bool *strict_overflow_p)
8e7b3a43 14794{
74dd418c
RAE
14795 switch (code)
14796 {
14797 case ABS_EXPR:
14798 return tree_expr_nonzero_warnv_p (op0,
14799 strict_overflow_p);
8e7b3a43 14800
74dd418c
RAE
14801 case NOP_EXPR:
14802 {
14803 tree inner_type = TREE_TYPE (op0);
14804 tree outer_type = type;
8e7b3a43 14805
74dd418c
RAE
14806 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14807 && tree_expr_nonzero_warnv_p (op0,
14808 strict_overflow_p));
14809 }
14810 break;
b16caf72 14811
74dd418c
RAE
14812 case NON_LVALUE_EXPR:
14813 return tree_expr_nonzero_warnv_p (op0,
6ac01510 14814 strict_overflow_p);
8e7b3a43 14815
74dd418c
RAE
14816 default:
14817 break;
14818 }
14819
14820 return false;
14821}
14822
14823/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14824 For floating point we further ensure that T is not denormal.
14825 Similar logic is present in nonzero_address in rtlanal.c.
14826
14827 If the return value is based on the assumption that signed overflow
14828 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14829 change *STRICT_OVERFLOW_P. */
8e7b3a43 14830
2d3cd5d5 14831bool
74dd418c
RAE
14832tree_binary_nonzero_warnv_p (enum tree_code code,
14833 tree type,
14834 tree op0,
14835 tree op1, bool *strict_overflow_p)
14836{
14837 bool sub_strict_overflow_p;
14838 switch (code)
14839 {
5be014d5 14840 case POINTER_PLUS_EXPR:
8e7b3a43 14841 case PLUS_EXPR:
eeef0e45 14842 if (TYPE_OVERFLOW_UNDEFINED (type))
8e7b3a43
KH
14843 {
14844 /* With the presence of negative values it is hard
14845 to say something. */
6ac01510 14846 sub_strict_overflow_p = false;
74dd418c 14847 if (!tree_expr_nonnegative_warnv_p (op0,
6ac01510 14848 &sub_strict_overflow_p)
74dd418c 14849 || !tree_expr_nonnegative_warnv_p (op1,
6ac01510 14850 &sub_strict_overflow_p))
8e7b3a43
KH
14851 return false;
14852 /* One of operands must be positive and the other non-negative. */
6ac01510
ILT
14853 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14854 overflows, on a twos-complement machine the sum of two
14855 nonnegative numbers can never be zero. */
74dd418c 14856 return (tree_expr_nonzero_warnv_p (op0,
6ac01510 14857 strict_overflow_p)
74dd418c 14858 || tree_expr_nonzero_warnv_p (op1,
6ac01510 14859 strict_overflow_p));
8e7b3a43
KH
14860 }
14861 break;
14862
14863 case MULT_EXPR:
eeef0e45 14864 if (TYPE_OVERFLOW_UNDEFINED (type))
8e7b3a43 14865 {
74dd418c 14866 if (tree_expr_nonzero_warnv_p (op0,
6ac01510 14867 strict_overflow_p)
74dd418c 14868 && tree_expr_nonzero_warnv_p (op1,
6ac01510
ILT
14869 strict_overflow_p))
14870 {
14871 *strict_overflow_p = true;
14872 return true;
14873 }
8e7b3a43
KH
14874 }
14875 break;
14876
74dd418c
RAE
14877 case MIN_EXPR:
14878 sub_strict_overflow_p = false;
14879 if (tree_expr_nonzero_warnv_p (op0,
14880 &sub_strict_overflow_p)
14881 && tree_expr_nonzero_warnv_p (op1,
14882 &sub_strict_overflow_p))
14883 {
14884 if (sub_strict_overflow_p)
14885 *strict_overflow_p = true;
14886 }
14887 break;
8e7b3a43 14888
74dd418c
RAE
14889 case MAX_EXPR:
14890 sub_strict_overflow_p = false;
14891 if (tree_expr_nonzero_warnv_p (op0,
14892 &sub_strict_overflow_p))
14893 {
14894 if (sub_strict_overflow_p)
14895 *strict_overflow_p = true;
14896
14897 /* When both operands are nonzero, then MAX must be too. */
14898 if (tree_expr_nonzero_warnv_p (op1,
14899 strict_overflow_p))
14900 return true;
14901
14902 /* MAX where operand 0 is positive is positive. */
14903 return tree_expr_nonnegative_warnv_p (op0,
14904 strict_overflow_p);
14905 }
14906 /* MAX where operand 1 is positive is positive. */
14907 else if (tree_expr_nonzero_warnv_p (op1,
14908 &sub_strict_overflow_p)
14909 && tree_expr_nonnegative_warnv_p (op1,
14910 &sub_strict_overflow_p))
14911 {
14912 if (sub_strict_overflow_p)
14913 *strict_overflow_p = true;
14914 return true;
14915 }
14916 break;
14917
14918 case BIT_IOR_EXPR:
14919 return (tree_expr_nonzero_warnv_p (op1,
14920 strict_overflow_p)
14921 || tree_expr_nonzero_warnv_p (op0,
14922 strict_overflow_p));
14923
14924 default:
8e7b3a43 14925 break;
74dd418c 14926 }
8e7b3a43 14927
74dd418c
RAE
14928 return false;
14929}
14930
14931/* Return true when T is an address and is known to be nonzero.
14932 For floating point we further ensure that T is not denormal.
14933 Similar logic is present in nonzero_address in rtlanal.c.
14934
14935 If the return value is based on the assumption that signed overflow
14936 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14937 change *STRICT_OVERFLOW_P. */
14938
2d3cd5d5 14939bool
74dd418c
RAE
14940tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14941{
14942 bool sub_strict_overflow_p;
14943 switch (TREE_CODE (t))
14944 {
74dd418c
RAE
14945 case INTEGER_CST:
14946 return !integer_zerop (t);
14947
14948 case ADDR_EXPR:
88f19756 14949 {
3d7a712a
RG
14950 tree base = TREE_OPERAND (t, 0);
14951 if (!DECL_P (base))
14952 base = get_base_address (base);
88f19756
RH
14953
14954 if (!base)
14955 return false;
14956
4d35e75c
PB
14957 /* Weak declarations may link to NULL. Other things may also be NULL
14958 so protect with -fdelete-null-pointer-checks; but not variables
14959 allocated on the stack. */
14960 if (DECL_P (base)
14961 && (flag_delete_null_pointer_checks
3d7a712a
RG
14962 || (DECL_CONTEXT (base)
14963 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
14964 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
b45f0e58 14965 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
88f19756
RH
14966
14967 /* Constants are never weak. */
6615c446 14968 if (CONSTANT_CLASS_P (base))
88f19756
RH
14969 return true;
14970
14971 return false;
14972 }
8e7b3a43
KH
14973
14974 case COND_EXPR:
6ac01510
ILT
14975 sub_strict_overflow_p = false;
14976 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14977 &sub_strict_overflow_p)
14978 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14979 &sub_strict_overflow_p))
14980 {
14981 if (sub_strict_overflow_p)
14982 *strict_overflow_p = true;
14983 return true;
14984 }
14985 break;
8e7b3a43 14986
74dd418c 14987 default:
6ac01510 14988 break;
74dd418c
RAE
14989 }
14990 return false;
14991}
8e7b3a43 14992
74dd418c
RAE
14993/* Return true when T is an address and is known to be nonzero.
14994 For floating point we further ensure that T is not denormal.
14995 Similar logic is present in nonzero_address in rtlanal.c.
6ac01510 14996
74dd418c
RAE
14997 If the return value is based on the assumption that signed overflow
14998 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14999 change *STRICT_OVERFLOW_P. */
8e7b3a43 15000
74dd418c
RAE
15001bool
15002tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15003{
15004 tree type = TREE_TYPE (t);
15005 enum tree_code code;
15006
15007 /* Doing something useful for floating point would need more work. */
15008 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15009 return false;
15010
15011 code = TREE_CODE (t);
15012 switch (TREE_CODE_CLASS (code))
15013 {
15014 case tcc_unary:
15015 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15016 strict_overflow_p);
15017 case tcc_binary:
15018 case tcc_comparison:
15019 return tree_binary_nonzero_warnv_p (code, type,
15020 TREE_OPERAND (t, 0),
15021 TREE_OPERAND (t, 1),
6ac01510 15022 strict_overflow_p);
74dd418c
RAE
15023 case tcc_constant:
15024 case tcc_declaration:
15025 case tcc_reference:
15026 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15027
15028 default:
8e7b3a43 15029 break;
74dd418c
RAE
15030 }
15031
15032 switch (code)
15033 {
15034 case TRUTH_NOT_EXPR:
15035 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15036 strict_overflow_p);
15037
15038 case TRUTH_AND_EXPR:
15039 case TRUTH_OR_EXPR:
15040 case TRUTH_XOR_EXPR:
15041 return tree_binary_nonzero_warnv_p (code, type,
15042 TREE_OPERAND (t, 0),
15043 TREE_OPERAND (t, 1),
15044 strict_overflow_p);
15045
15046 case COND_EXPR:
15047 case CONSTRUCTOR:
15048 case OBJ_TYPE_REF:
15049 case ASSERT_EXPR:
15050 case ADDR_EXPR:
15051 case WITH_SIZE_EXPR:
74dd418c 15052 case SSA_NAME:
74dd418c 15053 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8e7b3a43
KH
15054
15055 case COMPOUND_EXPR:
15056 case MODIFY_EXPR:
15057 case BIND_EXPR:
726a989a 15058 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
6ac01510 15059 strict_overflow_p);
8e7b3a43
KH
15060
15061 case SAVE_EXPR:
6ac01510
ILT
15062 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15063 strict_overflow_p);
8e7b3a43 15064
4db8040c
JM
15065 case CALL_EXPR:
15066 return alloca_call_p (t);
15067
8e7b3a43
KH
15068 default:
15069 break;
15070 }
15071 return false;
15072}
15073
6ac01510
ILT
15074/* Return true when T is an address and is known to be nonzero.
15075 Handle warnings about undefined signed overflow. */
15076
15077bool
15078tree_expr_nonzero_p (tree t)
15079{
15080 bool ret, strict_overflow_p;
15081
15082 strict_overflow_p = false;
15083 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15084 if (strict_overflow_p)
15085 fold_overflow_warning (("assuming signed overflow does not occur when "
15086 "determining that expression is always "
15087 "non-zero"),
15088 WARN_STRICT_OVERFLOW_MISC);
15089 return ret;
15090}
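/* Usage sketch (editor's illustration): comparison folding can use this
   predicate to decide a test against zero at compile time, e.g.

     if (tree_expr_nonzero_p (arg0))
       return constant_boolean_node (code == NE_EXPR, type);

   so that "p != 0" becomes true and "p == 0" becomes false when p is, for
   instance, the address of a non-weak declaration.  */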
15091
6de9cd9a
DN
15092/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15093 attempt to fold the expression to a constant without modifying TYPE,
15094 OP0 or OP1.
15095
15096 If the expression could be simplified to a constant, then return
15097 the constant. If the expression would not be simplified to a
41704a38 15098 constant, then return NULL_TREE. */
6de9cd9a
DN
15099
15100tree
b52d5eaa 15101fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
6de9cd9a 15102{
054632e8
RS
15103 tree tem = fold_binary (code, type, op0, op1);
15104 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
6de9cd9a
DN
15105}
15106
15107/* Given the components of a unary expression CODE, TYPE and OP0,
15108 attempt to fold the expression to a constant without modifying
d1822754 15109 TYPE or OP0.
6de9cd9a
DN
15110
15111 If the expression could be simplified to a constant, then return
15112 the constant. If the expression would not be simplified to a
41704a38 15113 constant, then return NULL_TREE. */
6de9cd9a
DN
15114
15115tree
b52d5eaa 15116fold_unary_to_constant (enum tree_code code, tree type, tree op0)
6de9cd9a 15117{
054632e8
RS
15118 tree tem = fold_unary (code, type, op0);
15119 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
6de9cd9a
DN
15120}
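/* Usage sketch (editor's illustration): callers that only care about a
   constant result use these wrappers instead of fold_buildN, e.g.

     tree c = fold_binary_to_constant (PLUS_EXPR, sizetype,
                                       size_int (3), size_int (5));

   Here c is the INTEGER_CST 8; had either operand been a variable, any
   partially simplified tree would be discarded and c would be NULL_TREE.  */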
15121
15122/* If EXP represents referencing an element in a constant string
15123 (either via pointer arithmetic or array indexing), return the
15124 tree representing the value accessed, otherwise return NULL. */
15125
15126tree
15127fold_read_from_constant_string (tree exp)
15128{
8e3dc7a3
RG
15129 if ((TREE_CODE (exp) == INDIRECT_REF
15130 || TREE_CODE (exp) == ARRAY_REF)
15131 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
6de9cd9a
DN
15132 {
15133 tree exp1 = TREE_OPERAND (exp, 0);
15134 tree index;
15135 tree string;
db3927fb 15136 location_t loc = EXPR_LOCATION (exp);
6de9cd9a
DN
15137
15138 if (TREE_CODE (exp) == INDIRECT_REF)
44de5aeb 15139 string = string_constant (exp1, &index);
6de9cd9a
DN
15140 else
15141 {
44de5aeb 15142 tree low_bound = array_ref_low_bound (exp);
db3927fb 15143 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
d1822754 15144
6de9cd9a
DN
15145 /* Optimize the special-case of a zero lower bound.
15146
15147 We convert the low_bound to sizetype to avoid some problems
15148 with constant folding. (E.g. suppose the lower bound is 1,
15149 and its mode is QI. Without the conversion, (ARRAY
15150 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fa10beec 15151 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6de9cd9a 15152 if (! integer_zerop (low_bound))
db3927fb
AH
15153 index = size_diffop_loc (loc, index,
15154 fold_convert_loc (loc, sizetype, low_bound));
6de9cd9a
DN
15155
15156 string = exp1;
15157 }
15158
15159 if (string
f9c3744b 15160 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
6de9cd9a
DN
15161 && TREE_CODE (string) == STRING_CST
15162 && TREE_CODE (index) == INTEGER_CST
15163 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15164 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15165 == MODE_INT)
15166 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
0c4d4efb
DJ
15167 return build_int_cst_type (TREE_TYPE (exp),
15168 (TREE_STRING_POINTER (string)
15169 [TREE_INT_CST_LOW (index)]));
6de9cd9a
DN
15170 }
15171 return NULL;
15172}
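/* Behavioral sketch (editor's illustration): given the tree for "abc"[1],
   an ARRAY_REF of a STRING_CST with the INTEGER_CST index 1, this returns
   an INTEGER_CST of the element type holding 'b'.  A variable index, an
   out-of-range constant index, or an element mode wider than one byte all
   make it return NULL instead.  */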
15173
33d13fac 15174/* Return the tree for neg (ARG0) when ARG0 is known to be either
325217ed 15175 an integer constant, real, or fixed-point constant.
33d13fac
KH
15176
15177 TYPE is the type of the result. */
15178
15179static tree
15180fold_negate_const (tree arg0, tree type)
15181{
15182 tree t = NULL_TREE;
15183
0bccc606 15184 switch (TREE_CODE (arg0))
33d13fac 15185 {
0bccc606
NS
15186 case INTEGER_CST:
15187 {
9589f23e
AS
15188 double_int val = tree_to_double_int (arg0);
15189 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15190
15191 t = force_fit_type_double (type, val, 1,
b8fca551 15192 (overflow | TREE_OVERFLOW (arg0))
d95787e6 15193 && !TYPE_UNSIGNED (type));
0bccc606
NS
15194 break;
15195 }
3e6688a7 15196
0bccc606 15197 case REAL_CST:
d49b6e1e 15198 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
0bccc606 15199 break;
d1822754 15200
325217ed
CF
15201 case FIXED_CST:
15202 {
15203 FIXED_VALUE_TYPE f;
15204 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15205 &(TREE_FIXED_CST (arg0)), NULL,
15206 TYPE_SATURATING (type));
15207 t = build_fixed (type, f);
15208 /* Propagate overflow flags. */
15209 if (overflow_p | TREE_OVERFLOW (arg0))
28ddeea1 15210 TREE_OVERFLOW (t) = 1;
325217ed
CF
15211 break;
15212 }
15213
0bccc606
NS
15214 default:
15215 gcc_unreachable ();
15216 }
3e6688a7 15217
33d13fac
KH
15218 return t;
15219}
15220
73c4ab99
KH
15221/* Return the tree for abs (ARG0) when ARG0 is known to be either
15222 an integer constant or real constant.
15223
15224 TYPE is the type of the result. */
15225
9655d83b 15226tree
73c4ab99
KH
15227fold_abs_const (tree arg0, tree type)
15228{
15229 tree t = NULL_TREE;
15230
0bccc606 15231 switch (TREE_CODE (arg0))
73c4ab99 15232 {
0bccc606 15233 case INTEGER_CST:
9589f23e
AS
15234 {
15235 double_int val = tree_to_double_int (arg0);
15236
15237 /* If the value is unsigned or non-negative, then the absolute value
15238 is the same as the ordinary value. */
15239 if (TYPE_UNSIGNED (type)
15240 || !double_int_negative_p (val))
15241 t = arg0;
15242
15243 /* If the value is negative, then the absolute value is
15244 its negation. */
15245 else
15246 {
15247 int overflow;
15248
15249 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15250 t = force_fit_type_double (type, val, -1,
15251 overflow | TREE_OVERFLOW (arg0));
15252 }
15253 }
0bccc606 15254 break;
3e6688a7 15255
0bccc606 15256 case REAL_CST:
73c4ab99 15257 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
d49b6e1e 15258 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
73c4ab99 15259 else
0bccc606
NS
15260 t = arg0;
15261 break;
3e6688a7 15262
0bccc606
NS
15263 default:
15264 gcc_unreachable ();
73c4ab99 15265 }
3e6688a7 15266
73c4ab99
KH
15267 return t;
15268}
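/* Illustrative sketch, not part of fold-const.c: the overflow corner case
   that fold_negate_const and fold_abs_const must flag.  In two's complement
   the most negative value has no positive counterpart, so negating it (and
   hence taking its absolute value) wraps around; the folders record this
   through TREE_OVERFLOW.  The helper name below is hypothetical.  */
#include <limits.h>

static int
negation_overflows_p (int v)
{
  /* -INT_MIN and |INT_MIN| are not representable in int.  */
  return v == INT_MIN;
}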
15269
a653e758
RS
15270/* Return the tree for not (ARG0) when ARG0 is known to be an integer
15271 constant. TYPE is the type of the result. */
15272
15273static tree
9589f23e 15274fold_not_const (const_tree arg0, tree type)
a653e758 15275{
9589f23e 15276 double_int val;
a653e758 15277
0bccc606 15278 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
3e6688a7 15279
9589f23e
AS
15280 val = double_int_not (tree_to_double_int (arg0));
15281 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
a653e758
RS
15282}
15283
8e7b3a43
KH
15284/* Given CODE, a relational operator, the target type, TYPE and two
15285 constant operands OP0 and OP1, return the result of the
15286 relational operation. If the result is not a compile time
15287 constant, then return NULL_TREE. */
15288
15289static tree
15290fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15291{
1382f0f0 15292 int result, invert;
8e7b3a43
KH
15293
15294 /* From here on, the only cases we handle are when the result is
ee8db92b
RS
15295 known to be a constant. */
15296
15297 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15298 {
adb8e07e
RS
15299 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15300 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15301
ee8db92b 15302 /* Handle the cases where either operand is a NaN. */
adb8e07e 15303 if (real_isnan (c0) || real_isnan (c1))
ee8db92b
RS
15304 {
15305 switch (code)
15306 {
15307 case EQ_EXPR:
15308 case ORDERED_EXPR:
15309 result = 0;
15310 break;
15311
15312 case NE_EXPR:
15313 case UNORDERED_EXPR:
15314 case UNLT_EXPR:
15315 case UNLE_EXPR:
15316 case UNGT_EXPR:
15317 case UNGE_EXPR:
15318 case UNEQ_EXPR:
15319 result = 1;
15320 break;
15321
15322 case LT_EXPR:
15323 case LE_EXPR:
15324 case GT_EXPR:
15325 case GE_EXPR:
15326 case LTGT_EXPR:
15327 if (flag_trapping_math)
15328 return NULL_TREE;
15329 result = 0;
15330 break;
15331
15332 default:
0bccc606 15333 gcc_unreachable ();
ee8db92b
RS
15334 }
15335
15336 return constant_boolean_node (result, type);
15337 }
15338
adb8e07e 15339 return constant_boolean_node (real_compare (code, c0, c1), type);
ee8db92b
RS
15340 }
15341
325217ed
CF
15342 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15343 {
15344 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15345 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15346 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15347 }
15348
23b9463b
RS
15349 /* Handle equality/inequality of complex constants. */
15350 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15351 {
15352 tree rcond = fold_relational_const (code, type,
15353 TREE_REALPART (op0),
15354 TREE_REALPART (op1));
15355 tree icond = fold_relational_const (code, type,
15356 TREE_IMAGPART (op0),
15357 TREE_IMAGPART (op1));
15358 if (code == EQ_EXPR)
15359 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15360 else if (code == NE_EXPR)
15361 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15362 else
15363 return NULL_TREE;
15364 }
15365
ee8db92b 15366 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
8e7b3a43
KH
15367
15368 To compute GT, swap the arguments and do LT.
15369 To compute GE, do LT and invert the result.
15370 To compute LE, swap the arguments, do LT and invert the result.
15371 To compute NE, do EQ and invert the result.
15372
15373 Therefore, the code below must handle only EQ and LT. */
15374
15375 if (code == LE_EXPR || code == GT_EXPR)
15376 {
1382f0f0
RS
15377 tree tem = op0;
15378 op0 = op1;
15379 op1 = tem;
8e7b3a43
KH
15380 code = swap_tree_comparison (code);
15381 }
15382
15383 /* Note that it is safe to invert for real values here because we
ee8db92b 15384 have already handled the one case that it matters. */
8e7b3a43 15385
8e7b3a43
KH
15386 invert = 0;
15387 if (code == NE_EXPR || code == GE_EXPR)
15388 {
15389 invert = 1;
d1a7edaf 15390 code = invert_tree_comparison (code, false);
8e7b3a43
KH
15391 }
15392
15393 /* Compute a result for LT or EQ if args permit;
 15394     otherwise return NULL_TREE.  */
15395 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15396 {
15397 if (code == EQ_EXPR)
1382f0f0
RS
15398 result = tree_int_cst_equal (op0, op1);
15399 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15400 result = INT_CST_LT_UNSIGNED (op0, op1);
8e7b3a43 15401 else
1382f0f0 15402 result = INT_CST_LT (op0, op1);
8e7b3a43 15403 }
1382f0f0 15404 else
8e7b3a43
KH
15405 return NULL_TREE;
15406
15407 if (invert)
1382f0f0
RS
15408 result ^= 1;
15409 return constant_boolean_node (result, type);
8e7b3a43
KH
15410}
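/* Illustrative sketch, not part of fold-const.c: the canonicalization used
   by fold_relational_const, reduced to plain integers.  Every ordered
   comparison is computed from LT and EQ by swapping the operands (LE, GT)
   and/or inverting the result (NE, LE, GE).  The OP encoding below is
   hypothetical and local to this sketch.  */
static int
compare_via_lt_and_eq (long a, long b, int op)
{
  /* op: 0 = EQ, 1 = NE, 2 = LT, 3 = LE, 4 = GT, 5 = GE.  */
  int swap = (op == 3 || op == 4);               /* LE, GT: swap operands.  */
  int invert = (op == 1 || op == 3 || op == 5);  /* NE, LE, GE: invert.     */
  long x = swap ? b : a;
  long y = swap ? a : b;
  int result = (op == 0 || op == 1) ? (x == y) : (x < y);
  return invert ? !result : result;
}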
15411
3a687f8b
MM
15412/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15413 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15414 itself. */
0ad28dde
AP
15415
15416tree
15417fold_build_cleanup_point_expr (tree type, tree expr)
15418{
15419 /* If the expression does not have side effects then we don't have to wrap
15420 it with a cleanup point expression. */
15421 if (!TREE_SIDE_EFFECTS (expr))
15422 return expr;
0e256a82
AP
15423
 15424  /* If the expression is a return, check whether the expression inside the
 15425     return, or the right-hand side of the modify expression inside the
 15426     return, has no side effects.  If either has no side effects, we don't
 15427     need to wrap the expression in a cleanup point expression.  Note we don't
 15428     check the left-hand side of the modify because it should always be a return decl.  */
15429 if (TREE_CODE (expr) == RETURN_EXPR)
15430 {
15431 tree op = TREE_OPERAND (expr, 0);
15432 if (!op || !TREE_SIDE_EFFECTS (op))
15433 return expr;
15434 op = TREE_OPERAND (op, 1);
15435 if (!TREE_SIDE_EFFECTS (op))
15436 return expr;
15437 }
b8698a0f 15438
0ad28dde
AP
15439 return build1 (CLEANUP_POINT_EXPR, type, expr);
15440}
15441
30d2e943
RG
15442/* Given a pointer value OP0 and a type TYPE, return a simplified version
15443 of an indirection through OP0, or NULL_TREE if no simplification is
15444 possible. */
cd3ce9b4 15445
095ecc24 15446tree
db3927fb 15447fold_indirect_ref_1 (location_t loc, tree type, tree op0)
cd3ce9b4 15448{
30d2e943 15449 tree sub = op0;
cd3ce9b4
JM
15450 tree subtype;
15451
6033ae2a 15452 STRIP_NOPS (sub);
6a720599
JM
15453 subtype = TREE_TYPE (sub);
15454 if (!POINTER_TYPE_P (subtype))
15455 return NULL_TREE;
15456
cd3ce9b4
JM
15457 if (TREE_CODE (sub) == ADDR_EXPR)
15458 {
15459 tree op = TREE_OPERAND (sub, 0);
15460 tree optype = TREE_TYPE (op);
f9f63ff2
AP
15461 /* *&CONST_DECL -> to the value of the const decl. */
15462 if (TREE_CODE (op) == CONST_DECL)
15463 return DECL_INITIAL (op);
41b9109a 15464 /* *&p => p; make sure to handle *&"str"[cst] here. */
30d2e943 15465 if (type == optype)
41b9109a
RG
15466 {
15467 tree fop = fold_read_from_constant_string (op);
15468 if (fop)
15469 return fop;
15470 else
15471 return op;
15472 }
cd3ce9b4
JM
15473 /* *(foo *)&fooarray => fooarray[0] */
15474 else if (TREE_CODE (optype) == ARRAY_TYPE
30d2e943 15475 && type == TREE_TYPE (optype))
0d56ab33
AP
15476 {
15477 tree type_domain = TYPE_DOMAIN (optype);
15478 tree min_val = size_zero_node;
15479 if (type_domain && TYPE_MIN_VALUE (type_domain))
15480 min_val = TYPE_MIN_VALUE (type_domain);
db3927fb
AH
15481 op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15482 SET_EXPR_LOCATION (op0, loc);
15483 return op0;
0d56ab33 15484 }
4853940c
AP
15485 /* *(foo *)&complexfoo => __real__ complexfoo */
15486 else if (TREE_CODE (optype) == COMPLEX_TYPE
15487 && type == TREE_TYPE (optype))
db3927fb 15488 return fold_build1_loc (loc, REALPART_EXPR, type, op);
0890b981
AP
15489 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15490 else if (TREE_CODE (optype) == VECTOR_TYPE
15491 && type == TREE_TYPE (optype))
15492 {
15493 tree part_width = TYPE_SIZE (type);
15494 tree index = bitsize_int (0);
db3927fb 15495 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
0890b981 15496 }
cd3ce9b4
JM
15497 }
15498
a12bdb97
AP
15499 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15500 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15501 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
b8698a0f 15502 {
a12bdb97
AP
15503 tree op00 = TREE_OPERAND (sub, 0);
15504 tree op01 = TREE_OPERAND (sub, 1);
15505 tree op00type;
b8698a0f 15506
a12bdb97
AP
15507 STRIP_NOPS (op00);
15508 op00type = TREE_TYPE (op00);
15509 if (TREE_CODE (op00) == ADDR_EXPR
15510 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15511 && type == TREE_TYPE (TREE_TYPE (op00type)))
b8698a0f 15512 {
a12bdb97
AP
15513 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15514 tree part_width = TYPE_SIZE (type);
15515 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15516 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15517 tree index = bitsize_int (indexi);
15518
 15519	  if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
db3927fb
AH
15520 return fold_build3_loc (loc,
15521 BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
a12bdb97 15522 part_width, index);
b8698a0f 15523
a12bdb97
AP
15524 }
15525 }
15526
15527
4853940c 15528 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
5be014d5 15529 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4853940c
AP
15530 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15531 {
15532 tree op00 = TREE_OPERAND (sub, 0);
15533 tree op01 = TREE_OPERAND (sub, 1);
15534 tree op00type;
15535
15536 STRIP_NOPS (op00);
15537 op00type = TREE_TYPE (op00);
15538 if (TREE_CODE (op00) == ADDR_EXPR
15539 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15540 && type == TREE_TYPE (TREE_TYPE (op00type)))
15541 {
15542 tree size = TYPE_SIZE_UNIT (type);
15543 if (tree_int_cst_equal (size, op01))
db3927fb
AH
15544 return fold_build1_loc (loc, IMAGPART_EXPR, type,
15545 TREE_OPERAND (op00, 0));
4853940c
AP
15546 }
15547 }
b8698a0f 15548
cd3ce9b4 15549 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
cd3ce9b4 15550 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
30d2e943 15551 && type == TREE_TYPE (TREE_TYPE (subtype)))
cd3ce9b4 15552 {
0d56ab33
AP
15553 tree type_domain;
15554 tree min_val = size_zero_node;
db3927fb 15555 sub = build_fold_indirect_ref_loc (loc, sub);
0d56ab33
AP
15556 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15557 if (type_domain && TYPE_MIN_VALUE (type_domain))
15558 min_val = TYPE_MIN_VALUE (type_domain);
db3927fb
AH
15559 op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15560 SET_EXPR_LOCATION (op0, loc);
15561 return op0;
cd3ce9b4
JM
15562 }
15563
6a720599
JM
15564 return NULL_TREE;
15565}
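/* Illustrative sketch, not part of fold-const.c: source-level identities
   behind the ADDR_EXPR cases of fold_indirect_ref_1.  The complex-number
   checks assume the usual C99 layout (real part first, imaginary part one
   TYPE_SIZE_UNIT later); the function name is hypothetical.  */
#include <assert.h>
#include <complex.h>

static void
indirect_ref_examples (void)
{
  int arr[4] = { 10, 20, 30, 40 };
  float _Complex z = 1.0f + 2.0f * I;

  assert (*&arr[2] == arr[2]);           /* *&p => p                          */
  assert (*(int *) &arr == arr[0]);      /* *(foo *)&fooarray => fooarray[0]  */
  assert (*(float *) &z == crealf (z));  /* *(foo *)&complexfoo => __real__   */
  assert (*((float *) &z + 1) == cimagf (z));  /* one element later => __imag__ */
}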
15566
15567/* Builds an expression for an indirection through T, simplifying some
15568 cases. */
15569
15570tree
db3927fb 15571build_fold_indirect_ref_loc (location_t loc, tree t)
6a720599 15572{
30d2e943 15573 tree type = TREE_TYPE (TREE_TYPE (t));
db3927fb 15574 tree sub = fold_indirect_ref_1 (loc, type, t);
6a720599
JM
15575
15576 if (sub)
15577 return sub;
db3927fb
AH
15578
15579 t = build1 (INDIRECT_REF, type, t);
15580 SET_EXPR_LOCATION (t, loc);
15581 return t;
6a720599
JM
15582}
15583
15584/* Given an INDIRECT_REF T, return either T or a simplified version. */
15585
15586tree
db3927fb 15587fold_indirect_ref_loc (location_t loc, tree t)
6a720599 15588{
db3927fb 15589 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
6a720599
JM
15590
15591 if (sub)
15592 return sub;
15593 else
15594 return t;
cd3ce9b4
JM
15595}
15596
9675412f
RS
15597/* Strip non-trapping, non-side-effecting tree nodes from an expression
15598 whose result is ignored. The type of the returned tree need not be
15599 the same as the original expression. */
15600
15601tree
15602fold_ignored_result (tree t)
15603{
15604 if (!TREE_SIDE_EFFECTS (t))
15605 return integer_zero_node;
15606
15607 for (;;)
15608 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15609 {
6615c446 15610 case tcc_unary:
9675412f
RS
15611 t = TREE_OPERAND (t, 0);
15612 break;
15613
6615c446
JO
15614 case tcc_binary:
15615 case tcc_comparison:
9675412f
RS
15616 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15617 t = TREE_OPERAND (t, 0);
15618 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15619 t = TREE_OPERAND (t, 1);
15620 else
15621 return t;
15622 break;
15623
6615c446 15624 case tcc_expression:
9675412f
RS
15625 switch (TREE_CODE (t))
15626 {
15627 case COMPOUND_EXPR:
15628 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15629 return t;
15630 t = TREE_OPERAND (t, 0);
15631 break;
15632
15633 case COND_EXPR:
15634 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15635 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15636 return t;
15637 t = TREE_OPERAND (t, 0);
15638 break;
15639
15640 default:
15641 return t;
15642 }
15643 break;
15644
15645 default:
15646 return t;
15647 }
15648}
15649
15931954
RH
15650/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15651 This can only be applied to objects of a sizetype. */
15652
15653tree
db3927fb 15654round_up_loc (location_t loc, tree value, int divisor)
15931954 15655{
0a936b12 15656 tree div = NULL_TREE;
15931954 15657
0bccc606 15658 gcc_assert (divisor > 0);
15931954
RH
15659 if (divisor == 1)
15660 return value;
15661
15931954 15662 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
0a936b12
NS
15663 have to do anything. Only do this when we are not given a const,
15664 because in that case, this check is more expensive than just
8c27b7d4 15665 doing it. */
0a936b12
NS
15666 if (TREE_CODE (value) != INTEGER_CST)
15667 {
ce552f75 15668 div = build_int_cst (TREE_TYPE (value), divisor);
0a936b12
NS
15669
15670 if (multiple_of_p (TREE_TYPE (value), value, div))
15671 return value;
15672 }
15931954
RH
15673
15674 /* If divisor is a power of two, simplify this to bit manipulation. */
15675 if (divisor == (divisor & -divisor))
15676 {
74890d7b
RS
15677 if (TREE_CODE (value) == INTEGER_CST)
15678 {
9589f23e 15679 double_int val = tree_to_double_int (value);
bcf52d7b 15680 bool overflow_p;
74890d7b 15681
9589f23e 15682 if ((val.low & (divisor - 1)) == 0)
74890d7b
RS
15683 return value;
15684
bcf52d7b 15685 overflow_p = TREE_OVERFLOW (value);
9589f23e
AS
15686 val.low &= ~(divisor - 1);
15687 val.low += divisor;
15688 if (val.low == 0)
74890d7b 15689 {
9589f23e
AS
15690 val.high++;
15691 if (val.high == 0)
bcf52d7b 15692 overflow_p = true;
74890d7b 15693 }
bcf52d7b 15694
9589f23e 15695 return force_fit_type_double (TREE_TYPE (value), val,
bcf52d7b 15696 -1, overflow_p);
74890d7b
RS
15697 }
15698 else
15699 {
bcf52d7b
RS
15700 tree t;
15701
74890d7b 15702 t = build_int_cst (TREE_TYPE (value), divisor - 1);
db3927fb 15703 value = size_binop_loc (loc, PLUS_EXPR, value, t);
74890d7b 15704 t = build_int_cst (TREE_TYPE (value), -divisor);
db3927fb 15705 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
74890d7b 15706 }
15931954
RH
15707 }
15708 else
15709 {
0a936b12 15710 if (!div)
ce552f75 15711 div = build_int_cst (TREE_TYPE (value), divisor);
db3927fb
AH
15712 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15713 value = size_binop_loc (loc, MULT_EXPR, value, div);
15931954
RH
15714 }
15715
15716 return value;
15717}
15718
15719/* Likewise, but round down. */
15720
15721tree
db3927fb 15722round_down_loc (location_t loc, tree value, int divisor)
15931954 15723{
0a936b12 15724 tree div = NULL_TREE;
15931954 15725
0bccc606 15726 gcc_assert (divisor > 0);
15931954
RH
15727 if (divisor == 1)
15728 return value;
15729
15931954 15730 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
0a936b12
NS
15731 have to do anything. Only do this when we are not given a const,
15732 because in that case, this check is more expensive than just
8c27b7d4 15733 doing it. */
0a936b12
NS
15734 if (TREE_CODE (value) != INTEGER_CST)
15735 {
ce552f75 15736 div = build_int_cst (TREE_TYPE (value), divisor);
0a936b12
NS
15737
15738 if (multiple_of_p (TREE_TYPE (value), value, div))
15739 return value;
15740 }
15931954
RH
15741
15742 /* If divisor is a power of two, simplify this to bit manipulation. */
15743 if (divisor == (divisor & -divisor))
15744 {
0a936b12 15745 tree t;
3e6688a7 15746
7d60be94 15747 t = build_int_cst (TREE_TYPE (value), -divisor);
db3927fb 15748 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15931954
RH
15749 }
15750 else
15751 {
0a936b12 15752 if (!div)
ce552f75 15753 div = build_int_cst (TREE_TYPE (value), divisor);
db3927fb
AH
15754 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15755 value = size_binop_loc (loc, MULT_EXPR, value, div);
15931954
RH
15756 }
15757
15758 return value;
15759}
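/* Illustrative sketch, not part of fold-const.c: the power-of-two fast
   paths of round_up_loc and round_down_loc on plain unsigned arithmetic.
   The PLUS_EXPR/BIT_AND_EXPR pairs built above compute exactly these
   expressions.  The helper names are hypothetical and assume DIVISOR is a
   power of two, as guarded by divisor == (divisor & -divisor).  */
static unsigned long
round_up_pow2 (unsigned long value, unsigned long divisor)
{
  return (value + divisor - 1) & -divisor;
}

static unsigned long
round_down_pow2 (unsigned long value, unsigned long divisor)
{
  return value & -divisor;
}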
2f4675b4 15760
7299dbfb
ZD
15761/* Returns the pointer to the base of the object addressed by EXP and
15762 extracts the information about the offset of the access, storing it
 15763   in PBITPOS and POFFSET.  */
15764
15765static tree
15766split_address_to_core_and_offset (tree exp,
15767 HOST_WIDE_INT *pbitpos, tree *poffset)
15768{
15769 tree core;
15770 enum machine_mode mode;
15771 int unsignedp, volatilep;
15772 HOST_WIDE_INT bitsize;
db3927fb 15773 location_t loc = EXPR_LOCATION (exp);
7299dbfb
ZD
15774
15775 if (TREE_CODE (exp) == ADDR_EXPR)
15776 {
15777 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
2614034e
EB
15778 poffset, &mode, &unsignedp, &volatilep,
15779 false);
db3927fb 15780 core = build_fold_addr_expr_loc (loc, core);
7299dbfb
ZD
15781 }
15782 else
15783 {
15784 core = exp;
15785 *pbitpos = 0;
15786 *poffset = NULL_TREE;
15787 }
15788
15789 return core;
15790}
15791
2f4675b4 15792/* Returns true if addresses of E1 and E2 differ by a constant, false
7299dbfb 15793 otherwise. If they do, E1 - E2 is stored in *DIFF. */
2f4675b4
ZD
15794
15795bool
15796ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15797{
15798 tree core1, core2;
2f4675b4
ZD
15799 HOST_WIDE_INT bitpos1, bitpos2;
15800 tree toffset1, toffset2, tdiff, type;
3e6688a7 15801
7299dbfb
ZD
15802 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15803 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
2f4675b4
ZD
15804
15805 if (bitpos1 % BITS_PER_UNIT != 0
15806 || bitpos2 % BITS_PER_UNIT != 0
15807 || !operand_equal_p (core1, core2, 0))
15808 return false;
15809
15810 if (toffset1 && toffset2)
15811 {
15812 type = TREE_TYPE (toffset1);
15813 if (type != TREE_TYPE (toffset2))
15814 toffset2 = fold_convert (type, toffset2);
15815
7f20a5b7 15816 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
87de2376 15817 if (!cst_and_fits_in_hwi (tdiff))
2f4675b4
ZD
15818 return false;
15819
87de2376 15820 *diff = int_cst_value (tdiff);
2f4675b4
ZD
15821 }
15822 else if (toffset1 || toffset2)
15823 {
15824 /* If only one of the offsets is non-constant, the difference cannot
15825 be a constant. */
15826 return false;
15827 }
15828 else
15829 *diff = 0;
15830
15831 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15832 return true;
15833}
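/* Illustrative sketch, not part of fold-const.c: the kind of address
   difference ptr_difference_const recognizes.  Two addresses over the same
   base object whose offsets are compile-time constants differ by a constant
   number of bytes.  The function name is hypothetical.  */
#include <assert.h>
#include <stddef.h>

static void
ptr_difference_example (void)
{
  int field[8];

  /* Same core object, constant byte offsets: E1 - E2 is a constant.  */
  ptrdiff_t diff = (char *) &field[5] - (char *) &field[2];
  assert (diff == 3 * (ptrdiff_t) sizeof (int));
}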
e3bb43c0
RS
15834
15835/* Simplify the floating point expression EXP when the sign of the
15836 result is not significant. Return NULL_TREE if no simplification
15837 is possible. */
15838
15839tree
15840fold_strip_sign_ops (tree exp)
15841{
15842 tree arg0, arg1;
db3927fb 15843 location_t loc = EXPR_LOCATION (exp);
e3bb43c0
RS
15844
15845 switch (TREE_CODE (exp))
15846 {
15847 case ABS_EXPR:
15848 case NEGATE_EXPR:
15849 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15850 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15851
15852 case MULT_EXPR:
15853 case RDIV_EXPR:
15854 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15855 return NULL_TREE;
15856 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15857 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15858 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
db3927fb 15859 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
7f20a5b7
KH
15860 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15861 arg1 ? arg1 : TREE_OPERAND (exp, 1));
e3bb43c0
RS
15862 break;
15863
b7e85170
KG
15864 case COMPOUND_EXPR:
15865 arg0 = TREE_OPERAND (exp, 0);
15866 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15867 if (arg1)
db3927fb 15868 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
b7e85170 15869 break;
b8698a0f 15870
b7e85170
KG
15871 case COND_EXPR:
15872 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15873 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15874 if (arg0 || arg1)
db3927fb
AH
15875 return fold_build3_loc (loc,
15876 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
b7e85170
KG
15877 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15878 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15879 break;
b8698a0f 15880
b81e7144 15881 case CALL_EXPR:
6af46feb
KG
15882 {
15883 const enum built_in_function fcode = builtin_mathfn_code (exp);
15884 switch (fcode)
15885 {
15886 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15887 /* Strip copysign function call, return the 1st argument. */
5039610b
SL
15888 arg0 = CALL_EXPR_ARG (exp, 0);
15889 arg1 = CALL_EXPR_ARG (exp, 1);
db3927fb 15890 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
6af46feb
KG
15891
15892 default:
15893 /* Strip sign ops from the argument of "odd" math functions. */
15894 if (negate_mathfn_p (fcode))
15895 {
5039610b 15896 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
6af46feb 15897 if (arg0)
db3927fb 15898 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
6af46feb
KG
15899 }
15900 break;
b81e7144 15901 }
6af46feb 15902 }
b81e7144
KG
15903 break;
15904
e3bb43c0
RS
15905 default:
15906 break;
15907 }
15908 return NULL_TREE;
15909}
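/* Illustrative sketch, not part of fold-const.c: the class of rewrite that
   fold_strip_sign_ops enables.  When only the magnitude of a result matters
   (for example under fabs), sign-changing operations on the operands can be
   dropped; ignoring NaNs, fabs (-x * copysign (y, z)) equals fabs (x * y).
   The function name is hypothetical.  */
#include <assert.h>
#include <math.h>

static void
strip_sign_ops_example (double x, double y, double z)
{
  assert (fabs (-x * copysign (y, z)) == fabs (x * y));
}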