gcc/fold-const.c
6d716ca8 1/* Fold a constant sub-tree into a single node for C-compiler
080ea642 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
c75c517d 3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
d95787e6 4 Free Software Foundation, Inc.
6d716ca8 5
1322177d 6This file is part of GCC.
6d716ca8 7
1322177d
LB
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
9dcd6f09 10Software Foundation; either version 3, or (at your option) any later
1322177d 11version.
6d716ca8 12
1322177d
LB
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
6d716ca8
RS
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
6d716ca8 21
6dc42e49 22/*@@ This file should be rewritten to use an arbitrary precision
6d716ca8
RS
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
 26 @@ warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
9589f23e 30/* The entry points in this file are fold, size_int_wide and size_binop.
6d716ca8
RS
31
32 fold takes a tree as argument and returns a simplified tree.
33
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
37
38 size_int takes an integer value, and creates a tree constant
0da6f3db
DE
39 with type from `sizetype'.
40
07beea0d
AH
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
0da6f3db 44
e9a25f70 45#include "config.h"
2fde567e 46#include "system.h"
4977bab6
ZW
47#include "coretypes.h"
48#include "tm.h"
6d716ca8
RS
49#include "flags.h"
50#include "tree.h"
d49b6e1e 51#include "realmpfr.h"
efe3eb65 52#include "rtl.h"
0e9295cf 53#include "expr.h"
6baf1cc8 54#include "tm_p.h"
bd03c084 55#include "target.h"
718f9c0f 56#include "diagnostic-core.h"
6ac01510 57#include "intl.h"
a3770a81 58#include "ggc.h"
4c160717 59#include "hashtab.h"
43577e6b 60#include "langhooks.h"
5dfa45d0 61#include "md5.h"
726a989a 62#include "gimple.h"
70f34814 63#include "tree-flow.h"
6d716ca8 64
110abdbc 65/* Nonzero if we are folding constants inside an initializer; zero
63b48197
MS
66 otherwise. */
67int folding_initializer = 0;
68
d1a7edaf
PB
 69/* The following constants represent a bit-based encoding of GCC's
 70 comparison operators. This encoding simplifies transformations
 71 on relational comparisons, such as combining them with AND and OR. */
72enum comparison_code {
73 COMPCODE_FALSE = 0,
74 COMPCODE_LT = 1,
75 COMPCODE_EQ = 2,
76 COMPCODE_LE = 3,
77 COMPCODE_GT = 4,
78 COMPCODE_LTGT = 5,
79 COMPCODE_GE = 6,
80 COMPCODE_ORD = 7,
81 COMPCODE_UNORD = 8,
82 COMPCODE_UNLT = 9,
83 COMPCODE_UNEQ = 10,
84 COMPCODE_UNLE = 11,
85 COMPCODE_UNGT = 12,
86 COMPCODE_NE = 13,
87 COMPCODE_UNGE = 14,
88 COMPCODE_TRUE = 15
89};
90
05d362b8 91static bool negate_mathfn_p (enum built_in_function);
fa8db1f7
AJ
92static bool negate_expr_p (tree);
93static tree negate_expr (tree);
94static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
db3927fb 95static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
43a5d30b 96static tree const_binop (enum tree_code, tree, tree);
d1a7edaf
PB
97static enum comparison_code comparison_to_compcode (enum tree_code);
98static enum tree_code compcode_to_comparison (enum comparison_code);
fa8db1f7
AJ
99static int operand_equal_for_comparison_p (tree, tree, tree);
100static int twoval_comparison_p (tree, tree *, tree *, int *);
db3927fb
AH
101static tree eval_subst (location_t, tree, tree, tree, tree, tree);
102static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
103static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
104static tree make_bit_field_ref (location_t, tree, tree,
105 HOST_WIDE_INT, HOST_WIDE_INT, int);
106static tree optimize_bit_field_compare (location_t, enum tree_code,
107 tree, tree, tree);
108static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
109 HOST_WIDE_INT *,
fa8db1f7
AJ
110 enum machine_mode *, int *, int *,
111 tree *, tree *);
45dc13b9 112static int all_ones_mask_p (const_tree, int);
ac545c64
KG
113static tree sign_bit_p (tree, const_tree);
114static int simple_operand_p (const_tree);
fa8db1f7 115static tree range_binop (enum tree_code, tree, tree, int, tree, int);
f8fe0545
EB
116static tree range_predecessor (tree);
117static tree range_successor (tree);
a243fb4a 118extern tree make_range (tree, int *, tree *, tree *, bool *);
a243fb4a
MLI
119extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
120 tree, tree);
db3927fb
AH
121static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
122static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
fa8db1f7 123static tree unextend (tree, int, int, tree);
db3927fb
AH
124static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
125static tree optimize_minmax_comparison (location_t, enum tree_code,
126 tree, tree, tree);
6ac01510
ILT
127static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
128static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
db3927fb
AH
129static tree fold_binary_op_with_conditional_arg (location_t,
130 enum tree_code, tree,
e9da788c 131 tree, tree,
3b70b82a 132 tree, tree, int);
db3927fb
AH
133static tree fold_mathfn_compare (location_t,
134 enum built_in_function, enum tree_code,
fa8db1f7 135 tree, tree, tree);
db3927fb
AH
136static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
137static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
ac545c64 138static bool reorder_operands_p (const_tree, const_tree);
33d13fac 139static tree fold_negate_const (tree, tree);
9589f23e 140static tree fold_not_const (const_tree, tree);
8e7b3a43 141static tree fold_relational_const (enum tree_code, tree, tree, tree);
d1d1c602 142static tree fold_convert_const (enum tree_code, tree, tree);
78bf6e2f 143
33d13fac 144
c9019218
JJ
 145/* Similar to protected_set_expr_location, but never modify x in place;
 146 if the location can and needs to be set, unshare it. */
147
148static inline tree
149protected_set_expr_location_unshare (tree x, location_t loc)
150{
151 if (CAN_HAVE_LOCATION_P (x)
152 && EXPR_LOCATION (x) != loc
153 && !(TREE_CODE (x) == SAVE_EXPR
154 || TREE_CODE (x) == TARGET_EXPR
155 || TREE_CODE (x) == BIND_EXPR))
156 {
157 x = copy_node (x);
158 SET_EXPR_LOCATION (x, loc);
159 }
160 return x;
161}
162
163
d4b60170
RK
164/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
165 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
166 and SUM1. Then this yields nonzero if overflow occurred during the
167 addition.
168
169 Overflow occurs if A and B have the same sign, but A and SUM differ in
170 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
171 sign. */
172#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
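/* Illustrative sketch, not part of the original source: with 32-bit
   signed operands, a = 0x7fffffff and b = 0x7fffffff wrap to
   sum = 0xfffffffe.  Then ~(a ^ b) is all ones and a ^ sum is
   0x80000001, so the masked value is negative and
   OVERFLOW_SUM_SIGN (a, b, sum) is nonzero; for a = 1, b = 2, sum = 3
   the sign bits agree and the macro yields 0.  */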
6d716ca8 173\f
03b0db0a
RG
174/* If ARG2 divides ARG1 with zero remainder, carries out the division
175 of type CODE and returns the quotient.
176 Otherwise returns NULL_TREE. */
177
108f6c2f 178tree
ac545c64 179div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
03b0db0a 180{
2bd1333d 181 double_int quo, rem;
793e86a7
RG
182 int uns;
183
 184 /* The sign of the division is taken from operand two, which
185 does the correct thing for POINTER_PLUS_EXPR where we want
186 a signed division. */
187 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
188 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
189 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
190 uns = false;
03b0db0a 191
2bd1333d
AS
192 quo = double_int_divmod (tree_to_double_int (arg1),
193 tree_to_double_int (arg2),
194 uns, code, &rem);
03b0db0a 195
2bd1333d
AS
196 if (double_int_zero_p (rem))
197 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
03b0db0a 198
2bd1333d 199 return NULL_TREE;
03b0db0a 200}
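/* Usage sketch, illustrative only and not part of the original source:
   exact divisions fold to a constant, inexact ones yield NULL_TREE.  */
#if 0
tree sixteen = build_int_cst (integer_type_node, 16);
tree four = build_int_cst (integer_type_node, 4);
tree five = build_int_cst (integer_type_node, 5);
tree q1 = div_if_zero_remainder (TRUNC_DIV_EXPR, sixteen, four);
/* q1 is the INTEGER_CST 4, since 16 % 4 == 0.  */
tree q2 = div_if_zero_remainder (TRUNC_DIV_EXPR, sixteen, five);
/* q2 is NULL_TREE, since 16 % 5 != 0.  */
#endif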
6d716ca8 201\f
110abdbc 202/* This is nonzero if we should defer warnings about undefined
6ac01510
ILT
203 overflow. This facility exists because these warnings are a
204 special case. The code to estimate loop iterations does not want
205 to issue any warnings, since it works with expressions which do not
206 occur in user code. Various bits of cleanup code call fold(), but
207 only use the result if it has certain characteristics (e.g., is a
208 constant); that code only wants to issue a warning if the result is
209 used. */
210
211static int fold_deferring_overflow_warnings;
212
213/* If a warning about undefined overflow is deferred, this is the
214 warning. Note that this may cause us to turn two warnings into
215 one, but that is fine since it is sufficient to only give one
216 warning per expression. */
217
218static const char* fold_deferred_overflow_warning;
219
220/* If a warning about undefined overflow is deferred, this is the
221 level at which the warning should be emitted. */
222
223static enum warn_strict_overflow_code fold_deferred_overflow_code;
224
225/* Start deferring overflow warnings. We could use a stack here to
226 permit nested calls, but at present it is not necessary. */
227
228void
229fold_defer_overflow_warnings (void)
230{
231 ++fold_deferring_overflow_warnings;
232}
233
234/* Stop deferring overflow warnings. If there is a pending warning,
235 and ISSUE is true, then issue the warning if appropriate. STMT is
236 the statement with which the warning should be associated (used for
237 location information); STMT may be NULL. CODE is the level of the
238 warning--a warn_strict_overflow_code value. This function will use
239 the smaller of CODE and the deferred code when deciding whether to
240 issue the warning. CODE may be zero to mean to always use the
241 deferred code. */
242
243void
726a989a 244fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
6ac01510
ILT
245{
246 const char *warnmsg;
247 location_t locus;
248
249 gcc_assert (fold_deferring_overflow_warnings > 0);
250 --fold_deferring_overflow_warnings;
251 if (fold_deferring_overflow_warnings > 0)
252 {
253 if (fold_deferred_overflow_warning != NULL
254 && code != 0
255 && code < (int) fold_deferred_overflow_code)
32e8bb8e 256 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
6ac01510
ILT
257 return;
258 }
259
260 warnmsg = fold_deferred_overflow_warning;
261 fold_deferred_overflow_warning = NULL;
262
263 if (!issue || warnmsg == NULL)
264 return;
265
726a989a 266 if (gimple_no_warning_p (stmt))
e233ac97
ILT
267 return;
268
6ac01510
ILT
269 /* Use the smallest code level when deciding to issue the
270 warning. */
271 if (code == 0 || code > (int) fold_deferred_overflow_code)
272 code = fold_deferred_overflow_code;
273
274 if (!issue_strict_overflow_warning (code))
275 return;
276
726a989a 277 if (stmt == NULL)
6ac01510
ILT
278 locus = input_location;
279 else
726a989a 280 locus = gimple_location (stmt);
fab922b1 281 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
6ac01510
ILT
282}
283
284/* Stop deferring overflow warnings, ignoring any deferred
285 warnings. */
286
287void
288fold_undefer_and_ignore_overflow_warnings (void)
289{
726a989a 290 fold_undefer_overflow_warnings (false, NULL, 0);
6ac01510
ILT
291}
292
293/* Whether we are deferring overflow warnings. */
294
295bool
296fold_deferring_overflow_warnings_p (void)
297{
298 return fold_deferring_overflow_warnings > 0;
299}
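/* Usage sketch, illustrative only and not part of the original source:
   a caller brackets speculative folding with defer/undefer so that a
   -Wstrict-overflow warning is only issued if the folded result is
   actually used.  RESULT_IS_USED, STMT, TYPE, OP0 and OP1 are
   placeholders here.  */
#if 0
fold_defer_overflow_warnings ();
tree folded = fold_binary (PLUS_EXPR, type, op0, op1);
if (folded && RESULT_IS_USED (folded))
  /* Emit any deferred warning at STMT's location.  */
  fold_undefer_overflow_warnings (true, stmt, 0);
else
  fold_undefer_and_ignore_overflow_warnings ();
#endif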
300
301/* This is called when we fold something based on the fact that signed
302 overflow is undefined. */
303
304static void
305fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
306{
6ac01510
ILT
307 if (fold_deferring_overflow_warnings > 0)
308 {
309 if (fold_deferred_overflow_warning == NULL
310 || wc < fold_deferred_overflow_code)
311 {
312 fold_deferred_overflow_warning = gmsgid;
313 fold_deferred_overflow_code = wc;
314 }
315 }
316 else if (issue_strict_overflow_warning (wc))
317 warning (OPT_Wstrict_overflow, gmsgid);
318}
319\f
dd6f2a43
VR
320/* Return true if the built-in mathematical function specified by CODE
321 is odd, i.e. -f(x) == f(-x). */
05d362b8
RS
322
323static bool
324negate_mathfn_p (enum built_in_function code)
325{
326 switch (code)
327 {
ea6a6627
VR
328 CASE_FLT_FN (BUILT_IN_ASIN):
329 CASE_FLT_FN (BUILT_IN_ASINH):
330 CASE_FLT_FN (BUILT_IN_ATAN):
331 CASE_FLT_FN (BUILT_IN_ATANH):
4b26d10b
KG
332 CASE_FLT_FN (BUILT_IN_CASIN):
333 CASE_FLT_FN (BUILT_IN_CASINH):
334 CASE_FLT_FN (BUILT_IN_CATAN):
335 CASE_FLT_FN (BUILT_IN_CATANH):
ea6a6627 336 CASE_FLT_FN (BUILT_IN_CBRT):
4b26d10b
KG
337 CASE_FLT_FN (BUILT_IN_CPROJ):
338 CASE_FLT_FN (BUILT_IN_CSIN):
339 CASE_FLT_FN (BUILT_IN_CSINH):
340 CASE_FLT_FN (BUILT_IN_CTAN):
341 CASE_FLT_FN (BUILT_IN_CTANH):
5c5b2155
KG
342 CASE_FLT_FN (BUILT_IN_ERF):
343 CASE_FLT_FN (BUILT_IN_LLROUND):
344 CASE_FLT_FN (BUILT_IN_LROUND):
345 CASE_FLT_FN (BUILT_IN_ROUND):
ea6a6627
VR
346 CASE_FLT_FN (BUILT_IN_SIN):
347 CASE_FLT_FN (BUILT_IN_SINH):
348 CASE_FLT_FN (BUILT_IN_TAN):
349 CASE_FLT_FN (BUILT_IN_TANH):
5c5b2155 350 CASE_FLT_FN (BUILT_IN_TRUNC):
05d362b8
RS
351 return true;
352
5c5b2155
KG
353 CASE_FLT_FN (BUILT_IN_LLRINT):
354 CASE_FLT_FN (BUILT_IN_LRINT):
355 CASE_FLT_FN (BUILT_IN_NEARBYINT):
356 CASE_FLT_FN (BUILT_IN_RINT):
357 return !flag_rounding_math;
b8698a0f 358
05d362b8
RS
359 default:
360 break;
361 }
362 return false;
363}
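/* Illustrative example, not part of the original source: sin is odd,
   i.e. sin(-x) == -sin(x), so BUILT_IN_SIN appears in the list above
   and -sin(x) may be folded to sin(-x).  cos is even, cos(-x) == cos(x),
   so BUILT_IN_COS is deliberately absent and -cos(x) is left alone.  */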
364
82b85a85
ZD
365/* Check whether we may negate an integer constant T without causing
366 overflow. */
367
368bool
fa233e34 369may_negate_without_overflow_p (const_tree t)
82b85a85
ZD
370{
371 unsigned HOST_WIDE_INT val;
372 unsigned int prec;
373 tree type;
374
0bccc606 375 gcc_assert (TREE_CODE (t) == INTEGER_CST);
82b85a85
ZD
376
377 type = TREE_TYPE (t);
378 if (TYPE_UNSIGNED (type))
379 return false;
380
381 prec = TYPE_PRECISION (type);
382 if (prec > HOST_BITS_PER_WIDE_INT)
383 {
384 if (TREE_INT_CST_LOW (t) != 0)
385 return true;
386 prec -= HOST_BITS_PER_WIDE_INT;
387 val = TREE_INT_CST_HIGH (t);
388 }
389 else
390 val = TREE_INT_CST_LOW (t);
391 if (prec < HOST_BITS_PER_WIDE_INT)
392 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
393 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
394}
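/* Illustrative example, not part of the original source: for a 32-bit
   signed type the only value this rejects is the most negative one,
   -2147483648, whose negation does not fit the type; -2147483647 and
   every other constant negate without overflow.  Unsigned types always
   yield false.  */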
395
080ea642 396/* Determine whether an expression T can be cheaply negated using
1af8dcbf 397 the function negate_expr without introducing undefined overflow. */
080ea642
RS
398
399static bool
fa8db1f7 400negate_expr_p (tree t)
080ea642 401{
080ea642
RS
402 tree type;
403
404 if (t == 0)
405 return false;
406
407 type = TREE_TYPE (t);
408
409 STRIP_SIGN_NOPS (t);
410 switch (TREE_CODE (t))
411 {
412 case INTEGER_CST:
eeef0e45 413 if (TYPE_OVERFLOW_WRAPS (type))
05d362b8 414 return true;
080ea642
RS
415
416 /* Check that -CST will not overflow type. */
82b85a85 417 return may_negate_without_overflow_p (t);
189d4130 418 case BIT_NOT_EXPR:
eeef0e45
ILT
419 return (INTEGRAL_TYPE_P (type)
420 && TYPE_OVERFLOW_WRAPS (type));
080ea642 421
325217ed 422 case FIXED_CST:
080ea642 423 case NEGATE_EXPR:
080ea642
RS
424 return true;
425
4e62a017
RG
426 case REAL_CST:
427 /* We want to canonicalize to positive real constants. Pretend
428 that only negative ones can be easily negated. */
429 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
430
05d362b8
RS
431 case COMPLEX_CST:
432 return negate_expr_p (TREE_REALPART (t))
433 && negate_expr_p (TREE_IMAGPART (t));
434
1aeef526
KG
435 case COMPLEX_EXPR:
436 return negate_expr_p (TREE_OPERAND (t, 0))
437 && negate_expr_p (TREE_OPERAND (t, 1));
438
8fbbe90b
KG
439 case CONJ_EXPR:
440 return negate_expr_p (TREE_OPERAND (t, 0));
441
dfb36f9b 442 case PLUS_EXPR:
1b43b967
RS
443 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
444 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
dfb36f9b
RS
445 return false;
446 /* -(A + B) -> (-B) - A. */
447 if (negate_expr_p (TREE_OPERAND (t, 1))
448 && reorder_operands_p (TREE_OPERAND (t, 0),
449 TREE_OPERAND (t, 1)))
450 return true;
451 /* -(A + B) -> (-A) - B. */
452 return negate_expr_p (TREE_OPERAND (t, 0));
453
02a1994c
RS
454 case MINUS_EXPR:
455 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1b43b967
RS
456 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
457 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
05d362b8
RS
458 && reorder_operands_p (TREE_OPERAND (t, 0),
459 TREE_OPERAND (t, 1));
02a1994c 460
8ab49fef 461 case MULT_EXPR:
8df83eae 462 if (TYPE_UNSIGNED (TREE_TYPE (t)))
8ab49fef
RS
463 break;
464
465 /* Fall through. */
466
467 case RDIV_EXPR:
468 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
469 return negate_expr_p (TREE_OPERAND (t, 1))
470 || negate_expr_p (TREE_OPERAND (t, 0));
471 break;
472
965d7fa4
AP
473 case TRUNC_DIV_EXPR:
474 case ROUND_DIV_EXPR:
475 case FLOOR_DIV_EXPR:
476 case CEIL_DIV_EXPR:
477 case EXACT_DIV_EXPR:
6ac01510
ILT
478 /* In general we can't negate A / B, because if A is INT_MIN and
479 B is 1, we may turn this into INT_MIN / -1 which is undefined
480 and actually traps on some architectures. But if overflow is
481 undefined, we can negate, because - (INT_MIN / 1) is an
482 overflow. */
eeef0e45
ILT
483 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
484 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
965d7fa4
AP
485 break;
486 return negate_expr_p (TREE_OPERAND (t, 1))
487 || negate_expr_p (TREE_OPERAND (t, 0));
488
05d362b8
RS
489 case NOP_EXPR:
490 /* Negate -((double)float) as (double)(-float). */
491 if (TREE_CODE (type) == REAL_TYPE)
492 {
493 tree tem = strip_float_extensions (t);
494 if (tem != t)
495 return negate_expr_p (tem);
496 }
497 break;
498
499 case CALL_EXPR:
500 /* Negate -f(x) as f(-x). */
501 if (negate_mathfn_p (builtin_mathfn_code (t)))
5039610b 502 return negate_expr_p (CALL_EXPR_ARG (t, 0));
05d362b8
RS
503 break;
504
239a625e
RS
505 case RSHIFT_EXPR:
506 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
507 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
508 {
509 tree op1 = TREE_OPERAND (t, 1);
510 if (TREE_INT_CST_HIGH (op1) == 0
511 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
512 == TREE_INT_CST_LOW (op1))
513 return true;
514 }
515 break;
516
080ea642
RS
517 default:
518 break;
519 }
520 return false;
521}
522
1af8dcbf
RG
523/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
524 simplification is possible.
525 If negate_expr_p would return true for T, NULL_TREE will never be
526 returned. */
6d716ca8 527
1baa375f 528static tree
db3927fb 529fold_negate_expr (location_t loc, tree t)
1baa375f 530{
1af8dcbf 531 tree type = TREE_TYPE (t);
1baa375f
RK
532 tree tem;
533
1baa375f
RK
534 switch (TREE_CODE (t))
535 {
189d4130
AP
536 /* Convert - (~A) to A + 1. */
537 case BIT_NOT_EXPR:
1af8dcbf 538 if (INTEGRAL_TYPE_P (type))
db3927fb 539 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
189d4130 540 build_int_cst (type, 1));
8bce9e98 541 break;
b8698a0f 542
1baa375f 543 case INTEGER_CST:
33d13fac 544 tem = fold_negate_const (t, type);
ee7d8048 545 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
eeef0e45 546 || !TYPE_OVERFLOW_TRAPS (type))
1baa375f
RK
547 return tem;
548 break;
549
8ab49fef 550 case REAL_CST:
33d13fac 551 tem = fold_negate_const (t, type);
8ab49fef 552 /* Two's complement FP formats, such as c4x, may overflow. */
455f14dd 553 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1af8dcbf 554 return tem;
8ab49fef
RS
555 break;
556
325217ed
CF
557 case FIXED_CST:
558 tem = fold_negate_const (t, type);
559 return tem;
560
05d362b8
RS
561 case COMPLEX_CST:
562 {
563 tree rpart = negate_expr (TREE_REALPART (t));
564 tree ipart = negate_expr (TREE_IMAGPART (t));
565
566 if ((TREE_CODE (rpart) == REAL_CST
567 && TREE_CODE (ipart) == REAL_CST)
568 || (TREE_CODE (rpart) == INTEGER_CST
569 && TREE_CODE (ipart) == INTEGER_CST))
570 return build_complex (type, rpart, ipart);
571 }
572 break;
573
1aeef526
KG
574 case COMPLEX_EXPR:
575 if (negate_expr_p (t))
db3927fb
AH
576 return fold_build2_loc (loc, COMPLEX_EXPR, type,
577 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
578 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
1aeef526 579 break;
b8698a0f 580
8fbbe90b
KG
581 case CONJ_EXPR:
582 if (negate_expr_p (t))
db3927fb
AH
583 return fold_build1_loc (loc, CONJ_EXPR, type,
584 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
8fbbe90b
KG
585 break;
586
1baa375f 587 case NEGATE_EXPR:
1af8dcbf 588 return TREE_OPERAND (t, 0);
1baa375f 589
dfb36f9b 590 case PLUS_EXPR:
1b43b967
RS
591 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
592 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
dfb36f9b
RS
593 {
594 /* -(A + B) -> (-B) - A. */
595 if (negate_expr_p (TREE_OPERAND (t, 1))
596 && reorder_operands_p (TREE_OPERAND (t, 0),
597 TREE_OPERAND (t, 1)))
59ce6d6b
RS
598 {
599 tem = negate_expr (TREE_OPERAND (t, 1));
db3927fb 600 return fold_build2_loc (loc, MINUS_EXPR, type,
1af8dcbf 601 tem, TREE_OPERAND (t, 0));
59ce6d6b
RS
602 }
603
dfb36f9b
RS
604 /* -(A + B) -> (-A) - B. */
605 if (negate_expr_p (TREE_OPERAND (t, 0)))
59ce6d6b
RS
606 {
607 tem = negate_expr (TREE_OPERAND (t, 0));
db3927fb 608 return fold_build2_loc (loc, MINUS_EXPR, type,
1af8dcbf 609 tem, TREE_OPERAND (t, 1));
59ce6d6b 610 }
dfb36f9b
RS
611 }
612 break;
613
1baa375f
RK
614 case MINUS_EXPR:
615 /* - (A - B) -> B - A */
1b43b967
RS
616 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
617 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
05d362b8 618 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
db3927fb 619 return fold_build2_loc (loc, MINUS_EXPR, type,
1af8dcbf 620 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1baa375f
RK
621 break;
622
8ab49fef 623 case MULT_EXPR:
1af8dcbf 624 if (TYPE_UNSIGNED (type))
8ab49fef
RS
625 break;
626
627 /* Fall through. */
628
629 case RDIV_EXPR:
1af8dcbf 630 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
8ab49fef
RS
631 {
632 tem = TREE_OPERAND (t, 1);
633 if (negate_expr_p (tem))
db3927fb 634 return fold_build2_loc (loc, TREE_CODE (t), type,
1af8dcbf 635 TREE_OPERAND (t, 0), negate_expr (tem));
8ab49fef
RS
636 tem = TREE_OPERAND (t, 0);
637 if (negate_expr_p (tem))
db3927fb 638 return fold_build2_loc (loc, TREE_CODE (t), type,
1af8dcbf 639 negate_expr (tem), TREE_OPERAND (t, 1));
8ab49fef
RS
640 }
641 break;
642
965d7fa4
AP
643 case TRUNC_DIV_EXPR:
644 case ROUND_DIV_EXPR:
645 case FLOOR_DIV_EXPR:
646 case CEIL_DIV_EXPR:
647 case EXACT_DIV_EXPR:
6ac01510
ILT
648 /* In general we can't negate A / B, because if A is INT_MIN and
649 B is 1, we may turn this into INT_MIN / -1 which is undefined
650 and actually traps on some architectures. But if overflow is
651 undefined, we can negate, because - (INT_MIN / 1) is an
652 overflow. */
eeef0e45 653 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
965d7fa4 654 {
6ac01510
ILT
655 const char * const warnmsg = G_("assuming signed overflow does not "
656 "occur when negating a division");
965d7fa4
AP
657 tem = TREE_OPERAND (t, 1);
658 if (negate_expr_p (tem))
6ac01510
ILT
659 {
660 if (INTEGRAL_TYPE_P (type)
661 && (TREE_CODE (tem) != INTEGER_CST
662 || integer_onep (tem)))
663 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
db3927fb 664 return fold_build2_loc (loc, TREE_CODE (t), type,
6ac01510
ILT
665 TREE_OPERAND (t, 0), negate_expr (tem));
666 }
965d7fa4
AP
667 tem = TREE_OPERAND (t, 0);
668 if (negate_expr_p (tem))
6ac01510
ILT
669 {
670 if (INTEGRAL_TYPE_P (type)
671 && (TREE_CODE (tem) != INTEGER_CST
672 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
673 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
db3927fb 674 return fold_build2_loc (loc, TREE_CODE (t), type,
6ac01510
ILT
675 negate_expr (tem), TREE_OPERAND (t, 1));
676 }
965d7fa4
AP
677 }
678 break;
679
05d362b8
RS
680 case NOP_EXPR:
681 /* Convert -((double)float) into (double)(-float). */
682 if (TREE_CODE (type) == REAL_TYPE)
683 {
684 tem = strip_float_extensions (t);
685 if (tem != t && negate_expr_p (tem))
db3927fb 686 return fold_convert_loc (loc, type, negate_expr (tem));
05d362b8
RS
687 }
688 break;
689
690 case CALL_EXPR:
691 /* Negate -f(x) as f(-x). */
692 if (negate_mathfn_p (builtin_mathfn_code (t))
5039610b 693 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
05d362b8 694 {
5039610b 695 tree fndecl, arg;
05d362b8
RS
696
697 fndecl = get_callee_fndecl (t);
5039610b 698 arg = negate_expr (CALL_EXPR_ARG (t, 0));
db3927fb 699 return build_call_expr_loc (loc, fndecl, 1, arg);
05d362b8
RS
700 }
701 break;
702
239a625e
RS
703 case RSHIFT_EXPR:
704 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
705 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
706 {
707 tree op1 = TREE_OPERAND (t, 1);
708 if (TREE_INT_CST_HIGH (op1) == 0
709 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
710 == TREE_INT_CST_LOW (op1))
711 {
8df83eae 712 tree ntype = TYPE_UNSIGNED (type)
12753674 713 ? signed_type_for (type)
ca5ba2a3 714 : unsigned_type_for (type);
db3927fb
AH
715 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
716 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
717 return fold_convert_loc (loc, type, temp);
239a625e
RS
718 }
719 }
720 break;
721
1baa375f
RK
722 default:
723 break;
724 }
725
1af8dcbf
RG
726 return NULL_TREE;
727}
728
 729/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
 730 negated in a simpler way. Also allow T to be NULL_TREE, in which case
 731 return NULL_TREE. */
732
733static tree
734negate_expr (tree t)
735{
736 tree type, tem;
db3927fb 737 location_t loc;
1af8dcbf
RG
738
739 if (t == NULL_TREE)
740 return NULL_TREE;
741
db3927fb 742 loc = EXPR_LOCATION (t);
1af8dcbf
RG
743 type = TREE_TYPE (t);
744 STRIP_SIGN_NOPS (t);
745
db3927fb 746 tem = fold_negate_expr (loc, t);
1af8dcbf 747 if (!tem)
c9019218 748 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
db3927fb 749 return fold_convert_loc (loc, type, tem);
1baa375f
RK
750}
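/* Illustrative examples, not part of the original source: for a signed
   integer T = a - b (signed zeros and sign-dependent rounding are not
   honored there), fold_negate_expr rewrites -(a - b) as b - a; for
   T = x * 4 it negates the constant operand and returns x * -4.  When
   no simpler form applies, negate_expr wraps T in a NEGATE_EXPR.  */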
751\f
 752/* Split a tree IN into constant, literal and variable parts that could be
753 combined with CODE to make IN. "constant" means an expression with
754 TREE_CONSTANT but that isn't an actual constant. CODE must be a
755 commutative arithmetic operation. Store the constant part into *CONP,
cff27795 756 the literal in *LITP and return the variable part. If a part isn't
1baa375f
RK
757 present, set it to null. If the tree does not decompose in this way,
758 return the entire tree as the variable part and the other parts as null.
759
760 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
cff27795
EB
761 case, we negate an operand that was subtracted. Except if it is a
762 literal for which we use *MINUS_LITP instead.
763
764 If NEGATE_P is true, we are negating all of IN, again except a literal
765 for which we use *MINUS_LITP instead.
1baa375f
RK
766
767 If IN is itself a literal or constant, return it as appropriate.
768
769 Note that we do not guarantee that any of the three values will be the
770 same type as IN, but they will have the same signedness and mode. */
771
772static tree
75040a04
AJ
773split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
774 tree *minus_litp, int negate_p)
6d716ca8 775{
1baa375f
RK
776 tree var = 0;
777
6d716ca8 778 *conp = 0;
1baa375f 779 *litp = 0;
cff27795 780 *minus_litp = 0;
1baa375f 781
30f7a378 782 /* Strip any conversions that don't change the machine mode or signedness. */
1baa375f
RK
783 STRIP_SIGN_NOPS (in);
784
325217ed
CF
785 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
786 || TREE_CODE (in) == FIXED_CST)
1baa375f 787 *litp = in;
1baa375f 788 else if (TREE_CODE (in) == code
41bb1f06 789 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
325217ed 790 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1baa375f
RK
791 /* We can associate addition and subtraction together (even
792 though the C standard doesn't say so) for integers because
793 the value is not affected. For reals, the value might be
794 affected, so we can't. */
795 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
796 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
797 {
798 tree op0 = TREE_OPERAND (in, 0);
799 tree op1 = TREE_OPERAND (in, 1);
800 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
801 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
802
803 /* First see if either of the operands is a literal, then a constant. */
325217ed
CF
804 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
805 || TREE_CODE (op0) == FIXED_CST)
1baa375f 806 *litp = op0, op0 = 0;
325217ed
CF
807 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
808 || TREE_CODE (op1) == FIXED_CST)
1baa375f
RK
809 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
810
811 if (op0 != 0 && TREE_CONSTANT (op0))
812 *conp = op0, op0 = 0;
813 else if (op1 != 0 && TREE_CONSTANT (op1))
814 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
815
816 /* If we haven't dealt with either operand, this is not a case we can
30f7a378 817 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1baa375f
RK
818 if (op0 != 0 && op1 != 0)
819 var = in;
820 else if (op0 != 0)
821 var = op0;
822 else
823 var = op1, neg_var_p = neg1_p;
6d716ca8 824
1baa375f 825 /* Now do any needed negations. */
cff27795
EB
826 if (neg_litp_p)
827 *minus_litp = *litp, *litp = 0;
828 if (neg_conp_p)
829 *conp = negate_expr (*conp);
830 if (neg_var_p)
831 var = negate_expr (var);
1baa375f 832 }
1796dff4
RH
833 else if (TREE_CONSTANT (in))
834 *conp = in;
1baa375f
RK
835 else
836 var = in;
837
838 if (negate_p)
6d716ca8 839 {
cff27795
EB
840 if (*litp)
841 *minus_litp = *litp, *litp = 0;
842 else if (*minus_litp)
843 *litp = *minus_litp, *minus_litp = 0;
1baa375f 844 *conp = negate_expr (*conp);
cff27795 845 var = negate_expr (var);
6d716ca8 846 }
1baa375f
RK
847
848 return var;
849}
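/* Worked example, not part of the original source: with CODE == PLUS_EXPR,
   splitting IN = x + 3 stores 3 in *LITP, leaves *CONP and *MINUS_LITP
   null and returns the variable part x; splitting IN = y - 3 records the
   subtracted literal in *MINUS_LITP instead (so *MINUS_LITP = 3) and
   returns y.  */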
850
db3927fb
AH
851/* Re-associate trees split by the above function. T1 and T2 are
852 either expressions to associate or null. Return the new
853 expression, if any. LOC is the location of the new expression. If
cff27795 854 we build an operation, do it in TYPE and with CODE. */
1baa375f
RK
855
856static tree
db3927fb 857associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
1baa375f 858{
1baa375f
RK
859 if (t1 == 0)
860 return t2;
861 else if (t2 == 0)
862 return t1;
863
1baa375f
RK
864 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
865 try to fold this since we will have infinite recursion. But do
866 deal with any NEGATE_EXPRs. */
867 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
868 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
869 {
1bed5ee3
JJ
870 if (code == PLUS_EXPR)
871 {
872 if (TREE_CODE (t1) == NEGATE_EXPR)
c9019218
JJ
873 return build2_loc (loc, MINUS_EXPR, type,
874 fold_convert_loc (loc, type, t2),
875 fold_convert_loc (loc, type,
876 TREE_OPERAND (t1, 0)));
1bed5ee3 877 else if (TREE_CODE (t2) == NEGATE_EXPR)
c9019218
JJ
878 return build2_loc (loc, MINUS_EXPR, type,
879 fold_convert_loc (loc, type, t1),
880 fold_convert_loc (loc, type,
881 TREE_OPERAND (t2, 0)));
18522563 882 else if (integer_zerop (t2))
db3927fb 883 return fold_convert_loc (loc, type, t1);
1bed5ee3 884 }
18522563
ZD
885 else if (code == MINUS_EXPR)
886 {
887 if (integer_zerop (t2))
db3927fb 888 return fold_convert_loc (loc, type, t1);
18522563
ZD
889 }
890
c9019218
JJ
891 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
892 fold_convert_loc (loc, type, t2));
1baa375f
RK
893 }
894
db3927fb 895 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
c9019218 896 fold_convert_loc (loc, type, t2));
6d716ca8
RS
897}
898\f
000d8d44
RS
899/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
900 for use in int_const_binop, size_binop and size_diffop. */
901
902static bool
ac545c64 903int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
000d8d44
RS
904{
905 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
906 return false;
907 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
908 return false;
909
910 switch (code)
911 {
912 case LSHIFT_EXPR:
913 case RSHIFT_EXPR:
914 case LROTATE_EXPR:
915 case RROTATE_EXPR:
916 return true;
917
918 default:
919 break;
920 }
921
922 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
923 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
924 && TYPE_MODE (type1) == TYPE_MODE (type2);
925}
926
927
e9a25f70 928/* Combine two integer constants ARG1 and ARG2 under operation CODE
fd6c76f4
RS
929 to produce a new constant. Return NULL_TREE if we don't know how
930 to evaluate CODE at compile-time.
91d33e36 931
4c160717 932 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
6d716ca8 933
6de9cd9a 934tree
fa233e34 935int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
6d716ca8 936{
fd7de64c 937 double_int op1, op2, res, tmp;
b3694847 938 tree t;
4c160717 939 tree type = TREE_TYPE (arg1);
fd7de64c
AS
940 bool uns = TYPE_UNSIGNED (type);
941 bool is_sizetype
4c160717 942 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
fd7de64c 943 bool overflow = false;
3dedc65a 944
fd7de64c
AS
945 op1 = tree_to_double_int (arg1);
946 op2 = tree_to_double_int (arg2);
e9a25f70
JL
947
948 switch (code)
6d716ca8 949 {
e9a25f70 950 case BIT_IOR_EXPR:
fd7de64c 951 res = double_int_ior (op1, op2);
e9a25f70 952 break;
6d716ca8 953
e9a25f70 954 case BIT_XOR_EXPR:
fd7de64c 955 res = double_int_xor (op1, op2);
e9a25f70 956 break;
6d716ca8 957
e9a25f70 958 case BIT_AND_EXPR:
fd7de64c 959 res = double_int_and (op1, op2);
e9a25f70 960 break;
6d716ca8 961
e9a25f70 962 case RSHIFT_EXPR:
fd7de64c
AS
963 res = double_int_rshift (op1, double_int_to_shwi (op2),
964 TYPE_PRECISION (type), !uns);
965 break;
966
e9a25f70
JL
967 case LSHIFT_EXPR:
968 /* It's unclear from the C standard whether shifts can overflow.
969 The following code ignores overflow; perhaps a C standard
970 interpretation ruling is needed. */
fd7de64c
AS
971 res = double_int_lshift (op1, double_int_to_shwi (op2),
972 TYPE_PRECISION (type), !uns);
e9a25f70 973 break;
6d716ca8 974
e9a25f70 975 case RROTATE_EXPR:
fd7de64c
AS
976 res = double_int_rrotate (op1, double_int_to_shwi (op2),
977 TYPE_PRECISION (type));
978 break;
979
e9a25f70 980 case LROTATE_EXPR:
fd7de64c
AS
981 res = double_int_lrotate (op1, double_int_to_shwi (op2),
982 TYPE_PRECISION (type));
e9a25f70 983 break;
6d716ca8 984
e9a25f70 985 case PLUS_EXPR:
fd7de64c
AS
986 overflow = add_double (op1.low, op1.high, op2.low, op2.high,
987 &res.low, &res.high);
e9a25f70 988 break;
6d716ca8 989
e9a25f70 990 case MINUS_EXPR:
fd7de64c
AS
991 neg_double (op2.low, op2.high, &res.low, &res.high);
992 add_double (op1.low, op1.high, res.low, res.high,
993 &res.low, &res.high);
994 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
e9a25f70 995 break;
6d716ca8 996
e9a25f70 997 case MULT_EXPR:
fd7de64c
AS
998 overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
999 &res.low, &res.high);
e9a25f70 1000 break;
6d716ca8 1001
e9a25f70
JL
1002 case TRUNC_DIV_EXPR:
1003 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1004 case EXACT_DIV_EXPR:
1005 /* This is a shortcut for a common special case. */
fd7de64c 1006 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
455f14dd
RS
1007 && !TREE_OVERFLOW (arg1)
1008 && !TREE_OVERFLOW (arg2)
fd7de64c 1009 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
e9a25f70
JL
1010 {
1011 if (code == CEIL_DIV_EXPR)
fd7de64c 1012 op1.low += op2.low - 1;
05bccae2 1013
fd7de64c 1014 res.low = op1.low / op2.low, res.high = 0;
6d716ca8 1015 break;
e9a25f70 1016 }
6d716ca8 1017
30f7a378 1018 /* ... fall through ... */
6d716ca8 1019
b6cc0a72 1020 case ROUND_DIV_EXPR:
fd7de64c 1021 if (double_int_zero_p (op2))
fd6c76f4 1022 return NULL_TREE;
fd7de64c 1023 if (double_int_one_p (op2))
e9a25f70 1024 {
fd7de64c 1025 res = op1;
6d716ca8 1026 break;
e9a25f70 1027 }
fd7de64c
AS
1028 if (double_int_equal_p (op1, op2)
1029 && ! double_int_zero_p (op1))
e9a25f70 1030 {
fd7de64c 1031 res = double_int_one;
63e7fe9b 1032 break;
e9a25f70 1033 }
fd7de64c
AS
1034 overflow = div_and_round_double (code, uns,
1035 op1.low, op1.high, op2.low, op2.high,
1036 &res.low, &res.high,
1037 &tmp.low, &tmp.high);
e9a25f70 1038 break;
63e7fe9b 1039
e9a25f70
JL
1040 case TRUNC_MOD_EXPR:
1041 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1042 /* This is a shortcut for a common special case. */
fd7de64c 1043 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
455f14dd
RS
1044 && !TREE_OVERFLOW (arg1)
1045 && !TREE_OVERFLOW (arg2)
fd7de64c 1046 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
e9a25f70
JL
1047 {
1048 if (code == CEIL_MOD_EXPR)
fd7de64c
AS
1049 op1.low += op2.low - 1;
1050 res.low = op1.low % op2.low, res.high = 0;
63e7fe9b 1051 break;
e9a25f70 1052 }
63e7fe9b 1053
30f7a378 1054 /* ... fall through ... */
e9a25f70 1055
b6cc0a72 1056 case ROUND_MOD_EXPR:
fd7de64c 1057 if (double_int_zero_p (op2))
fd6c76f4 1058 return NULL_TREE;
e9a25f70 1059 overflow = div_and_round_double (code, uns,
fd7de64c
AS
1060 op1.low, op1.high, op2.low, op2.high,
1061 &tmp.low, &tmp.high,
1062 &res.low, &res.high);
e9a25f70
JL
1063 break;
1064
1065 case MIN_EXPR:
fd7de64c
AS
1066 res = double_int_min (op1, op2, uns);
1067 break;
d4b60170 1068
fd7de64c
AS
1069 case MAX_EXPR:
1070 res = double_int_max (op1, op2, uns);
e9a25f70 1071 break;
3dedc65a 1072
e9a25f70 1073 default:
fd6c76f4 1074 return NULL_TREE;
3dedc65a 1075 }
e9a25f70 1076
ca7a3bd7
NS
1077 if (notrunc)
1078 {
fd7de64c 1079 t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);
b8fca551 1080
ca7a3bd7
NS
1081 /* Propagate overflow flags ourselves. */
1082 if (((!uns || is_sizetype) && overflow)
1083 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
89b0433e
NS
1084 {
1085 t = copy_node (t);
1086 TREE_OVERFLOW (t) = 1;
89b0433e 1087 }
ca7a3bd7
NS
1088 }
1089 else
9589f23e 1090 t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
b8fca551 1091 ((!uns || is_sizetype) && overflow)
d95787e6 1092 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
3e6688a7 1093
e9a25f70
JL
1094 return t;
1095}
1096
d4b60170
RK
1097/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1098 constant. We assume ARG1 and ARG2 have the same data type, or at least
858214db 1099 are the same kind of constant and the same machine mode. Return zero if
43a5d30b 1100 combining the constants is not allowed in the current operating mode. */
e9a25f70
JL
1101
1102static tree
43a5d30b 1103const_binop (enum tree_code code, tree arg1, tree arg2)
e9a25f70 1104{
858214db
EB
1105 /* Sanity check for the recursive cases. */
1106 if (!arg1 || !arg2)
1107 return NULL_TREE;
1108
b6cc0a72
KH
1109 STRIP_NOPS (arg1);
1110 STRIP_NOPS (arg2);
e9a25f70
JL
1111
1112 if (TREE_CODE (arg1) == INTEGER_CST)
43a5d30b 1113 return int_const_binop (code, arg1, arg2, 0);
e9a25f70 1114
6d716ca8
RS
1115 if (TREE_CODE (arg1) == REAL_CST)
1116 {
3e4093b6 1117 enum machine_mode mode;
79c844cd
RK
1118 REAL_VALUE_TYPE d1;
1119 REAL_VALUE_TYPE d2;
15e5ad76 1120 REAL_VALUE_TYPE value;
d284eb28
RS
1121 REAL_VALUE_TYPE result;
1122 bool inexact;
3e4093b6 1123 tree t, type;
6d716ca8 1124
fd6c76f4
RS
1125 /* The following codes are handled by real_arithmetic. */
1126 switch (code)
1127 {
1128 case PLUS_EXPR:
1129 case MINUS_EXPR:
1130 case MULT_EXPR:
1131 case RDIV_EXPR:
1132 case MIN_EXPR:
1133 case MAX_EXPR:
1134 break;
1135
1136 default:
1137 return NULL_TREE;
1138 }
1139
79c844cd
RK
1140 d1 = TREE_REAL_CST (arg1);
1141 d2 = TREE_REAL_CST (arg2);
5f610074 1142
3e4093b6
RS
1143 type = TREE_TYPE (arg1);
1144 mode = TYPE_MODE (type);
1145
1146 /* Don't perform operation if we honor signaling NaNs and
1147 either operand is a NaN. */
1148 if (HONOR_SNANS (mode)
1149 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1150 return NULL_TREE;
1151
1152 /* Don't perform operation if it would raise a division
1153 by zero exception. */
1154 if (code == RDIV_EXPR
1155 && REAL_VALUES_EQUAL (d2, dconst0)
1156 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1157 return NULL_TREE;
1158
5f610074
RK
1159 /* If either operand is a NaN, just return it. Otherwise, set up
1160 for floating-point trap; we return an overflow. */
1161 if (REAL_VALUE_ISNAN (d1))
1162 return arg1;
1163 else if (REAL_VALUE_ISNAN (d2))
1164 return arg2;
a4d3481d 1165
d284eb28
RS
1166 inexact = real_arithmetic (&value, code, &d1, &d2);
1167 real_convert (&result, mode, &value);
b6cc0a72 1168
68328cda
EB
 1169 /* Don't constant fold this floating point operation if
 1170 the result has overflowed and flag_trapping_math is set. */
68328cda
EB
1171 if (flag_trapping_math
1172 && MODE_HAS_INFINITIES (mode)
1173 && REAL_VALUE_ISINF (result)
1174 && !REAL_VALUE_ISINF (d1)
1175 && !REAL_VALUE_ISINF (d2))
1176 return NULL_TREE;
1177
d284eb28
RS
1178 /* Don't constant fold this floating point operation if the
 1179 result may depend upon the run-time rounding mode and
762297d9
RS
1180 flag_rounding_math is set, or if GCC's software emulation
1181 is unable to accurately represent the result. */
762297d9 1182 if ((flag_rounding_math
4099e2c2 1183 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
d284eb28
RS
1184 && (inexact || !real_identical (&result, &value)))
1185 return NULL_TREE;
1186
1187 t = build_real (type, result);
649ff3b4 1188
ca7a3bd7 1189 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
7c7b029d 1190 return t;
6d716ca8 1191 }
fd6c76f4 1192
325217ed
CF
1193 if (TREE_CODE (arg1) == FIXED_CST)
1194 {
1195 FIXED_VALUE_TYPE f1;
1196 FIXED_VALUE_TYPE f2;
1197 FIXED_VALUE_TYPE result;
1198 tree t, type;
1199 int sat_p;
1200 bool overflow_p;
1201
1202 /* The following codes are handled by fixed_arithmetic. */
1203 switch (code)
1204 {
1205 case PLUS_EXPR:
1206 case MINUS_EXPR:
1207 case MULT_EXPR:
1208 case TRUNC_DIV_EXPR:
1209 f2 = TREE_FIXED_CST (arg2);
1210 break;
1211
1212 case LSHIFT_EXPR:
1213 case RSHIFT_EXPR:
1214 f2.data.high = TREE_INT_CST_HIGH (arg2);
1215 f2.data.low = TREE_INT_CST_LOW (arg2);
1216 f2.mode = SImode;
1217 break;
1218
1219 default:
1220 return NULL_TREE;
1221 }
1222
1223 f1 = TREE_FIXED_CST (arg1);
1224 type = TREE_TYPE (arg1);
1225 sat_p = TYPE_SATURATING (type);
1226 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1227 t = build_fixed (type, result);
1228 /* Propagate overflow flags. */
1229 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
28ddeea1 1230 TREE_OVERFLOW (t) = 1;
325217ed
CF
1231 return t;
1232 }
1233
6d716ca8
RS
1234 if (TREE_CODE (arg1) == COMPLEX_CST)
1235 {
b3694847
SS
1236 tree type = TREE_TYPE (arg1);
1237 tree r1 = TREE_REALPART (arg1);
1238 tree i1 = TREE_IMAGPART (arg1);
1239 tree r2 = TREE_REALPART (arg2);
1240 tree i2 = TREE_IMAGPART (arg2);
858214db 1241 tree real, imag;
6d716ca8
RS
1242
1243 switch (code)
1244 {
1245 case PLUS_EXPR:
6d716ca8 1246 case MINUS_EXPR:
43a5d30b
AS
1247 real = const_binop (code, r1, r2);
1248 imag = const_binop (code, i1, i2);
6d716ca8
RS
1249 break;
1250
1251 case MULT_EXPR:
2f440f6a 1252 if (COMPLEX_FLOAT_TYPE_P (type))
ca75b926
KG
1253 return do_mpc_arg2 (arg1, arg2, type,
1254 /* do_nonfinite= */ folding_initializer,
1255 mpc_mul);
2f440f6a 1256
858214db 1257 real = const_binop (MINUS_EXPR,
43a5d30b
AS
1258 const_binop (MULT_EXPR, r1, r2),
1259 const_binop (MULT_EXPR, i1, i2));
858214db 1260 imag = const_binop (PLUS_EXPR,
43a5d30b
AS
1261 const_binop (MULT_EXPR, r1, i2),
1262 const_binop (MULT_EXPR, i1, r2));
6d716ca8
RS
1263 break;
1264
1265 case RDIV_EXPR:
2f440f6a 1266 if (COMPLEX_FLOAT_TYPE_P (type))
ca75b926
KG
1267 return do_mpc_arg2 (arg1, arg2, type,
1268 /* do_nonfinite= */ folding_initializer,
1269 mpc_div);
e3d5405d 1270 /* Fallthru ... */
e3d5405d
KG
1271 case TRUNC_DIV_EXPR:
1272 case CEIL_DIV_EXPR:
1273 case FLOOR_DIV_EXPR:
1274 case ROUND_DIV_EXPR:
1275 if (flag_complex_method == 0)
6d716ca8 1276 {
e3d5405d
KG
1277 /* Keep this algorithm in sync with
1278 tree-complex.c:expand_complex_div_straight().
1279
1280 Expand complex division to scalars, straightforward algorithm.
1281 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1282 t = br*br + bi*bi
1283 */
b3694847 1284 tree magsquared
6d716ca8 1285 = const_binop (PLUS_EXPR,
43a5d30b
AS
1286 const_binop (MULT_EXPR, r2, r2),
1287 const_binop (MULT_EXPR, i2, i2));
858214db
EB
1288 tree t1
1289 = const_binop (PLUS_EXPR,
43a5d30b
AS
1290 const_binop (MULT_EXPR, r1, r2),
1291 const_binop (MULT_EXPR, i1, i2));
858214db
EB
1292 tree t2
1293 = const_binop (MINUS_EXPR,
43a5d30b
AS
1294 const_binop (MULT_EXPR, i1, r2),
1295 const_binop (MULT_EXPR, r1, i2));
c10166c4 1296
43a5d30b
AS
1297 real = const_binop (code, t1, magsquared);
1298 imag = const_binop (code, t2, magsquared);
6d716ca8 1299 }
e3d5405d
KG
1300 else
1301 {
1302 /* Keep this algorithm in sync with
1303 tree-complex.c:expand_complex_div_wide().
1304
1305 Expand complex division to scalars, modified algorithm to minimize
1306 overflow with wide input ranges. */
08d19889
KG
1307 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1308 fold_abs_const (r2, TREE_TYPE (type)),
1309 fold_abs_const (i2, TREE_TYPE (type)));
b8698a0f 1310
e3d5405d
KG
1311 if (integer_nonzerop (compare))
1312 {
1313 /* In the TRUE branch, we compute
1314 ratio = br/bi;
1315 div = (br * ratio) + bi;
1316 tr = (ar * ratio) + ai;
1317 ti = (ai * ratio) - ar;
1318 tr = tr / div;
1319 ti = ti / div; */
43a5d30b 1320 tree ratio = const_binop (code, r2, i2);
08d19889 1321 tree div = const_binop (PLUS_EXPR, i2,
43a5d30b
AS
1322 const_binop (MULT_EXPR, r2, ratio));
1323 real = const_binop (MULT_EXPR, r1, ratio);
1324 real = const_binop (PLUS_EXPR, real, i1);
1325 real = const_binop (code, real, div);
1326
1327 imag = const_binop (MULT_EXPR, i1, ratio);
1328 imag = const_binop (MINUS_EXPR, imag, r1);
1329 imag = const_binop (code, imag, div);
e3d5405d
KG
1330 }
1331 else
1332 {
1333 /* In the FALSE branch, we compute
1334 ratio = d/c;
1335 divisor = (d * ratio) + c;
1336 tr = (b * ratio) + a;
1337 ti = b - (a * ratio);
1338 tr = tr / div;
1339 ti = ti / div; */
43a5d30b 1340 tree ratio = const_binop (code, i2, r2);
08d19889 1341 tree div = const_binop (PLUS_EXPR, r2,
43a5d30b 1342 const_binop (MULT_EXPR, i2, ratio));
08d19889 1343
43a5d30b
AS
1344 real = const_binop (MULT_EXPR, i1, ratio);
1345 real = const_binop (PLUS_EXPR, real, r1);
1346 real = const_binop (code, real, div);
08d19889 1347
43a5d30b
AS
1348 imag = const_binop (MULT_EXPR, r1, ratio);
1349 imag = const_binop (MINUS_EXPR, i1, imag);
1350 imag = const_binop (code, imag, div);
e3d5405d
KG
1351 }
1352 }
6d716ca8
RS
1353 break;
1354
1355 default:
fd6c76f4 1356 return NULL_TREE;
6d716ca8 1357 }
858214db
EB
1358
1359 if (real && imag)
1360 return build_complex (type, real, imag);
6d716ca8 1361 }
858214db 1362
d1d1c602
BM
1363 if (TREE_CODE (arg1) == VECTOR_CST)
1364 {
1365 tree type = TREE_TYPE(arg1);
1366 int count = TYPE_VECTOR_SUBPARTS (type), i;
1367 tree elements1, elements2, list = NULL_TREE;
b8698a0f 1368
d1d1c602
BM
1369 if(TREE_CODE(arg2) != VECTOR_CST)
1370 return NULL_TREE;
b8698a0f 1371
d1d1c602
BM
1372 elements1 = TREE_VECTOR_CST_ELTS (arg1);
1373 elements2 = TREE_VECTOR_CST_ELTS (arg2);
1374
1375 for (i = 0; i < count; i++)
1376 {
1377 tree elem1, elem2, elem;
b8698a0f 1378
d1d1c602
BM
 1379 /* The trailing elements can be empty and should be treated as 0. */
1380 if(!elements1)
1381 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1382 else
1383 {
1384 elem1 = TREE_VALUE(elements1);
1385 elements1 = TREE_CHAIN (elements1);
b8698a0f
L
1386 }
1387
d1d1c602
BM
1388 if(!elements2)
1389 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1390 else
1391 {
1392 elem2 = TREE_VALUE(elements2);
1393 elements2 = TREE_CHAIN (elements2);
1394 }
b8698a0f 1395
43a5d30b 1396 elem = const_binop (code, elem1, elem2);
b8698a0f 1397
d1d1c602
BM
 1398 /* It is possible that const_binop cannot handle the given
 1399 code and returns NULL_TREE. */
1400 if(elem == NULL_TREE)
1401 return NULL_TREE;
b8698a0f 1402
d1d1c602
BM
1403 list = tree_cons (NULL_TREE, elem, list);
1404 }
b8698a0f 1405 return build_vector(type, nreverse(list));
d1d1c602 1406 }
fd6c76f4 1407 return NULL_TREE;
6d716ca8 1408}
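/* Worked example of the straightforward complex division used above,
   not part of the original source: for (3 + 4i) / (1 + 2i) the formula
   a/b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t) with t = br*br + bi*bi
   gives t = 1 + 4 = 5, real = (3 + 8)/5 = 11/5 and
   imag = (4 - 6)/5 = -2/5, i.e. the exact quotient (11 - 2i)/5.  */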
4c160717 1409
ce552f75
NS
1410/* Create a size type INT_CST node with NUMBER sign extended. KIND
1411 indicates which particular sizetype to create. */
d4b60170 1412
fed3cef0 1413tree
3e95a7cb 1414size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
fed3cef0 1415{
ce552f75 1416 return build_int_cst (sizetype_tab[(int) kind], number);
fed3cef0 1417}
ce552f75 1418\f
fed3cef0
RK
1419/* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1420 is a tree code. The type of the result is taken from the operands.
000d8d44 1421 Both must be equivalent integer types, ala int_binop_types_match_p.
6d716ca8
RS
1422 If the operands are constant, so is the result. */
1423
1424tree
db3927fb 1425size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
6d716ca8 1426{
fed3cef0
RK
1427 tree type = TREE_TYPE (arg0);
1428
7ebcc52c
VR
1429 if (arg0 == error_mark_node || arg1 == error_mark_node)
1430 return error_mark_node;
1431
000d8d44
RS
1432 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1433 TREE_TYPE (arg1)));
fed3cef0 1434
6d716ca8
RS
1435 /* Handle the special case of two integer constants faster. */
1436 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1437 {
1438 /* And some specific cases even faster than that. */
74890d7b
RS
1439 if (code == PLUS_EXPR)
1440 {
1441 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1442 return arg1;
1443 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1444 return arg0;
1445 }
1446 else if (code == MINUS_EXPR)
1447 {
1448 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1449 return arg0;
1450 }
1451 else if (code == MULT_EXPR)
1452 {
1453 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1454 return arg1;
1455 }
9898deac 1456
6d716ca8 1457 /* Handle general case of two integer constants. */
4c160717 1458 return int_const_binop (code, arg0, arg1, 0);
6d716ca8
RS
1459 }
1460
db3927fb 1461 return fold_build2_loc (loc, code, type, arg0, arg1);
6d716ca8 1462}
697073d9 1463
fed3cef0
RK
1464/* Given two values, either both of sizetype or both of bitsizetype,
1465 compute the difference between the two values. Return the value
1466 in signed type corresponding to the type of the operands. */
697073d9
JM
1467
1468tree
db3927fb 1469size_diffop_loc (location_t loc, tree arg0, tree arg1)
697073d9 1470{
fed3cef0
RK
1471 tree type = TREE_TYPE (arg0);
1472 tree ctype;
697073d9 1473
000d8d44
RS
1474 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1475 TREE_TYPE (arg1)));
697073d9 1476
fed3cef0 1477 /* If the type is already signed, just do the simple thing. */
8df83eae 1478 if (!TYPE_UNSIGNED (type))
db3927fb 1479 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
fed3cef0 1480
000d8d44
RS
1481 if (type == sizetype)
1482 ctype = ssizetype;
1483 else if (type == bitsizetype)
1484 ctype = sbitsizetype;
1485 else
12753674 1486 ctype = signed_type_for (type);
fed3cef0
RK
1487
1488 /* If either operand is not a constant, do the conversions to the signed
1489 type and subtract. The hardware will do the right thing with any
1490 overflow in the subtraction. */
1491 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
db3927fb
AH
1492 return size_binop_loc (loc, MINUS_EXPR,
1493 fold_convert_loc (loc, ctype, arg0),
1494 fold_convert_loc (loc, ctype, arg1));
fed3cef0
RK
1495
1496 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1497 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1498 overflow) and negate (which can't either). Special-case a result
1499 of zero while we're here. */
1500 if (tree_int_cst_equal (arg0, arg1))
57decb7e 1501 return build_int_cst (ctype, 0);
fed3cef0 1502 else if (tree_int_cst_lt (arg1, arg0))
db3927fb
AH
1503 return fold_convert_loc (loc, ctype,
1504 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
fed3cef0 1505 else
db3927fb
AH
1506 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1507 fold_convert_loc (loc, ctype,
1508 size_binop_loc (loc,
1509 MINUS_EXPR,
1510 arg1, arg0)));
697073d9 1511}
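/* Illustrative example, not part of the original source: for sizetype
   constants ARG0 = 8 and ARG1 = 20 the result is the ssizetype constant
   -12: since ARG1 > ARG0, the difference 20 - 8 is computed in the
   unsigned type, converted to ssizetype and subtracted from zero,
   avoiding an unsigned wrap-around of 8 - 20.  */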
6d716ca8 1512\f
c756af79
RH
1513/* A subroutine of fold_convert_const handling conversions of an
1514 INTEGER_CST to another integer type. */
049e524f
RS
1515
1516static tree
ac545c64 1517fold_convert_const_int_from_int (tree type, const_tree arg1)
049e524f 1518{
c756af79 1519 tree t;
049e524f 1520
c756af79
RH
1521 /* Given an integer constant, make new constant with new type,
1522 appropriately sign-extended or truncated. */
9589f23e 1523 t = force_fit_type_double (type, tree_to_double_int (arg1),
9e9ef331 1524 !POINTER_TYPE_P (TREE_TYPE (arg1)),
b8fca551
RG
1525 (TREE_INT_CST_HIGH (arg1) < 0
1526 && (TYPE_UNSIGNED (type)
1527 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
d95787e6 1528 | TREE_OVERFLOW (arg1));
049e524f 1529
c756af79 1530 return t;
049e524f
RS
1531}
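/* Illustrative example, not part of the original source: converting the
   'int' INTEGER_CST -1 to 'unsigned short' keeps the low 16 bits and
   yields 65535, while converting 70000 to 'short' truncates to the
   16-bit value 4464; the constant is re-extended or truncated to fit
   the precision of the new type.  */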
1532
c756af79
RH
1533/* A subroutine of fold_convert_const handling conversions a REAL_CST
1534 to an integer type. */
6d716ca8
RS
1535
1536static tree
ac545c64 1537fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
6d716ca8 1538{
649ff3b4 1539 int overflow = 0;
fdb33708
RS
1540 tree t;
1541
c756af79
RH
1542 /* The following code implements the floating point to integer
1543 conversion rules required by the Java Language Specification,
1544 that IEEE NaNs are mapped to zero and values that overflow
1545 the target precision saturate, i.e. values greater than
1546 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1547 are mapped to INT_MIN. These semantics are allowed by the
1548 C and C++ standards that simply state that the behavior of
1549 FP-to-integer conversion is unspecified upon overflow. */
6d716ca8 1550
2bd1333d 1551 double_int val;
c756af79
RH
1552 REAL_VALUE_TYPE r;
1553 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1554
1555 switch (code)
6d716ca8 1556 {
c756af79
RH
1557 case FIX_TRUNC_EXPR:
1558 real_trunc (&r, VOIDmode, &x);
1559 break;
1560
c756af79
RH
1561 default:
1562 gcc_unreachable ();
1563 }
1564
1565 /* If R is NaN, return zero and show we have an overflow. */
1566 if (REAL_VALUE_ISNAN (r))
1567 {
1568 overflow = 1;
2bd1333d 1569 val = double_int_zero;
c756af79
RH
1570 }
1571
1572 /* See if R is less than the lower bound or greater than the
1573 upper bound. */
1574
1575 if (! overflow)
1576 {
1577 tree lt = TYPE_MIN_VALUE (type);
1578 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1579 if (REAL_VALUES_LESS (r, l))
6d716ca8 1580 {
c756af79 1581 overflow = 1;
2bd1333d 1582 val = tree_to_double_int (lt);
6d716ca8 1583 }
c756af79
RH
1584 }
1585
1586 if (! overflow)
1587 {
1588 tree ut = TYPE_MAX_VALUE (type);
1589 if (ut)
6d716ca8 1590 {
c756af79
RH
1591 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1592 if (REAL_VALUES_LESS (u, r))
fdb33708 1593 {
c756af79 1594 overflow = 1;
2bd1333d 1595 val = tree_to_double_int (ut);
c756af79
RH
1596 }
1597 }
1598 }
fdb33708 1599
c756af79 1600 if (! overflow)
2bd1333d 1601 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
fdb33708 1602
9589f23e 1603 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
c756af79
RH
1604 return t;
1605}
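/* Illustrative example, not part of the original source: with
   FIX_TRUNC_EXPR, converting the REAL_CST 3.0e10 to a 32-bit signed type
   saturates to INT_MAX (2147483647), -3.0e10 saturates to INT_MIN, and a
   NaN converts to 0; in each of these cases the returned constant has
   TREE_OVERFLOW set, as described above.  */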
fc627530 1606
325217ed
CF
1607/* A subroutine of fold_convert_const handling conversions of a
1608 FIXED_CST to an integer type. */
1609
1610static tree
ac545c64 1611fold_convert_const_int_from_fixed (tree type, const_tree arg1)
325217ed
CF
1612{
1613 tree t;
1614 double_int temp, temp_trunc;
1615 unsigned int mode;
1616
1617 /* Right shift FIXED_CST to temp by fbit. */
1618 temp = TREE_FIXED_CST (arg1).data;
1619 mode = TREE_FIXED_CST (arg1).mode;
1620 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1621 {
2bd1333d
AS
1622 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1623 HOST_BITS_PER_DOUBLE_INT,
1624 SIGNED_FIXED_POINT_MODE_P (mode));
325217ed
CF
1625
1626 /* Left shift temp to temp_trunc by fbit. */
2bd1333d
AS
1627 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1628 HOST_BITS_PER_DOUBLE_INT,
1629 SIGNED_FIXED_POINT_MODE_P (mode));
325217ed
CF
1630 }
1631 else
1632 {
2bd1333d
AS
1633 temp = double_int_zero;
1634 temp_trunc = double_int_zero;
325217ed
CF
1635 }
1636
1637 /* If FIXED_CST is negative, we need to round the value toward 0.
 1638   Do this by adding 1 to temp if the fractional bits are not zero. */
2bd1333d
AS
1639 if (SIGNED_FIXED_POINT_MODE_P (mode)
1640 && double_int_negative_p (temp_trunc)
325217ed 1641 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2bd1333d 1642 temp = double_int_add (temp, double_int_one);
325217ed
CF
1643
1644 /* Given a fixed-point constant, make new constant with new type,
1645 appropriately sign-extended or truncated. */
9589f23e 1646 t = force_fit_type_double (type, temp, -1,
2bd1333d 1647 (double_int_negative_p (temp)
325217ed
CF
1648 && (TYPE_UNSIGNED (type)
1649 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1650 | TREE_OVERFLOW (arg1));
1651
1652 return t;
1653}
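/* Worked example of the rounding above, with illustrative numbers: a
   signed fixed-point value of -2.5 with fbit == 4 has data == -40.
   The arithmetic right shift by 4 gives temp == -3 (rounding toward
   minus infinity); shifting back gives temp_trunc == -48 != -40, so
   the discarded fraction was nonzero and 1 is added, yielding -2.
   The conversion therefore rounds toward zero, as required.  */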
1654
c756af79
RH
 1655/* A subroutine of fold_convert_const handling conversions of a REAL_CST
1656 to another floating point type. */
fdb33708 1657
c756af79 1658static tree
ac545c64 1659fold_convert_const_real_from_real (tree type, const_tree arg1)
c756af79 1660{
d284eb28 1661 REAL_VALUE_TYPE value;
c756af79 1662 tree t;
e1ee5cdc 1663
d284eb28
RS
1664 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1665 t = build_real (type, value);
875eda9c 1666
d33e4b70
SL
1667 /* If converting an infinity or NAN to a representation that doesn't
1668 have one, set the overflow bit so that we can produce some kind of
1669 error message at the appropriate point if necessary. It's not the
1670 most user-friendly message, but it's better than nothing. */
1671 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1672 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1673 TREE_OVERFLOW (t) = 1;
1674 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1675 && !MODE_HAS_NANS (TYPE_MODE (type)))
1676 TREE_OVERFLOW (t) = 1;
1677 /* Regular overflow, conversion produced an infinity in a mode that
1678 can't represent them. */
1679 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1680 && REAL_VALUE_ISINF (value)
1681 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1682 TREE_OVERFLOW (t) = 1;
1683 else
1684 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
c756af79
RH
1685 return t;
1686}
875eda9c 1687
325217ed
CF
 1688/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1689 to a floating point type. */
1690
1691static tree
ac545c64 1692fold_convert_const_real_from_fixed (tree type, const_tree arg1)
325217ed
CF
1693{
1694 REAL_VALUE_TYPE value;
1695 tree t;
1696
1697 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1698 t = build_real (type, value);
1699
1700 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
325217ed
CF
1701 return t;
1702}
1703
 1704/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1705 to another fixed-point type. */
1706
1707static tree
ac545c64 1708fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
325217ed
CF
1709{
1710 FIXED_VALUE_TYPE value;
1711 tree t;
1712 bool overflow_p;
1713
1714 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1715 TYPE_SATURATING (type));
1716 t = build_fixed (type, value);
1717
1718 /* Propagate overflow flags. */
1719 if (overflow_p | TREE_OVERFLOW (arg1))
28ddeea1 1720 TREE_OVERFLOW (t) = 1;
325217ed
CF
1721 return t;
1722}
1723
 1724/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1725 to a fixed-point type. */
1726
1727static tree
ac545c64 1728fold_convert_const_fixed_from_int (tree type, const_tree arg1)
325217ed
CF
1729{
1730 FIXED_VALUE_TYPE value;
1731 tree t;
1732 bool overflow_p;
1733
1734 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1735 TREE_INT_CST (arg1),
1736 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1737 TYPE_SATURATING (type));
1738 t = build_fixed (type, value);
1739
1740 /* Propagate overflow flags. */
1741 if (overflow_p | TREE_OVERFLOW (arg1))
28ddeea1 1742 TREE_OVERFLOW (t) = 1;
325217ed
CF
1743 return t;
1744}
1745
 1746/* A subroutine of fold_convert_const handling conversions of a REAL_CST
1747 to a fixed-point type. */
1748
1749static tree
ac545c64 1750fold_convert_const_fixed_from_real (tree type, const_tree arg1)
325217ed
CF
1751{
1752 FIXED_VALUE_TYPE value;
1753 tree t;
1754 bool overflow_p;
1755
1756 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1757 &TREE_REAL_CST (arg1),
1758 TYPE_SATURATING (type));
1759 t = build_fixed (type, value);
1760
1761 /* Propagate overflow flags. */
1762 if (overflow_p | TREE_OVERFLOW (arg1))
28ddeea1 1763 TREE_OVERFLOW (t) = 1;
325217ed
CF
1764 return t;
1765}
1766
c756af79
RH
1767/* Attempt to fold type conversion operation CODE of expression ARG1 to
1768 type TYPE. If no simplification can be done return NULL_TREE. */
875eda9c 1769
c756af79
RH
1770static tree
1771fold_convert_const (enum tree_code code, tree type, tree arg1)
1772{
1773 if (TREE_TYPE (arg1) == type)
1774 return arg1;
ca7a3bd7 1775
0e4b00d6
AP
1776 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1777 || TREE_CODE (type) == OFFSET_TYPE)
c756af79
RH
1778 {
1779 if (TREE_CODE (arg1) == INTEGER_CST)
1780 return fold_convert_const_int_from_int (type, arg1);
1781 else if (TREE_CODE (arg1) == REAL_CST)
1782 return fold_convert_const_int_from_real (code, type, arg1);
325217ed
CF
1783 else if (TREE_CODE (arg1) == FIXED_CST)
1784 return fold_convert_const_int_from_fixed (type, arg1);
6d716ca8
RS
1785 }
1786 else if (TREE_CODE (type) == REAL_TYPE)
1787 {
6d716ca8
RS
1788 if (TREE_CODE (arg1) == INTEGER_CST)
1789 return build_real_from_int_cst (type, arg1);
325217ed 1790 else if (TREE_CODE (arg1) == REAL_CST)
c756af79 1791 return fold_convert_const_real_from_real (type, arg1);
325217ed
CF
1792 else if (TREE_CODE (arg1) == FIXED_CST)
1793 return fold_convert_const_real_from_fixed (type, arg1);
1794 }
1795 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1796 {
1797 if (TREE_CODE (arg1) == FIXED_CST)
1798 return fold_convert_const_fixed_from_fixed (type, arg1);
1799 else if (TREE_CODE (arg1) == INTEGER_CST)
1800 return fold_convert_const_fixed_from_int (type, arg1);
1801 else if (TREE_CODE (arg1) == REAL_CST)
1802 return fold_convert_const_fixed_from_real (type, arg1);
6d716ca8 1803 }
fdb33708 1804 return NULL_TREE;
6d716ca8 1805}
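/* fold_convert_const is in effect a two-dimensional dispatch on the
   class of the target type and the kind of constant.  For example,
   folding (int) 1.9 goes through fold_convert_const_int_from_real and
   yields 1, folding (double) 3 goes through build_real_from_int_cst
   and yields 3.0, and folding a _Fract constant to double goes through
   fold_convert_const_real_from_fixed.  Combinations not handled above
   simply return NULL_TREE and are left to other folders.  */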
088414c1 1806
c756af79
RH
1807/* Construct a vector of zero elements of vector type TYPE. */
1808
1809static tree
1810build_zero_vector (tree type)
1811{
b9acc9f1 1812 tree t;
b8698a0f 1813
b9acc9f1
NF
1814 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1815 return build_vector_from_val (type, t);
c756af79
RH
1816}
1817
3b357646
RG
1818/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1819
1820bool
fa233e34 1821fold_convertible_p (const_tree type, const_tree arg)
3b357646
RG
1822{
1823 tree orig = TREE_TYPE (arg);
1824
1825 if (type == orig)
1826 return true;
1827
1828 if (TREE_CODE (arg) == ERROR_MARK
1829 || TREE_CODE (type) == ERROR_MARK
1830 || TREE_CODE (orig) == ERROR_MARK)
1831 return false;
1832
1833 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1834 return true;
1835
1836 switch (TREE_CODE (type))
1837 {
1838 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1839 case POINTER_TYPE: case REFERENCE_TYPE:
1840 case OFFSET_TYPE:
1841 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1842 || TREE_CODE (orig) == OFFSET_TYPE)
1843 return true;
1844 return (TREE_CODE (orig) == VECTOR_TYPE
1845 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1846
c17ee676
FXC
1847 case REAL_TYPE:
1848 case FIXED_POINT_TYPE:
1849 case COMPLEX_TYPE:
1850 case VECTOR_TYPE:
1851 case VOID_TYPE:
3b357646 1852 return TREE_CODE (type) == TREE_CODE (orig);
c17ee676
FXC
1853
1854 default:
1855 return false;
3b357646
RG
1856 }
1857}
1858
088414c1
RS
1859/* Convert expression ARG to type TYPE. Used by the middle-end for
1860 simple conversions in preference to calling the front-end's convert. */
1861
e419fe91 1862tree
db3927fb 1863fold_convert_loc (location_t loc, tree type, tree arg)
088414c1
RS
1864{
1865 tree orig = TREE_TYPE (arg);
1866 tree tem;
1867
1868 if (type == orig)
1869 return arg;
1870
1871 if (TREE_CODE (arg) == ERROR_MARK
1872 || TREE_CODE (type) == ERROR_MARK
1873 || TREE_CODE (orig) == ERROR_MARK)
1874 return error_mark_node;
1875
f4088621 1876 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
db3927fb 1877 return fold_build1_loc (loc, NOP_EXPR, type, arg);
088414c1 1878
0bccc606 1879 switch (TREE_CODE (type))
088414c1 1880 {
09e881c9
BE
1881 case POINTER_TYPE:
1882 case REFERENCE_TYPE:
1883 /* Handle conversions between pointers to different address spaces. */
1884 if (POINTER_TYPE_P (orig)
1885 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1886 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1887 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1888 /* fall through */
1889
71d59383 1890 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
0bccc606 1891 case OFFSET_TYPE:
088414c1
RS
1892 if (TREE_CODE (arg) == INTEGER_CST)
1893 {
1894 tem = fold_convert_const (NOP_EXPR, type, arg);
1895 if (tem != NULL_TREE)
1896 return tem;
1897 }
908d0773
AP
1898 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1899 || TREE_CODE (orig) == OFFSET_TYPE)
db3927fb 1900 return fold_build1_loc (loc, NOP_EXPR, type, arg);
088414c1 1901 if (TREE_CODE (orig) == COMPLEX_TYPE)
db3927fb
AH
1902 return fold_convert_loc (loc, type,
1903 fold_build1_loc (loc, REALPART_EXPR,
1904 TREE_TYPE (orig), arg));
0bccc606
NS
1905 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1906 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
db3927fb 1907 return fold_build1_loc (loc, NOP_EXPR, type, arg);
3e6688a7 1908
0bccc606 1909 case REAL_TYPE:
088414c1
RS
1910 if (TREE_CODE (arg) == INTEGER_CST)
1911 {
1912 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1913 if (tem != NULL_TREE)
1914 return tem;
1915 }
1916 else if (TREE_CODE (arg) == REAL_CST)
1917 {
1918 tem = fold_convert_const (NOP_EXPR, type, arg);
1919 if (tem != NULL_TREE)
1920 return tem;
1921 }
325217ed
CF
1922 else if (TREE_CODE (arg) == FIXED_CST)
1923 {
1924 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1925 if (tem != NULL_TREE)
1926 return tem;
1927 }
088414c1 1928
0bccc606 1929 switch (TREE_CODE (orig))
088414c1 1930 {
71d59383 1931 case INTEGER_TYPE:
0bccc606
NS
1932 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1933 case POINTER_TYPE: case REFERENCE_TYPE:
db3927fb 1934 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
3e6688a7 1935
0bccc606 1936 case REAL_TYPE:
db3927fb 1937 return fold_build1_loc (loc, NOP_EXPR, type, arg);
3e6688a7 1938
325217ed 1939 case FIXED_POINT_TYPE:
db3927fb 1940 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
325217ed
CF
1941
1942 case COMPLEX_TYPE:
db3927fb
AH
1943 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1944 return fold_convert_loc (loc, type, tem);
325217ed
CF
1945
1946 default:
1947 gcc_unreachable ();
1948 }
1949
1950 case FIXED_POINT_TYPE:
1951 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1952 || TREE_CODE (arg) == REAL_CST)
1953 {
1954 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1955 if (tem != NULL_TREE)
db3927fb 1956 goto fold_convert_exit;
325217ed
CF
1957 }
1958
1959 switch (TREE_CODE (orig))
1960 {
1961 case FIXED_POINT_TYPE:
1962 case INTEGER_TYPE:
1963 case ENUMERAL_TYPE:
1964 case BOOLEAN_TYPE:
1965 case REAL_TYPE:
db3927fb 1966 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
325217ed 1967
0bccc606 1968 case COMPLEX_TYPE:
db3927fb
AH
1969 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1970 return fold_convert_loc (loc, type, tem);
3e6688a7 1971
0bccc606
NS
1972 default:
1973 gcc_unreachable ();
088414c1 1974 }
3e6688a7 1975
0bccc606
NS
1976 case COMPLEX_TYPE:
1977 switch (TREE_CODE (orig))
1978 {
71d59383 1979 case INTEGER_TYPE:
0bccc606
NS
1980 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1981 case POINTER_TYPE: case REFERENCE_TYPE:
1982 case REAL_TYPE:
325217ed 1983 case FIXED_POINT_TYPE:
db3927fb
AH
1984 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1985 fold_convert_loc (loc, TREE_TYPE (type), arg),
1986 fold_convert_loc (loc, TREE_TYPE (type),
3111cce0 1987 integer_zero_node));
0bccc606
NS
1988 case COMPLEX_TYPE:
1989 {
1990 tree rpart, ipart;
3e6688a7 1991
0bccc606
NS
1992 if (TREE_CODE (arg) == COMPLEX_EXPR)
1993 {
db3927fb
AH
1994 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1995 TREE_OPERAND (arg, 0));
1996 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1997 TREE_OPERAND (arg, 1));
1998 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
0bccc606 1999 }
3e6688a7 2000
0bccc606 2001 arg = save_expr (arg);
db3927fb
AH
2002 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2003 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2004 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2005 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2006 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
0bccc606 2007 }
3e6688a7 2008
0bccc606
NS
2009 default:
2010 gcc_unreachable ();
2011 }
3e6688a7 2012
0bccc606 2013 case VECTOR_TYPE:
049e524f
RS
2014 if (integer_zerop (arg))
2015 return build_zero_vector (type);
0bccc606
NS
2016 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2017 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2018 || TREE_CODE (orig) == VECTOR_TYPE);
db3927fb 2019 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
088414c1 2020
0bccc606 2021 case VOID_TYPE:
bd7e4636 2022 tem = fold_ignored_result (arg);
726a989a 2023 if (TREE_CODE (tem) == MODIFY_EXPR)
db3927fb
AH
2024 goto fold_convert_exit;
2025 return fold_build1_loc (loc, NOP_EXPR, type, tem);
088414c1 2026
0bccc606
NS
2027 default:
2028 gcc_unreachable ();
088414c1 2029 }
db3927fb 2030 fold_convert_exit:
c9019218 2031 protected_set_expr_location_unshare (tem, loc);
db3927fb 2032 return tem;
088414c1 2033}
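/* Unlike the front ends' convert, fold_convert_loc performs only the
   conversions the middle end itself needs and asserts on anything
   else.  For example, converting a _Complex double value to double
   yields REALPART_EXPR <arg> (the imaginary part is dropped),
   converting an integer to a complex type builds
   COMPLEX_EXPR <(T) arg, (T) 0>, and converting to void goes through
   fold_ignored_result.  */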
6d716ca8 2034\f
569b7f6a 2035/* Return false if expr can be assumed not to be an lvalue, true
283da5df 2036 otherwise. */
6d716ca8 2037
283da5df 2038static bool
ac545c64 2039maybe_lvalue_p (const_tree x)
6d716ca8 2040{
8d4a2ff6
RS
2041 /* We only need to wrap lvalue tree codes. */
2042 switch (TREE_CODE (x))
2043 {
2044 case VAR_DECL:
2045 case PARM_DECL:
2046 case RESULT_DECL:
2047 case LABEL_DECL:
2048 case FUNCTION_DECL:
2049 case SSA_NAME:
2050
2051 case COMPONENT_REF:
75421dcd 2052 case MEM_REF:
8d4a2ff6
RS
2053 case INDIRECT_REF:
2054 case ARRAY_REF:
44de5aeb 2055 case ARRAY_RANGE_REF:
8d4a2ff6 2056 case BIT_FIELD_REF:
0f59171d 2057 case OBJ_TYPE_REF:
8d4a2ff6
RS
2058
2059 case REALPART_EXPR:
2060 case IMAGPART_EXPR:
2061 case PREINCREMENT_EXPR:
2062 case PREDECREMENT_EXPR:
2063 case SAVE_EXPR:
8d4a2ff6
RS
2064 case TRY_CATCH_EXPR:
2065 case WITH_CLEANUP_EXPR:
2066 case COMPOUND_EXPR:
2067 case MODIFY_EXPR:
2068 case TARGET_EXPR:
2069 case COND_EXPR:
2070 case BIND_EXPR:
8d4a2ff6
RS
2071 break;
2072
2073 default:
2074 /* Assume the worst for front-end tree codes. */
2075 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2076 break;
283da5df 2077 return false;
8d4a2ff6 2078 }
283da5df
RS
2079
2080 return true;
2081}
2082
2083/* Return an expr equal to X but certainly not valid as an lvalue. */
2084
2085tree
db3927fb 2086non_lvalue_loc (location_t loc, tree x)
283da5df
RS
2087{
2088 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2089 us. */
2090 if (in_gimple_form)
2091 return x;
2092
2093 if (! maybe_lvalue_p (x))
2094 return x;
c9019218 2095 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
6d716ca8 2096}
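/* The NON_LVALUE_EXPR wrapper matters for folds such as "x + 0" -> "x":
   without it the folded expression would suddenly be assignable, so
   "(x + 0) = 1" would wrongly become valid after folding.  In GIMPLE
   no such lvalue contexts remain, hence the early return above.  */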
a5e9b124 2097
e9866da3
JM
2098/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2099 Zero means allow extended lvalues. */
2100
2101int pedantic_lvalues;
2102
a5e9b124
JW
2103/* When pedantic, return an expr equal to X but certainly not valid as a
2104 pedantic lvalue. Otherwise, return X. */
2105
49995c8e 2106static tree
db3927fb 2107pedantic_non_lvalue_loc (location_t loc, tree x)
a5e9b124 2108{
e9866da3 2109 if (pedantic_lvalues)
db3927fb 2110 return non_lvalue_loc (loc, x);
47f647e4 2111
c9019218 2112 return protected_set_expr_location_unshare (x, loc);
a5e9b124 2113}
c05a9b68
RS
2114\f
2115/* Given a tree comparison code, return the code that is the logical inverse
2116 of the given code. It is not safe to do this for floating-point
d1a7edaf
PB
2117 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2118 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
6d716ca8 2119
227858d1 2120enum tree_code
d1a7edaf 2121invert_tree_comparison (enum tree_code code, bool honor_nans)
c05a9b68 2122{
d1a7edaf
PB
2123 if (honor_nans && flag_trapping_math)
2124 return ERROR_MARK;
2125
c05a9b68
RS
2126 switch (code)
2127 {
2128 case EQ_EXPR:
2129 return NE_EXPR;
2130 case NE_EXPR:
2131 return EQ_EXPR;
2132 case GT_EXPR:
d1a7edaf 2133 return honor_nans ? UNLE_EXPR : LE_EXPR;
c05a9b68 2134 case GE_EXPR:
d1a7edaf 2135 return honor_nans ? UNLT_EXPR : LT_EXPR;
c05a9b68 2136 case LT_EXPR:
d1a7edaf 2137 return honor_nans ? UNGE_EXPR : GE_EXPR;
c05a9b68 2138 case LE_EXPR:
d1a7edaf
PB
2139 return honor_nans ? UNGT_EXPR : GT_EXPR;
2140 case LTGT_EXPR:
2141 return UNEQ_EXPR;
2142 case UNEQ_EXPR:
2143 return LTGT_EXPR;
2144 case UNGT_EXPR:
2145 return LE_EXPR;
2146 case UNGE_EXPR:
2147 return LT_EXPR;
2148 case UNLT_EXPR:
2149 return GE_EXPR;
2150 case UNLE_EXPR:
c05a9b68 2151 return GT_EXPR;
d1a7edaf
PB
2152 case ORDERED_EXPR:
2153 return UNORDERED_EXPR;
2154 case UNORDERED_EXPR:
2155 return ORDERED_EXPR;
c05a9b68 2156 default:
0bccc606 2157 gcc_unreachable ();
c05a9b68
RS
2158 }
2159}
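/* For example, without NaNs the inverse of "a < b" is "a >= b", but
   when NaNs are honored both forms are false for unordered operands,
   so the true inverse of LT_EXPR is UNGE_EXPR; and if -ftrapping-math
   is also in effect no inverse is returned at all (ERROR_MARK),
   because the rewritten comparison could trap where the original did
   not.  */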
2160
2161/* Similar, but return the comparison that results if the operands are
2162 swapped. This is safe for floating-point. */
2163
fd660b1b 2164enum tree_code
fa8db1f7 2165swap_tree_comparison (enum tree_code code)
c05a9b68
RS
2166{
2167 switch (code)
2168 {
2169 case EQ_EXPR:
2170 case NE_EXPR:
09b2f9e8
RS
2171 case ORDERED_EXPR:
2172 case UNORDERED_EXPR:
2173 case LTGT_EXPR:
2174 case UNEQ_EXPR:
c05a9b68
RS
2175 return code;
2176 case GT_EXPR:
2177 return LT_EXPR;
2178 case GE_EXPR:
2179 return LE_EXPR;
2180 case LT_EXPR:
2181 return GT_EXPR;
2182 case LE_EXPR:
2183 return GE_EXPR;
09b2f9e8
RS
2184 case UNGT_EXPR:
2185 return UNLT_EXPR;
2186 case UNGE_EXPR:
2187 return UNLE_EXPR;
2188 case UNLT_EXPR:
2189 return UNGT_EXPR;
2190 case UNLE_EXPR:
2191 return UNGE_EXPR;
c05a9b68 2192 default:
0bccc606 2193 gcc_unreachable ();
c05a9b68
RS
2194 }
2195}
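/* Swapping is safe even for floating point because only the operands
   are exchanged, never the sense of the predicate: "a < b" and "b > a"
   hold for exactly the same inputs (both are false on NaNs).  That is
   why swap_tree_comparison, unlike invert_tree_comparison above, needs
   no HONOR_NANS information.  */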
61f275ff 2196
8dcb27ed
RS
2197
2198/* Convert a comparison tree code from an enum tree_code representation
2199 into a compcode bit-based encoding. This function is the inverse of
2200 compcode_to_comparison. */
2201
d1a7edaf 2202static enum comparison_code
fa8db1f7 2203comparison_to_compcode (enum tree_code code)
8dcb27ed
RS
2204{
2205 switch (code)
2206 {
2207 case LT_EXPR:
2208 return COMPCODE_LT;
2209 case EQ_EXPR:
2210 return COMPCODE_EQ;
2211 case LE_EXPR:
2212 return COMPCODE_LE;
2213 case GT_EXPR:
2214 return COMPCODE_GT;
2215 case NE_EXPR:
2216 return COMPCODE_NE;
2217 case GE_EXPR:
2218 return COMPCODE_GE;
d1a7edaf
PB
2219 case ORDERED_EXPR:
2220 return COMPCODE_ORD;
2221 case UNORDERED_EXPR:
2222 return COMPCODE_UNORD;
2223 case UNLT_EXPR:
2224 return COMPCODE_UNLT;
2225 case UNEQ_EXPR:
2226 return COMPCODE_UNEQ;
2227 case UNLE_EXPR:
2228 return COMPCODE_UNLE;
2229 case UNGT_EXPR:
2230 return COMPCODE_UNGT;
2231 case LTGT_EXPR:
2232 return COMPCODE_LTGT;
2233 case UNGE_EXPR:
2234 return COMPCODE_UNGE;
8dcb27ed 2235 default:
0bccc606 2236 gcc_unreachable ();
8dcb27ed
RS
2237 }
2238}
2239
2240/* Convert a compcode bit-based encoding of a comparison operator back
2241 to GCC's enum tree_code representation. This function is the
2242 inverse of comparison_to_compcode. */
2243
2244static enum tree_code
d1a7edaf 2245compcode_to_comparison (enum comparison_code code)
8dcb27ed
RS
2246{
2247 switch (code)
2248 {
2249 case COMPCODE_LT:
2250 return LT_EXPR;
2251 case COMPCODE_EQ:
2252 return EQ_EXPR;
2253 case COMPCODE_LE:
2254 return LE_EXPR;
2255 case COMPCODE_GT:
2256 return GT_EXPR;
2257 case COMPCODE_NE:
2258 return NE_EXPR;
2259 case COMPCODE_GE:
2260 return GE_EXPR;
d1a7edaf
PB
2261 case COMPCODE_ORD:
2262 return ORDERED_EXPR;
2263 case COMPCODE_UNORD:
2264 return UNORDERED_EXPR;
2265 case COMPCODE_UNLT:
2266 return UNLT_EXPR;
2267 case COMPCODE_UNEQ:
2268 return UNEQ_EXPR;
2269 case COMPCODE_UNLE:
2270 return UNLE_EXPR;
2271 case COMPCODE_UNGT:
2272 return UNGT_EXPR;
2273 case COMPCODE_LTGT:
2274 return LTGT_EXPR;
2275 case COMPCODE_UNGE:
2276 return UNGE_EXPR;
8dcb27ed 2277 default:
0bccc606 2278 gcc_unreachable ();
8dcb27ed
RS
2279 }
2280}
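/* The compcode encoding gives one bit to each possible outcome of a
   comparison -- LT (1), EQ (2), GT (4) and UNORDERED (8) -- so each
   predicate is the OR of the outcomes for which it is true:

     COMPCODE_LE   = LT | EQ           = 3
     COMPCODE_GE   = EQ | GT           = 6
     COMPCODE_ORD  = LT | EQ | GT      = 7
     COMPCODE_UNLE = LT | EQ | UNORD   = 11
     COMPCODE_TRUE = all four bits     = 15

   This is what lets combine_comparisons below turn a logical AND/OR of
   two comparisons of the same operands into a bitwise AND/OR of their
   compcodes.  */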
2281
d1a7edaf
PB
2282/* Return a tree for the comparison which is the combination of
2283 doing the AND or OR (depending on CODE) of the two operations LCODE
2284 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2285 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2286 if this makes the transformation invalid. */
2287
2288tree
db3927fb
AH
2289combine_comparisons (location_t loc,
2290 enum tree_code code, enum tree_code lcode,
d1a7edaf
PB
2291 enum tree_code rcode, tree truth_type,
2292 tree ll_arg, tree lr_arg)
2293{
2294 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2295 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2296 enum comparison_code rcompcode = comparison_to_compcode (rcode);
32e8bb8e 2297 int compcode;
d1a7edaf
PB
2298
2299 switch (code)
2300 {
2301 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2302 compcode = lcompcode & rcompcode;
2303 break;
2304
2305 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2306 compcode = lcompcode | rcompcode;
2307 break;
2308
2309 default:
2310 return NULL_TREE;
2311 }
2312
2313 if (!honor_nans)
2314 {
2315 /* Eliminate unordered comparisons, as well as LTGT and ORD
2316 which are not used unless the mode has NaNs. */
2317 compcode &= ~COMPCODE_UNORD;
2318 if (compcode == COMPCODE_LTGT)
2319 compcode = COMPCODE_NE;
2320 else if (compcode == COMPCODE_ORD)
2321 compcode = COMPCODE_TRUE;
2322 }
2323 else if (flag_trapping_math)
2324 {
d1822754 2325 /* Check that the original operation and the optimized ones will trap
d1a7edaf
PB
2326 under the same condition. */
2327 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2328 && (lcompcode != COMPCODE_EQ)
2329 && (lcompcode != COMPCODE_ORD);
2330 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2331 && (rcompcode != COMPCODE_EQ)
2332 && (rcompcode != COMPCODE_ORD);
2333 bool trap = (compcode & COMPCODE_UNORD) == 0
2334 && (compcode != COMPCODE_EQ)
2335 && (compcode != COMPCODE_ORD);
2336
2337 /* In a short-circuited boolean expression the LHS might be
2338 such that the RHS, if evaluated, will never trap. For
2339 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2340 if neither x nor y is NaN. (This is a mixed blessing: for
2341 example, the expression above will never trap, hence
2342 optimizing it to x < y would be invalid). */
2343 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2344 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2345 rtrap = false;
2346
2347 /* If the comparison was short-circuited, and only the RHS
2348 trapped, we may now generate a spurious trap. */
2349 if (rtrap && !ltrap
2350 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2351 return NULL_TREE;
2352
2353 /* If we changed the conditions that cause a trap, we lose. */
2354 if ((ltrap || rtrap) != trap)
2355 return NULL_TREE;
2356 }
2357
2358 if (compcode == COMPCODE_TRUE)
1b0f3e79 2359 return constant_boolean_node (true, truth_type);
d1a7edaf 2360 else if (compcode == COMPCODE_FALSE)
1b0f3e79 2361 return constant_boolean_node (false, truth_type);
d1a7edaf 2362 else
32e8bb8e
ILT
2363 {
2364 enum tree_code tcode;
2365
2366 tcode = compcode_to_comparison ((enum comparison_code) compcode);
db3927fb 2367 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
32e8bb8e 2368 }
d1a7edaf 2369}
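/* Example: for "x <= y && x >= y" the compcodes are LE (3) and GE (6);
   their bitwise AND is 2 == COMPCODE_EQ, so the pair folds to "x == y"
   when the NaN/trap checks permit it.  Likewise "x < y || x == y" ORs
   to COMPCODE_LE.  The trap analysis above can veto a fold: under
   -ftrapping-math, "ORD (x, y) && x < y" is not rewritten to "x < y",
   since the original never traps (the RHS is reached only for ordered
   operands) while the replacement would trap on a NaN.  */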
c05a9b68 2370\f
fae111c1
RS
2371/* Return nonzero if two operands (typically of the same tree node)
2372 are necessarily equal. If either argument has side-effects this
1ea7e6ad 2373 function returns zero. FLAGS modifies behavior as follows:
fae111c1 2374
6de9cd9a 2375 If OEP_ONLY_CONST is set, only return nonzero for constants.
6a1746af
RS
2376 This function tests whether the operands are indistinguishable;
2377 it does not test whether they are equal using C's == operation.
2378 The distinction is important for IEEE floating point, because
2379 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
fae111c1
RS
2380 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2381
6de9cd9a 2382 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
fae111c1
RS
2383 even though it may hold multiple values during a function.
2384 This is because a GCC tree node guarantees that nothing else is
2385 executed between the evaluation of its "operands" (which may often
2386 be evaluated in arbitrary order). Hence if the operands themselves
2387 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3dd8069d
PB
2388 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2389 unset means assuming isochronic (or instantaneous) tree equivalence.
2390 Unless comparing arbitrary expression trees, such as from different
2391 statements, this flag can usually be left unset.
6de9cd9a
DN
2392
2393 If OEP_PURE_SAME is set, then pure functions with identical arguments
2394 are considered the same. It is used when the caller has other ways
2395 to ensure that global memory is unchanged in between. */
6d716ca8
RS
2396
2397int
fa233e34 2398operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
6d716ca8 2399{
8df83eae 2400 /* If either is ERROR_MARK, they aren't equal. */
2aac1924
JM
2401 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2402 || TREE_TYPE (arg0) == error_mark_node
2403 || TREE_TYPE (arg1) == error_mark_node)
8df83eae
RK
2404 return 0;
2405
56c47f22
RG
2406 /* Similar, if either does not have a type (like a released SSA name),
2407 they aren't equal. */
2408 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2409 return 0;
2410
ba2e1892
RG
2411 /* Check equality of integer constants before bailing out due to
2412 precision differences. */
2413 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2414 return tree_int_cst_equal (arg0, arg1);
2415
6d716ca8
RS
2416 /* If both types don't have the same signedness, then we can't consider
2417 them equal. We must check this before the STRIP_NOPS calls
b13e7b6c
RG
2418 because they may change the signedness of the arguments. As pointers
2419 strictly don't have a signedness, require either two pointers or
2420 two non-pointers as well. */
2421 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2422 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
6d716ca8
RS
2423 return 0;
2424
09e881c9
BE
2425 /* We cannot consider pointers to different address space equal. */
2426 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2427 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2428 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2429 return 0;
2430
096dce1b
RG
2431 /* If both types don't have the same precision, then it is not safe
2432 to strip NOPs. */
2433 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2434 return 0;
2435
6d716ca8
RS
2436 STRIP_NOPS (arg0);
2437 STRIP_NOPS (arg1);
2438
a04d8591
RG
2439 /* In case both args are comparisons but with different comparison
2440 code, try to swap the comparison operands of one arg to produce
2441 a match and compare that variant. */
2442 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2443 && COMPARISON_CLASS_P (arg0)
2444 && COMPARISON_CLASS_P (arg1))
2445 {
2446 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2447
2448 if (TREE_CODE (arg0) == swap_code)
2449 return operand_equal_p (TREE_OPERAND (arg0, 0),
2450 TREE_OPERAND (arg1, 1), flags)
2451 && operand_equal_p (TREE_OPERAND (arg0, 1),
2452 TREE_OPERAND (arg1, 0), flags);
2453 }
2454
c7cfe938
RK
2455 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2456 /* This is needed for conversions and for COMPONENT_REF.
2457 Might as well play it safe and always test this. */
e89a9554
ZW
2458 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2459 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
c7cfe938 2460 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
6d716ca8
RS
2461 return 0;
2462
c7cfe938
RK
2463 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2464 We don't care about side effects in that case because the SAVE_EXPR
2465 takes care of that for us. In all other cases, two expressions are
2466 equal if they have no side effects. If we have two identical
2467 expressions with side effects that should be treated the same due
2468 to the only side effects being identical SAVE_EXPR's, that will
2469 be detected in the recursive calls below. */
6de9cd9a 2470 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
c7cfe938
RK
2471 && (TREE_CODE (arg0) == SAVE_EXPR
2472 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
6d716ca8
RS
2473 return 1;
2474
c7cfe938
RK
2475 /* Next handle constant cases, those for which we can return 1 even
2476 if ONLY_CONST is set. */
2477 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2478 switch (TREE_CODE (arg0))
2479 {
2480 case INTEGER_CST:
85914552 2481 return tree_int_cst_equal (arg0, arg1);
c7cfe938 2482
325217ed
CF
2483 case FIXED_CST:
2484 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2485 TREE_FIXED_CST (arg1));
2486
c7cfe938 2487 case REAL_CST:
0446c9f3
ZD
2488 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2489 TREE_REAL_CST (arg1)))
2490 return 1;
2491
b8698a0f 2492
0446c9f3
ZD
2493 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2494 {
2495 /* If we do not distinguish between signed and unsigned zero,
2496 consider them equal. */
2497 if (real_zerop (arg0) && real_zerop (arg1))
2498 return 1;
2499 }
2500 return 0;
c7cfe938 2501
69ef87e2
AH
2502 case VECTOR_CST:
2503 {
2504 tree v1, v2;
2505
69ef87e2
AH
2506 v1 = TREE_VECTOR_CST_ELTS (arg0);
2507 v2 = TREE_VECTOR_CST_ELTS (arg1);
2508 while (v1 && v2)
2509 {
875427f0 2510 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
6de9cd9a 2511 flags))
69ef87e2
AH
2512 return 0;
2513 v1 = TREE_CHAIN (v1);
2514 v2 = TREE_CHAIN (v2);
2515 }
2516
40182dbf 2517 return v1 == v2;
69ef87e2
AH
2518 }
2519
c7cfe938
RK
2520 case COMPLEX_CST:
2521 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
6de9cd9a 2522 flags)
c7cfe938 2523 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
6de9cd9a 2524 flags));
c7cfe938
RK
2525
2526 case STRING_CST:
2527 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
71145810 2528 && ! memcmp (TREE_STRING_POINTER (arg0),
c7cfe938
RK
2529 TREE_STRING_POINTER (arg1),
2530 TREE_STRING_LENGTH (arg0)));
2531
2532 case ADDR_EXPR:
2533 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2534 0);
e9a25f70
JL
2535 default:
2536 break;
c7cfe938 2537 }
6d716ca8 2538
6de9cd9a 2539 if (flags & OEP_ONLY_CONST)
6d716ca8
RS
2540 return 0;
2541
38318b73 2542/* Define macros to test an operand from arg0 and arg1 for equality and a
624b15fa
RK
2543 variant that allows null and views null as being different from any
 2544   non-null value.  In the latter case, if either is null, they both
2545 must be; otherwise, do the normal comparison. */
2546#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2547 TREE_OPERAND (arg1, N), flags)
2548
2549#define OP_SAME_WITH_NULL(N) \
2550 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2551 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2552
6d716ca8
RS
2553 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2554 {
6615c446 2555 case tcc_unary:
6d716ca8 2556 /* Two conversions are equal only if signedness and modes match. */
266bff3a
JJ
2557 switch (TREE_CODE (arg0))
2558 {
1043771b 2559 CASE_CONVERT:
266bff3a 2560 case FIX_TRUNC_EXPR:
266bff3a
JJ
2561 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2562 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2563 return 0;
2564 break;
2565 default:
2566 break;
2567 }
6d716ca8 2568
624b15fa
RK
2569 return OP_SAME (0);
2570
6d716ca8 2571
6615c446
JO
2572 case tcc_comparison:
2573 case tcc_binary:
624b15fa 2574 if (OP_SAME (0) && OP_SAME (1))
c7cfe938
RK
2575 return 1;
2576
2577 /* For commutative ops, allow the other order. */
3168cb99 2578 return (commutative_tree_code (TREE_CODE (arg0))
c7cfe938 2579 && operand_equal_p (TREE_OPERAND (arg0, 0),
6de9cd9a 2580 TREE_OPERAND (arg1, 1), flags)
6d716ca8 2581 && operand_equal_p (TREE_OPERAND (arg0, 1),
6de9cd9a 2582 TREE_OPERAND (arg1, 0), flags));
6d716ca8 2583
6615c446 2584 case tcc_reference:
21c43754
RS
2585 /* If either of the pointer (or reference) expressions we are
2586 dereferencing contain a side effect, these cannot be equal. */
05ca5990
GRK
2587 if (TREE_SIDE_EFFECTS (arg0)
2588 || TREE_SIDE_EFFECTS (arg1))
2589 return 0;
2590
6d716ca8
RS
2591 switch (TREE_CODE (arg0))
2592 {
2593 case INDIRECT_REF:
497be978
RH
2594 case REALPART_EXPR:
2595 case IMAGPART_EXPR:
624b15fa 2596 return OP_SAME (0);
6d716ca8 2597
70f34814 2598 case MEM_REF:
359bea1d
AO
2599 /* Require equal access sizes, and similar pointer types.
2600 We can have incomplete types for array references of
 2601	     variable-sized arrays from the Fortran frontend
2602 though. */
70f34814
RG
2603 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2604 || (TYPE_SIZE (TREE_TYPE (arg0))
2605 && TYPE_SIZE (TREE_TYPE (arg1))
2606 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2607 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
359bea1d
AO
2608 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2609 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
70f34814
RG
2610 && OP_SAME (0) && OP_SAME (1));
2611
6d716ca8 2612 case ARRAY_REF:
b4e3fabb 2613 case ARRAY_RANGE_REF:
5852948c
RG
2614 /* Operands 2 and 3 may be null.
2615 Compare the array index by value if it is constant first as we
2616 may have different types but same value here. */
624b15fa 2617 return (OP_SAME (0)
5852948c
RG
2618 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2619 TREE_OPERAND (arg1, 1))
2620 || OP_SAME (1))
624b15fa
RK
2621 && OP_SAME_WITH_NULL (2)
2622 && OP_SAME_WITH_NULL (3));
462fdcce
RK
2623
2624 case COMPONENT_REF:
78b76d08
SB
2625 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2626 may be NULL when we're called to compare MEM_EXPRs. */
2627 return OP_SAME_WITH_NULL (0)
2628 && OP_SAME (1)
2629 && OP_SAME_WITH_NULL (2);
a60749f5 2630
40b32ef8 2631 case BIT_FIELD_REF:
624b15fa
RK
2632 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2633
e9a25f70
JL
2634 default:
2635 return 0;
6d716ca8 2636 }
45f97e2e 2637
6615c446 2638 case tcc_expression:
1bfedcc8
JM
2639 switch (TREE_CODE (arg0))
2640 {
2641 case ADDR_EXPR:
2642 case TRUTH_NOT_EXPR:
624b15fa 2643 return OP_SAME (0);
1bfedcc8 2644
54d581a2
RS
2645 case TRUTH_ANDIF_EXPR:
2646 case TRUTH_ORIF_EXPR:
624b15fa 2647 return OP_SAME (0) && OP_SAME (1);
54d581a2 2648
180ed1b2
RH
2649 case FMA_EXPR:
2650 case WIDEN_MULT_PLUS_EXPR:
2651 case WIDEN_MULT_MINUS_EXPR:
2652 if (!OP_SAME (2))
2653 return 0;
 2654	  /* The multiplication operands are commutative.  */
2655 /* FALLTHRU */
2656
54d581a2
RS
2657 case TRUTH_AND_EXPR:
2658 case TRUTH_OR_EXPR:
2659 case TRUTH_XOR_EXPR:
624b15fa
RK
2660 if (OP_SAME (0) && OP_SAME (1))
2661 return 1;
2662
2663 /* Otherwise take into account this is a commutative operation. */
54d581a2 2664 return (operand_equal_p (TREE_OPERAND (arg0, 0),
624b15fa 2665 TREE_OPERAND (arg1, 1), flags)
54d581a2 2666 && operand_equal_p (TREE_OPERAND (arg0, 1),
624b15fa 2667 TREE_OPERAND (arg1, 0), flags));
54d581a2 2668
05f41289 2669 case COND_EXPR:
180ed1b2
RH
2670 case VEC_COND_EXPR:
2671 case DOT_PROD_EXPR:
05f41289 2672 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
b8698a0f 2673
5039610b
SL
2674 default:
2675 return 0;
2676 }
2677
2678 case tcc_vl_exp:
2679 switch (TREE_CODE (arg0))
2680 {
21c43754
RS
2681 case CALL_EXPR:
2682 /* If the CALL_EXPRs call different functions, then they
2683 clearly can not be equal. */
5039610b
SL
2684 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2685 flags))
21c43754
RS
2686 return 0;
2687
6de9cd9a
DN
2688 {
2689 unsigned int cef = call_expr_flags (arg0);
2690 if (flags & OEP_PURE_SAME)
2691 cef &= ECF_CONST | ECF_PURE;
2692 else
2693 cef &= ECF_CONST;
2694 if (!cef)
2695 return 0;
2696 }
21c43754 2697
5039610b
SL
2698 /* Now see if all the arguments are the same. */
2699 {
fa233e34
KG
2700 const_call_expr_arg_iterator iter0, iter1;
2701 const_tree a0, a1;
2702 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2703 a1 = first_const_call_expr_arg (arg1, &iter1);
5039610b 2704 a0 && a1;
fa233e34
KG
2705 a0 = next_const_call_expr_arg (&iter0),
2706 a1 = next_const_call_expr_arg (&iter1))
5039610b 2707 if (! operand_equal_p (a0, a1, flags))
21c43754
RS
2708 return 0;
2709
5039610b
SL
2710 /* If we get here and both argument lists are exhausted
2711 then the CALL_EXPRs are equal. */
2712 return ! (a0 || a1);
2713 }
1bfedcc8
JM
2714 default:
2715 return 0;
2716 }
b6cc0a72 2717
6615c446 2718 case tcc_declaration:
6de9cd9a
DN
2719 /* Consider __builtin_sqrt equal to sqrt. */
2720 return (TREE_CODE (arg0) == FUNCTION_DECL
2721 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2722 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2723 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
21c43754 2724
e9a25f70
JL
2725 default:
2726 return 0;
6d716ca8 2727 }
624b15fa
RK
2728
2729#undef OP_SAME
2730#undef OP_SAME_WITH_NULL
6d716ca8 2731}
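/* Illustration of the semantics documented above: with FLAGS == 0, two
   structurally identical trees for "x + 1" (with x the same VAR_DECL)
   compare equal because neither has side effects, whereas two calls to
   an ordinary function f do not, since the calls have side effects and
   are not known to be const.  OEP_ONLY_CONST restricts the answer to
   constants; OEP_PURE_SAME additionally lets two calls to the same
   const or pure function with pairwise equal arguments compare equal.  */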
c05a9b68
RS
2732\f
2733/* Similar to operand_equal_p, but see if ARG0 might have been made by
b6cc0a72 2734 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
6d716ca8 2735
6d716ca8
RS
2736 When in doubt, return 0. */
2737
b6cc0a72 2738static int
fa8db1f7 2739operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
6d716ca8 2740{
c05a9b68 2741 int unsignedp1, unsignedpo;
52de9b6c 2742 tree primarg0, primarg1, primother;
770ae6cc 2743 unsigned int correct_width;
6d716ca8 2744
c05a9b68 2745 if (operand_equal_p (arg0, arg1, 0))
6d716ca8
RS
2746 return 1;
2747
0982a4b8
JM
2748 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2749 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
6d716ca8
RS
2750 return 0;
2751
52de9b6c
RK
2752 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2753 and see if the inner values are the same. This removes any
2754 signedness comparison, which doesn't matter here. */
2755 primarg0 = arg0, primarg1 = arg1;
b6cc0a72
KH
2756 STRIP_NOPS (primarg0);
2757 STRIP_NOPS (primarg1);
52de9b6c
RK
2758 if (operand_equal_p (primarg0, primarg1, 0))
2759 return 1;
2760
c05a9b68
RS
2761 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2762 actual comparison operand, ARG0.
6d716ca8 2763
c05a9b68 2764 First throw away any conversions to wider types
6d716ca8 2765 already present in the operands. */
6d716ca8 2766
c05a9b68
RS
2767 primarg1 = get_narrower (arg1, &unsignedp1);
2768 primother = get_narrower (other, &unsignedpo);
2769
2770 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2771 if (unsignedp1 == unsignedpo
2772 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2773 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
6d716ca8 2774 {
c05a9b68 2775 tree type = TREE_TYPE (arg0);
6d716ca8
RS
2776
2777 /* Make sure shorter operand is extended the right way
2778 to match the longer operand. */
12753674 2779 primarg1 = fold_convert (signed_or_unsigned_type_for
088414c1 2780 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
6d716ca8 2781
088414c1 2782 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
6d716ca8
RS
2783 return 1;
2784 }
2785
2786 return 0;
2787}
2788\f
f72aed24 2789/* See if ARG is an expression that is either a comparison or is performing
c05a9b68
RS
2790 arithmetic on comparisons. The comparisons must only be comparing
2791 two different values, which will be stored in *CVAL1 and *CVAL2; if
cc2902df 2792 they are nonzero it means that some operands have already been found.
c05a9b68 2793 No variables may be used anywhere else in the expression except in the
35e66bd1
RK
2794 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2795 the expression and save_expr needs to be called with CVAL1 and CVAL2.
c05a9b68
RS
2796
2797 If this is true, return 1. Otherwise, return zero. */
2798
2799static int
fa8db1f7 2800twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
c05a9b68
RS
2801{
2802 enum tree_code code = TREE_CODE (arg);
82d6e6fc 2803 enum tree_code_class tclass = TREE_CODE_CLASS (code);
c05a9b68 2804
6615c446 2805 /* We can handle some of the tcc_expression cases here. */
82d6e6fc
KG
2806 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2807 tclass = tcc_unary;
2808 else if (tclass == tcc_expression
c05a9b68
RS
2809 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2810 || code == COMPOUND_EXPR))
82d6e6fc 2811 tclass = tcc_binary;
2315a5db 2812
82d6e6fc 2813 else if (tclass == tcc_expression && code == SAVE_EXPR
d4b60170 2814 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
35e66bd1
RK
2815 {
2816 /* If we've already found a CVAL1 or CVAL2, this expression is
 2817	 too complex to handle.  */
2818 if (*cval1 || *cval2)
2819 return 0;
2820
82d6e6fc 2821 tclass = tcc_unary;
35e66bd1
RK
2822 *save_p = 1;
2823 }
c05a9b68 2824
82d6e6fc 2825 switch (tclass)
c05a9b68 2826 {
6615c446 2827 case tcc_unary:
35e66bd1 2828 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
c05a9b68 2829
6615c446 2830 case tcc_binary:
35e66bd1
RK
2831 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2832 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2833 cval1, cval2, save_p));
c05a9b68 2834
6615c446 2835 case tcc_constant:
c05a9b68
RS
2836 return 1;
2837
6615c446 2838 case tcc_expression:
c05a9b68 2839 if (code == COND_EXPR)
35e66bd1
RK
2840 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2841 cval1, cval2, save_p)
2842 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2843 cval1, cval2, save_p)
c05a9b68 2844 && twoval_comparison_p (TREE_OPERAND (arg, 2),
35e66bd1 2845 cval1, cval2, save_p));
c05a9b68 2846 return 0;
b6cc0a72 2847
6615c446 2848 case tcc_comparison:
c05a9b68
RS
2849 /* First see if we can handle the first operand, then the second. For
2850 the second operand, we know *CVAL1 can't be zero. It must be that
2851 one side of the comparison is each of the values; test for the
2852 case where this isn't true by failing if the two operands
2853 are the same. */
2854
2855 if (operand_equal_p (TREE_OPERAND (arg, 0),
2856 TREE_OPERAND (arg, 1), 0))
2857 return 0;
2858
2859 if (*cval1 == 0)
2860 *cval1 = TREE_OPERAND (arg, 0);
2861 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2862 ;
2863 else if (*cval2 == 0)
2864 *cval2 = TREE_OPERAND (arg, 0);
2865 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2866 ;
2867 else
2868 return 0;
2869
2870 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2871 ;
2872 else if (*cval2 == 0)
2873 *cval2 = TREE_OPERAND (arg, 1);
2874 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2875 ;
2876 else
2877 return 0;
2878
2879 return 1;
c05a9b68 2880
e9a25f70
JL
2881 default:
2882 return 0;
2883 }
c05a9b68
RS
2884}
2885\f
2886/* ARG is a tree that is known to contain just arithmetic operations and
2887 comparisons. Evaluate the operations in the tree substituting NEW0 for
f72aed24 2888 any occurrence of OLD0 as an operand of a comparison and likewise for
c05a9b68
RS
2889 NEW1 and OLD1. */
2890
2891static tree
db3927fb
AH
2892eval_subst (location_t loc, tree arg, tree old0, tree new0,
2893 tree old1, tree new1)
c05a9b68
RS
2894{
2895 tree type = TREE_TYPE (arg);
2896 enum tree_code code = TREE_CODE (arg);
82d6e6fc 2897 enum tree_code_class tclass = TREE_CODE_CLASS (code);
c05a9b68 2898
6615c446 2899 /* We can handle some of the tcc_expression cases here. */
82d6e6fc
KG
2900 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2901 tclass = tcc_unary;
2902 else if (tclass == tcc_expression
c05a9b68 2903 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
82d6e6fc 2904 tclass = tcc_binary;
c05a9b68 2905
82d6e6fc 2906 switch (tclass)
c05a9b68 2907 {
6615c446 2908 case tcc_unary:
db3927fb
AH
2909 return fold_build1_loc (loc, code, type,
2910 eval_subst (loc, TREE_OPERAND (arg, 0),
7f20a5b7 2911 old0, new0, old1, new1));
c05a9b68 2912
6615c446 2913 case tcc_binary:
db3927fb
AH
2914 return fold_build2_loc (loc, code, type,
2915 eval_subst (loc, TREE_OPERAND (arg, 0),
7f20a5b7 2916 old0, new0, old1, new1),
db3927fb 2917 eval_subst (loc, TREE_OPERAND (arg, 1),
7f20a5b7 2918 old0, new0, old1, new1));
c05a9b68 2919
6615c446 2920 case tcc_expression:
c05a9b68
RS
2921 switch (code)
2922 {
2923 case SAVE_EXPR:
db3927fb
AH
2924 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2925 old1, new1);
c05a9b68
RS
2926
2927 case COMPOUND_EXPR:
db3927fb
AH
2928 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2929 old1, new1);
c05a9b68
RS
2930
2931 case COND_EXPR:
db3927fb
AH
2932 return fold_build3_loc (loc, code, type,
2933 eval_subst (loc, TREE_OPERAND (arg, 0),
7f20a5b7 2934 old0, new0, old1, new1),
db3927fb 2935 eval_subst (loc, TREE_OPERAND (arg, 1),
7f20a5b7 2936 old0, new0, old1, new1),
db3927fb 2937 eval_subst (loc, TREE_OPERAND (arg, 2),
7f20a5b7 2938 old0, new0, old1, new1));
e9a25f70
JL
2939 default:
2940 break;
c05a9b68 2941 }
938d968e 2942 /* Fall through - ??? */
c05a9b68 2943
6615c446 2944 case tcc_comparison:
c05a9b68
RS
2945 {
2946 tree arg0 = TREE_OPERAND (arg, 0);
2947 tree arg1 = TREE_OPERAND (arg, 1);
2948
2949 /* We need to check both for exact equality and tree equality. The
2950 former will be true if the operand has a side-effect. In that
2951 case, we know the operand occurred exactly once. */
2952
2953 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2954 arg0 = new0;
2955 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2956 arg0 = new1;
2957
2958 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2959 arg1 = new0;
2960 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2961 arg1 = new1;
2962
db3927fb 2963 return fold_build2_loc (loc, code, type, arg0, arg1);
c05a9b68 2964 }
c05a9b68 2965
e9a25f70
JL
2966 default:
2967 return arg;
2968 }
c05a9b68
RS
2969}
2970\f
6d716ca8
RS
2971/* Return a tree for the case when the result of an expression is RESULT
2972 converted to TYPE and OMITTED was previously an operand of the expression
2973 but is now not needed (e.g., we folded OMITTED * 0).
2974
2975 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2976 the conversion of RESULT to TYPE. */
2977
c0a47a61 2978tree
db3927fb 2979omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
6d716ca8 2980{
db3927fb 2981 tree t = fold_convert_loc (loc, type, result);
6d716ca8 2982
15dc95cb 2983 /* If the resulting operand is an empty statement, just return the omitted
e057e0cd
AP
 2984     statement cast to void. */
2985 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
c9019218
JJ
2986 return build1_loc (loc, NOP_EXPR, void_type_node,
2987 fold_ignored_result (omitted));
e057e0cd 2988
6d716ca8 2989 if (TREE_SIDE_EFFECTS (omitted))
c9019218
JJ
2990 return build2_loc (loc, COMPOUND_EXPR, type,
2991 fold_ignored_result (omitted), t);
db3927fb
AH
2992
2993 return non_lvalue_loc (loc, t);
6d716ca8 2994}
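/* For instance, "f () * 0" cannot be folded to a plain 0 because the
   call must still be evaluated; omit_one_operand_loc with RESULT == 0
   and OMITTED == the call instead produces the equivalent of
   "(f (), 0)" -- a COMPOUND_EXPR that keeps the side effect while
   yielding the folded value.  If the omitted operand has no side
   effects, only the conversion of RESULT to TYPE remains.  */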
4ab3cb65
RK
2995
2996/* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2997
2998static tree
db3927fb
AH
2999pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3000 tree omitted)
4ab3cb65 3001{
db3927fb 3002 tree t = fold_convert_loc (loc, type, result);
4ab3cb65 3003
15dc95cb 3004 /* If the resulting operand is an empty statement, just return the omitted
e057e0cd
AP
 3005     statement cast to void. */
3006 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
c9019218
JJ
3007 return build1_loc (loc, NOP_EXPR, void_type_node,
3008 fold_ignored_result (omitted));
e057e0cd 3009
4ab3cb65 3010 if (TREE_SIDE_EFFECTS (omitted))
c9019218
JJ
3011 return build2_loc (loc, COMPOUND_EXPR, type,
3012 fold_ignored_result (omitted), t);
4ab3cb65 3013
db3927fb 3014 return pedantic_non_lvalue_loc (loc, t);
4ab3cb65 3015}
08039bd8
RS
3016
3017/* Return a tree for the case when the result of an expression is RESULT
3018 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3019 of the expression but are now not needed.
3020
3021 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3022 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3023 evaluated before OMITTED2. Otherwise, if neither has side effects,
3024 just do the conversion of RESULT to TYPE. */
3025
3026tree
db3927fb 3027omit_two_operands_loc (location_t loc, tree type, tree result,
c9019218 3028 tree omitted1, tree omitted2)
08039bd8 3029{
db3927fb 3030 tree t = fold_convert_loc (loc, type, result);
08039bd8
RS
3031
3032 if (TREE_SIDE_EFFECTS (omitted2))
c9019218 3033 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
08039bd8 3034 if (TREE_SIDE_EFFECTS (omitted1))
c9019218 3035 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
08039bd8 3036
db3927fb 3037 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
08039bd8
RS
3038}
3039
6d716ca8 3040\f
3f783329
RS
3041/* Return a simplified tree node for the truth-negation of ARG. This
3042 never alters ARG itself. We assume that ARG is an operation that
d1a7edaf 3043 returns a truth value (0 or 1).
6d716ca8 3044
d1a7edaf
PB
3045 FIXME: one would think we would fold the result, but it causes
3046 problems with the dominator optimizer. */
d817ed3b 3047
6d716ca8 3048tree
db3927fb 3049fold_truth_not_expr (location_t loc, tree arg)
6d716ca8 3050{
c9019218 3051 tree type = TREE_TYPE (arg);
c05a9b68 3052 enum tree_code code = TREE_CODE (arg);
db3927fb 3053 location_t loc1, loc2;
6d716ca8 3054
c05a9b68
RS
3055 /* If this is a comparison, we can simply invert it, except for
3056 floating-point non-equality comparisons, in which case we just
3057 enclose a TRUTH_NOT_EXPR around what we have. */
6d716ca8 3058
6615c446 3059 if (TREE_CODE_CLASS (code) == tcc_comparison)
6d716ca8 3060 {
d1a7edaf
PB
3061 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3062 if (FLOAT_TYPE_P (op_type)
3063 && flag_trapping_math
3064 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3065 && code != NE_EXPR && code != EQ_EXPR)
d817ed3b 3066 return NULL_TREE;
ca80e52b
EB
3067
3068 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3069 if (code == ERROR_MARK)
3070 return NULL_TREE;
3071
c9019218
JJ
3072 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3073 TREE_OPERAND (arg, 1));
c05a9b68 3074 }
6d716ca8 3075
c05a9b68
RS
3076 switch (code)
3077 {
6d716ca8 3078 case INTEGER_CST:
9ace7f9e 3079 return constant_boolean_node (integer_zerop (arg), type);
6d716ca8
RS
3080
3081 case TRUTH_AND_EXPR:
db3927fb
AH
3082 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3083 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3084 if (loc1 == UNKNOWN_LOCATION)
3085 loc1 = loc;
3086 if (loc2 == UNKNOWN_LOCATION)
3087 loc2 = loc;
c9019218
JJ
3088 return build2_loc (loc, TRUTH_OR_EXPR, type,
3089 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3090 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
6d716ca8
RS
3091
3092 case TRUTH_OR_EXPR:
db3927fb
AH
3093 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3094 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3095 if (loc1 == UNKNOWN_LOCATION)
3096 loc1 = loc;
3097 if (loc2 == UNKNOWN_LOCATION)
3098 loc2 = loc;
c9019218
JJ
3099 return build2_loc (loc, TRUTH_AND_EXPR, type,
3100 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3101 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
6d716ca8 3102
772447c5
RK
3103 case TRUTH_XOR_EXPR:
3104 /* Here we can invert either operand. We invert the first operand
3105 unless the second operand is a TRUTH_NOT_EXPR in which case our
3106 result is the XOR of the first operand with the inside of the
3107 negation of the second operand. */
3108
3109 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
c9019218
JJ
3110 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3111 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
772447c5 3112 else
c9019218
JJ
3113 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3114 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3115 TREE_OPERAND (arg, 1));
772447c5 3116
6d716ca8 3117 case TRUTH_ANDIF_EXPR:
db3927fb
AH
3118 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3119 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3120 if (loc1 == UNKNOWN_LOCATION)
3121 loc1 = loc;
3122 if (loc2 == UNKNOWN_LOCATION)
3123 loc2 = loc;
c9019218
JJ
3124 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3125 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3126 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
6d716ca8
RS
3127
3128 case TRUTH_ORIF_EXPR:
db3927fb
AH
3129 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3130 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3131 if (loc1 == UNKNOWN_LOCATION)
3132 loc1 = loc;
3133 if (loc2 == UNKNOWN_LOCATION)
3134 loc2 = loc;
c9019218
JJ
3135 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3136 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3137 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
6d716ca8
RS
3138
3139 case TRUTH_NOT_EXPR:
3140 return TREE_OPERAND (arg, 0);
3141
3142 case COND_EXPR:
9ca4afb9
RG
3143 {
3144 tree arg1 = TREE_OPERAND (arg, 1);
3145 tree arg2 = TREE_OPERAND (arg, 2);
db3927fb
AH
3146
3147 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3148 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3149 if (loc1 == UNKNOWN_LOCATION)
3150 loc1 = loc;
3151 if (loc2 == UNKNOWN_LOCATION)
3152 loc2 = loc;
3153
9ca4afb9
RG
3154 /* A COND_EXPR may have a throw as one operand, which
3155 then has void type. Just leave void operands
3156 as they are. */
c9019218
JJ
3157 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3158 VOID_TYPE_P (TREE_TYPE (arg1))
3159 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3160 VOID_TYPE_P (TREE_TYPE (arg2))
3161 ? arg2 : invert_truthvalue_loc (loc2, arg2));
9ca4afb9 3162 }
6d716ca8 3163
ef9fe0da 3164 case COMPOUND_EXPR:
db3927fb
AH
3165 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3166 if (loc1 == UNKNOWN_LOCATION)
3167 loc1 = loc;
c9019218
JJ
3168 return build2_loc (loc, COMPOUND_EXPR, type,
3169 TREE_OPERAND (arg, 0),
3170 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
ef9fe0da 3171
6d716ca8 3172 case NON_LVALUE_EXPR:
db3927fb
AH
3173 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3174 if (loc1 == UNKNOWN_LOCATION)
3175 loc1 = loc;
3176 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
6d716ca8 3177
84fb43a1 3178 CASE_CONVERT:
6de9cd9a 3179 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
c9019218 3180 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
ca80e52b
EB
3181
3182 /* ... fall through ... */
6de9cd9a 3183
6d716ca8 3184 case FLOAT_EXPR:
db3927fb
AH
3185 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3186 if (loc1 == UNKNOWN_LOCATION)
3187 loc1 = loc;
c9019218
JJ
3188 return build1_loc (loc, TREE_CODE (arg), type,
3189 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
6d716ca8
RS
3190
3191 case BIT_AND_EXPR:
efc1a4d9 3192 if (!integer_onep (TREE_OPERAND (arg, 1)))
ca80e52b 3193 return NULL_TREE;
c9019218 3194 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
6d716ca8 3195
dfa90b42 3196 case SAVE_EXPR:
c9019218 3197 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
a25ee332
RK
3198
3199 case CLEANUP_POINT_EXPR:
db3927fb
AH
3200 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3201 if (loc1 == UNKNOWN_LOCATION)
3202 loc1 = loc;
c9019218
JJ
3203 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3204 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
e9a25f70
JL
3205
3206 default:
c9019218 3207 return NULL_TREE;
efc1a4d9 3208 }
d817ed3b
RG
3209}
3210
3211/* Return a simplified tree node for the truth-negation of ARG. This
3212 never alters ARG itself. We assume that ARG is an operation that
3213 returns a truth value (0 or 1).
3214
3215 FIXME: one would think we would fold the result, but it causes
3216 problems with the dominator optimizer. */
3217
3218tree
db3927fb 3219invert_truthvalue_loc (location_t loc, tree arg)
d817ed3b
RG
3220{
3221 tree tem;
3222
3223 if (TREE_CODE (arg) == ERROR_MARK)
3224 return arg;
3225
db3927fb 3226 tem = fold_truth_not_expr (loc, arg);
d817ed3b 3227 if (!tem)
c9019218 3228 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
d817ed3b
RG
3229
3230 return tem;
6d716ca8
RS
3231}
3232
3233/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3234 operands are another bit-wise operation with a common input. If so,
3235 distribute the bit operations to save an operation and possibly two if
3236 constants are involved. For example, convert
fa8db1f7 3237 (A | B) & (A | C) into A | (B & C)
6d716ca8
RS
3238 Further simplification will occur if B and C are constants.
3239
3240 If this optimization cannot be done, 0 will be returned. */
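/* Illustrative sketch, with hypothetical constant operands B == 3 and C == 5:

     (x | 3) & (x | 5)
       -->  x | (3 & 5)      distribute the common operand x
       -->  x | 1            further constant folding of (3 & 5)

   One bit-wise operation is saved, and the remaining operation now has a
   constant operand that later folding can exploit.  */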
3241
3242static tree
db3927fb
AH
3243distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3244 tree arg0, tree arg1)
6d716ca8
RS
3245{
3246 tree common;
3247 tree left, right;
3248
3249 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3250 || TREE_CODE (arg0) == code
fced8ba3
RS
3251 || (TREE_CODE (arg0) != BIT_AND_EXPR
3252 && TREE_CODE (arg0) != BIT_IOR_EXPR))
6d716ca8
RS
3253 return 0;
3254
3255 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3256 {
3257 common = TREE_OPERAND (arg0, 0);
3258 left = TREE_OPERAND (arg0, 1);
3259 right = TREE_OPERAND (arg1, 1);
3260 }
3261 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3262 {
3263 common = TREE_OPERAND (arg0, 0);
3264 left = TREE_OPERAND (arg0, 1);
3265 right = TREE_OPERAND (arg1, 0);
3266 }
3267 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3268 {
3269 common = TREE_OPERAND (arg0, 1);
3270 left = TREE_OPERAND (arg0, 0);
3271 right = TREE_OPERAND (arg1, 1);
3272 }
3273 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3274 {
3275 common = TREE_OPERAND (arg0, 1);
3276 left = TREE_OPERAND (arg0, 0);
3277 right = TREE_OPERAND (arg1, 0);
3278 }
3279 else
3280 return 0;
3281
db3927fb
AH
3282 common = fold_convert_loc (loc, type, common);
3283 left = fold_convert_loc (loc, type, left);
3284 right = fold_convert_loc (loc, type, right);
3285 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3286 fold_build2_loc (loc, code, type, left, right));
6d716ca8 3287}
f8912a55
PB
3288
3289/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3290 with code CODE. This optimization is unsafe. */
3291static tree
db3927fb
AH
3292distribute_real_division (location_t loc, enum tree_code code, tree type,
3293 tree arg0, tree arg1)
f8912a55
PB
3294{
3295 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3296 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3297
3298 /* (A / C) +- (B / C) -> (A +- B) / C. */
3299 if (mul0 == mul1
3300 && operand_equal_p (TREE_OPERAND (arg0, 1),
3301 TREE_OPERAND (arg1, 1), 0))
db3927fb
AH
3302 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3303 fold_build2_loc (loc, code, type,
f8912a55
PB
3304 TREE_OPERAND (arg0, 0),
3305 TREE_OPERAND (arg1, 0)),
3306 TREE_OPERAND (arg0, 1));
3307
3308 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3309 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3310 TREE_OPERAND (arg1, 0), 0)
3311 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3312 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3313 {
3314 REAL_VALUE_TYPE r0, r1;
3315 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3316 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3317 if (!mul0)
3318 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3319 if (!mul1)
3320 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3321 real_arithmetic (&r0, code, &r0, &r1);
db3927fb 3322 return fold_build2_loc (loc, MULT_EXPR, type,
f8912a55
PB
3323 TREE_OPERAND (arg0, 0),
3324 build_real (type, r0));
3325 }
3326
3327 return NULL_TREE;
3328}
6d716ca8 3329\f
45dc13b9
JJ
3330/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3331 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3332
3333static tree
db3927fb
AH
3334make_bit_field_ref (location_t loc, tree inner, tree type,
3335 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
45dc13b9
JJ
3336{
3337 tree result, bftype;
3338
3339 if (bitpos == 0)
3340 {
3341 tree size = TYPE_SIZE (TREE_TYPE (inner));
3342 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3343 || POINTER_TYPE_P (TREE_TYPE (inner)))
b8698a0f 3344 && host_integerp (size, 0)
45dc13b9 3345 && tree_low_cst (size, 0) == bitsize)
db3927fb 3346 return fold_convert_loc (loc, type, inner);
45dc13b9
JJ
3347 }
3348
3349 bftype = type;
3350 if (TYPE_PRECISION (bftype) != bitsize
3351 || TYPE_UNSIGNED (bftype) == !unsignedp)
3352 bftype = build_nonstandard_integer_type (bitsize, 0);
3353
c9019218
JJ
3354 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3355 size_int (bitsize), bitsize_int (bitpos));
45dc13b9
JJ
3356
3357 if (bftype != type)
db3927fb 3358 result = fold_convert_loc (loc, type, result);
45dc13b9
JJ
3359
3360 return result;
3361}
3362
3363/* Optimize a bit-field compare.
3364
3365 There are two cases: First is a compare against a constant and the
3366 second is a comparison of two items where the fields are at the same
3367 bit position relative to the start of a chunk (byte, halfword, word)
3368 large enough to contain it. In these cases we can avoid the shift
3369 implicit in bitfield extractions.
3370
3371 For constants, we emit a compare of the shifted constant with the
3372 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3373 compared. For two fields at the same position, we do the ANDs with the
3374 similar mask and compare the result of the ANDs.
3375
3376 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3377 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3378 are the left and right operands of the comparison, respectively.
3379
3380 If the optimization described above can be done, we return the resulting
3381 tree. Otherwise we return zero. */
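/* Illustrative sketch with hypothetical declarations (not from this file):
   given

     struct s { unsigned int pad : 5; unsigned int f : 3; } x;

   the comparison x.f == 5 can be done without extracting the field: load
   the mode-sized chunk containing the bit-field, mask off the field's bits,
   and compare against the constant shifted into place, roughly

     (WORD(x) & (7 << 5)) == (5 << 5)

   where WORD(x) stands for the chunk chosen via get_best_mode and the shift
   count depends on the target's bit ordering.  */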
3382
3383static tree
db3927fb
AH
3384optimize_bit_field_compare (location_t loc, enum tree_code code,
3385 tree compare_type, tree lhs, tree rhs)
45dc13b9
JJ
3386{
3387 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3388 tree type = TREE_TYPE (lhs);
3389 tree signed_type, unsigned_type;
3390 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3391 enum machine_mode lmode, rmode, nmode;
3392 int lunsignedp, runsignedp;
3393 int lvolatilep = 0, rvolatilep = 0;
3394 tree linner, rinner = NULL_TREE;
3395 tree mask;
3396 tree offset;
3397
3398 /* Get all the information about the extractions being done. If the bit size
3399 is the same as the size of the underlying object, we aren't doing an

3400 extraction at all and so can do nothing. We also don't want to
3401 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3402 then will no longer be able to replace it. */
3403 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3404 &lunsignedp, &lvolatilep, false);
3405 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3406 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3407 return 0;
3408
3409 if (!const_p)
3410 {
3411 /* If this is not a constant, we can only do something if bit positions,
3412 sizes, and signedness are the same. */
3413 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3414 &runsignedp, &rvolatilep, false);
3415
3416 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3417 || lunsignedp != runsignedp || offset != 0
3418 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3419 return 0;
3420 }
3421
3422 /* See if we can find a mode to refer to this field. We should be able to,
3423 but fail if we can't. */
6a78b724
DD
3424 if (lvolatilep
3425 && GET_MODE_BITSIZE (lmode) > 0
3426 && flag_strict_volatile_bitfields > 0)
3427 nmode = lmode;
3428 else
3429 nmode = get_best_mode (lbitsize, lbitpos,
3430 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3431 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3432 TYPE_ALIGN (TREE_TYPE (rinner))),
3433 word_mode, lvolatilep || rvolatilep);
45dc13b9
JJ
3434 if (nmode == VOIDmode)
3435 return 0;
3436
3437 /* Set signed and unsigned types of the precision of this mode for the
3438 shifts below. */
3439 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3440 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3441
3442 /* Compute the bit position and size for the new reference and our offset
3443 within it. If the new reference is the same size as the original, we
3444 won't optimize anything, so return zero. */
3445 nbitsize = GET_MODE_BITSIZE (nmode);
3446 nbitpos = lbitpos & ~ (nbitsize - 1);
3447 lbitpos -= nbitpos;
3448 if (nbitsize == lbitsize)
3449 return 0;
3450
3451 if (BYTES_BIG_ENDIAN)
3452 lbitpos = nbitsize - lbitsize - lbitpos;
3453
3454 /* Make the mask to be used against the extracted field. */
3455 mask = build_int_cst_type (unsigned_type, -1);
43a5d30b 3456 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
45dc13b9 3457 mask = const_binop (RSHIFT_EXPR, mask,
43a5d30b 3458 size_int (nbitsize - lbitsize - lbitpos));
45dc13b9
JJ
3459
3460 if (! const_p)
3461 /* If not comparing with constant, just rework the comparison
3462 and return. */
db3927fb
AH
3463 return fold_build2_loc (loc, code, compare_type,
3464 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3465 make_bit_field_ref (loc, linner,
45dc13b9
JJ
3466 unsigned_type,
3467 nbitsize, nbitpos,
3468 1),
3469 mask),
db3927fb
AH
3470 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3471 make_bit_field_ref (loc, rinner,
45dc13b9
JJ
3472 unsigned_type,
3473 nbitsize, nbitpos,
3474 1),
3475 mask));
3476
3477 /* Otherwise, we are handling the constant case. See if the constant is too
3478 big for the field. Warn and return a tree for 0 (false) if so. We do
3479 this not only for its own sake, but to avoid having to test for this
3480 error case below. If we didn't, we might generate wrong code.
3481
3482 For unsigned fields, the constant shifted right by the field length should
3483 be all zero. For signed fields, the high-order bits should agree with
3484 the sign bit. */
3485
3486 if (lunsignedp)
3487 {
3488 if (! integer_zerop (const_binop (RSHIFT_EXPR,
db3927fb
AH
3489 fold_convert_loc (loc,
3490 unsigned_type, rhs),
43a5d30b 3491 size_int (lbitsize))))
45dc13b9
JJ
3492 {
3493 warning (0, "comparison is always %d due to width of bit-field",
3494 code == NE_EXPR);
3495 return constant_boolean_node (code == NE_EXPR, compare_type);
3496 }
3497 }
3498 else
3499 {
db3927fb
AH
3500 tree tem = const_binop (RSHIFT_EXPR,
3501 fold_convert_loc (loc, signed_type, rhs),
43a5d30b 3502 size_int (lbitsize - 1));
45dc13b9
JJ
3503 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3504 {
3505 warning (0, "comparison is always %d due to width of bit-field",
3506 code == NE_EXPR);
3507 return constant_boolean_node (code == NE_EXPR, compare_type);
3508 }
3509 }
3510
3511 /* Single-bit compares should always be against zero. */
3512 if (lbitsize == 1 && ! integer_zerop (rhs))
3513 {
3514 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3515 rhs = build_int_cst (type, 0);
3516 }
3517
3518 /* Make a new bitfield reference, shift the constant over the
3519 appropriate number of bits and mask it with the computed mask
3520 (in case this was a signed field). If we changed it, make a new one. */
db3927fb 3521 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
45dc13b9
JJ
3522 if (lvolatilep)
3523 {
3524 TREE_SIDE_EFFECTS (lhs) = 1;
3525 TREE_THIS_VOLATILE (lhs) = 1;
3526 }
3527
3528 rhs = const_binop (BIT_AND_EXPR,
3529 const_binop (LSHIFT_EXPR,
db3927fb 3530 fold_convert_loc (loc, unsigned_type, rhs),
43a5d30b
AS
3531 size_int (lbitpos)),
3532 mask);
45dc13b9 3533
c9019218
JJ
3534 lhs = build2_loc (loc, code, compare_type,
3535 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
db3927fb 3536 return lhs;
45dc13b9
JJ
3537}
3538\f
b2215d83 3539/* Subroutine for fold_truthop: decode a field reference.
6d716ca8
RS
3540
3541 If EXP is a comparison reference, we return the innermost reference.
3542
3543 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3544 set to the starting bit number.
3545
3546 If the innermost field can be completely contained in a mode-sized
3547 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3548
3549 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3550 otherwise it is not changed.
3551
3552 *PUNSIGNEDP is set to the signedness of the field.
3553
3554 *PMASK is set to the mask used. This is either contained in a
3555 BIT_AND_EXPR or derived from the width of the field.
3556
38e01259 3557 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
d4453ee5 3558
6d716ca8
RS
3559 Return 0 if this is not a component reference or is one that we can't
3560 do anything with. */
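/* Illustrative sketch, for a hypothetical reference

     s.f & 3       with f a 6-bit unsigned field at bit position 2

   the containing object (s) is returned, *PBITSIZE is 6, *PBITPOS is 2,
   *PAND_MASK is 3, and *PMASK is the 6-bit field mask merged with the
   BIT_AND_EXPR mask, i.e. 3.  */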
3561
3562static tree
db3927fb 3563decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
75040a04
AJ
3564 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3565 int *punsignedp, int *pvolatilep,
fa8db1f7 3566 tree *pmask, tree *pand_mask)
6d716ca8 3567{
1a8c4ca6 3568 tree outer_type = 0;
6d9f1f5f
RK
3569 tree and_mask = 0;
3570 tree mask, inner, offset;
3571 tree unsigned_type;
770ae6cc 3572 unsigned int precision;
6d716ca8 3573
b6cc0a72 3574 /* All the optimizations using this function assume integer fields.
772ae9f0
RK
3575 There are problems with FP fields since the type_for_size call
3576 below can fail for, e.g., XFmode. */
3577 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3578 return 0;
3579
1a8c4ca6
EB
3580 /* We are interested in the bare arrangement of bits, so strip everything
3581 that doesn't affect the machine mode. However, record the type of the
3582 outermost expression if it may matter below. */
1043771b 3583 if (CONVERT_EXPR_P (exp)
1a8c4ca6
EB
3584 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3585 outer_type = TREE_TYPE (exp);
df7fb8f9 3586 STRIP_NOPS (exp);
6d716ca8
RS
3587
3588 if (TREE_CODE (exp) == BIT_AND_EXPR)
3589 {
6d9f1f5f 3590 and_mask = TREE_OPERAND (exp, 1);
6d716ca8 3591 exp = TREE_OPERAND (exp, 0);
6d9f1f5f
RK
3592 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3593 if (TREE_CODE (and_mask) != INTEGER_CST)
6d716ca8
RS
3594 return 0;
3595 }
3596
f1e60ec6 3597 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2614034e 3598 punsignedp, pvolatilep, false);
02103577 3599 if ((inner == exp && and_mask == 0)
14a774a9
RK
3600 || *pbitsize < 0 || offset != 0
3601 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
c05a9b68 3602 return 0;
b6cc0a72 3603
1a8c4ca6
EB
3604 /* If the number of bits in the reference is the same as the bitsize of
3605 the outer type, then the outer type gives the signedness. Otherwise
3606 (in case of a small bitfield) the signedness is unchanged. */
fae1b38d 3607 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
8df83eae 3608 *punsignedp = TYPE_UNSIGNED (outer_type);
1a8c4ca6 3609
6d9f1f5f 3610 /* Compute the mask to access the bitfield. */
5785c7de 3611 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
6d9f1f5f
RK
3612 precision = TYPE_PRECISION (unsigned_type);
3613
2ac7cbb5 3614 mask = build_int_cst_type (unsigned_type, -1);
3e6688a7 3615
43a5d30b
AS
3616 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3617 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
6d9f1f5f
RK
3618
3619 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3620 if (and_mask != 0)
db3927fb
AH
3621 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3622 fold_convert_loc (loc, unsigned_type, and_mask), mask);
6d716ca8
RS
3623
3624 *pmask = mask;
d4453ee5 3625 *pand_mask = and_mask;
6d716ca8
RS
3626 return inner;
3627}
3628
45dc13b9
JJ
3629/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3630 bit positions. */
3631
3632static int
3633all_ones_mask_p (const_tree mask, int size)
3634{
3635 tree type = TREE_TYPE (mask);
3636 unsigned int precision = TYPE_PRECISION (type);
3637 tree tmask;
3638
3639 tmask = build_int_cst_type (signed_type_for (type), -1);
3640
3641 return
3642 tree_int_cst_equal (mask,
3643 const_binop (RSHIFT_EXPR,
3644 const_binop (LSHIFT_EXPR, tmask,
43a5d30b
AS
3645 size_int (precision - size)),
3646 size_int (precision - size)));
45dc13b9
JJ
3647}
3648
1f77b5da
RS
3649/* Subroutine for fold: determine if VAL is the INTEGER_CST that
3650 represents the sign bit of EXP's type. If EXP represents a sign
3651 or zero extension, also test VAL against the unextended type.
3652 The return value is the (sub)expression whose sign bit is VAL,
3653 or NULL_TREE otherwise. */
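/* Illustrative sketch, assuming a hypothetical 32-bit signed EXP whose
   precision fits in HOST_BITS_PER_WIDE_INT: the sign-bit constant is
   0x80000000 (that is, (unsigned HOST_WIDE_INT) 1 << 31), so
   sign_bit_p (exp, val) returns EXP when VAL is 0x80000000 and NULL_TREE
   for any other constant.  */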
3654
3655static tree
ac545c64 3656sign_bit_p (tree exp, const_tree val)
1f77b5da 3657{
c87d821b
KH
3658 unsigned HOST_WIDE_INT mask_lo, lo;
3659 HOST_WIDE_INT mask_hi, hi;
1f77b5da
RS
3660 int width;
3661 tree t;
3662
68e82b83 3663 /* Tree EXP must have an integral type. */
1f77b5da
RS
3664 t = TREE_TYPE (exp);
3665 if (! INTEGRAL_TYPE_P (t))
3666 return NULL_TREE;
3667
3668 /* Tree VAL must be an integer constant. */
3669 if (TREE_CODE (val) != INTEGER_CST
455f14dd 3670 || TREE_OVERFLOW (val))
1f77b5da
RS
3671 return NULL_TREE;
3672
3673 width = TYPE_PRECISION (t);
3674 if (width > HOST_BITS_PER_WIDE_INT)
3675 {
3676 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3677 lo = 0;
c87d821b
KH
3678
3679 mask_hi = ((unsigned HOST_WIDE_INT) -1
3680 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3681 mask_lo = -1;
1f77b5da
RS
3682 }
3683 else
3684 {
3685 hi = 0;
3686 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
c87d821b
KH
3687
3688 mask_hi = 0;
3689 mask_lo = ((unsigned HOST_WIDE_INT) -1
3690 >> (HOST_BITS_PER_WIDE_INT - width));
1f77b5da
RS
3691 }
3692
c87d821b
KH
3693 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3694 treat VAL as if it were unsigned. */
3695 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3696 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
1f77b5da
RS
3697 return exp;
3698
3699 /* Handle extension from a narrower type. */
3700 if (TREE_CODE (exp) == NOP_EXPR
3701 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3702 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3703
3704 return NULL_TREE;
3705}
3706
b2215d83
TW
3707/* Subroutine for fold_truthop: determine if an operand is simple enough
3708 to be evaluated unconditionally. */
3709
b6cc0a72 3710static int
ac545c64 3711simple_operand_p (const_tree exp)
b2215d83
TW
3712{
3713 /* Strip any conversions that don't change the machine mode. */
1d481ba8 3714 STRIP_NOPS (exp);
b2215d83 3715
6615c446 3716 return (CONSTANT_CLASS_P (exp)
1d481ba8 3717 || TREE_CODE (exp) == SSA_NAME
2f939d94 3718 || (DECL_P (exp)
b2215d83
TW
3719 && ! TREE_ADDRESSABLE (exp)
3720 && ! TREE_THIS_VOLATILE (exp)
8227896c
TW
3721 && ! DECL_NONLOCAL (exp)
3722 /* Don't regard global variables as simple. They may be
3723 allocated in ways unknown to the compiler (shared memory,
3724 #pragma weak, etc). */
3725 && ! TREE_PUBLIC (exp)
3726 && ! DECL_EXTERNAL (exp)
3727 /* Loading a static variable is unduly expensive, but global
3728 registers aren't expensive. */
3729 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
b2215d83 3730}
6d716ca8 3731\f
ebde8a27
RK
3732/* The following functions are subroutines to fold_range_test and allow it to
3733 try to change a logical combination of comparisons into a range test.
3734
3735 For example, both
fa8db1f7 3736 X == 2 || X == 3 || X == 4 || X == 5
ebde8a27 3737 and
fa8db1f7 3738 X >= 2 && X <= 5
ebde8a27
RK
3739 are converted to
3740 (unsigned) (X - 2) <= 3
3741
956d6950 3742 We describe each set of comparisons as being either inside or outside
ebde8a27
RK
3743 a range, using a variable named like IN_P, and then describe the
3744 range with a lower and upper bound. If one of the bounds is omitted,
3745 it represents either the highest or lowest value of the type.
3746
3747 In the comments below, we represent a range by two numbers in brackets
956d6950 3748 preceded by a "+" to designate being inside that range, or a "-" to
ebde8a27
RK
3749 designate being outside that range, so the condition can be inverted by
3750 flipping the prefix. An omitted bound is represented by a "-". For
3751 example, "- [-, 10]" means being outside the range starting at the lowest
3752 possible value and ending at 10, in other words, being greater than 10.
3753 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3754 always false.
3755
3756 We set up things so that the missing bounds are handled in a consistent
3757 manner so neither a missing bound nor "true" and "false" need to be
3758 handled using a special case. */
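/* Worked instance of the example above: the range + [2, 5] can be tested
   with a single unsigned comparison because subtracting the low bound maps
   2, 3, 4, 5 to 0, 1, 2, 3, while every value outside the range wraps
   around to something greater than 3, hence

     2 <= X && X <= 5   <==>   (unsigned) (X - 2) <= 3.  */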
3759
3760/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3761 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3762 and UPPER1_P are nonzero if the respective argument is an upper bound
3763 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3764 must be specified for a comparison. ARG1 will be converted to ARG0's
3765 type if both are specified. */
ef659ec0 3766
ebde8a27 3767static tree
75040a04
AJ
3768range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3769 tree arg1, int upper1_p)
ebde8a27 3770{
27bae8e5 3771 tree tem;
ebde8a27
RK
3772 int result;
3773 int sgn0, sgn1;
ef659ec0 3774
ebde8a27
RK
3775 /* If neither arg represents infinity, do the normal operation.
3776 Else, if not a comparison, return infinity. Else handle the special
3777 comparison rules. Note that most of the cases below won't occur, but
3778 are handled for consistency. */
ef659ec0 3779
ebde8a27 3780 if (arg0 != 0 && arg1 != 0)
27bae8e5 3781 {
7f20a5b7
KH
3782 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3783 arg0, fold_convert (TREE_TYPE (arg0), arg1));
27bae8e5
RK
3784 STRIP_NOPS (tem);
3785 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3786 }
ef659ec0 3787
6615c446 3788 if (TREE_CODE_CLASS (code) != tcc_comparison)
ebde8a27
RK
3789 return 0;
3790
3791 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
d7b3ea38
NS
3792 for neither. In real maths, we cannot assume open ended ranges are
3793 the same. But, this is computer arithmetic, where numbers are finite.
3794 We can therefore make the transformation of any unbounded range with
3795 the value Z, Z being greater than any representable number. This permits
30f7a378 3796 us to treat unbounded ranges as equal. */
ebde8a27 3797 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4e644c93 3798 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
ebde8a27
RK
3799 switch (code)
3800 {
d7b3ea38
NS
3801 case EQ_EXPR:
3802 result = sgn0 == sgn1;
3803 break;
3804 case NE_EXPR:
3805 result = sgn0 != sgn1;
ebde8a27 3806 break;
d7b3ea38 3807 case LT_EXPR:
ebde8a27
RK
3808 result = sgn0 < sgn1;
3809 break;
d7b3ea38
NS
3810 case LE_EXPR:
3811 result = sgn0 <= sgn1;
3812 break;
3813 case GT_EXPR:
ebde8a27
RK
3814 result = sgn0 > sgn1;
3815 break;
d7b3ea38
NS
3816 case GE_EXPR:
3817 result = sgn0 >= sgn1;
3818 break;
e9a25f70 3819 default:
0bccc606 3820 gcc_unreachable ();
ebde8a27
RK
3821 }
3822
1b0f3e79 3823 return constant_boolean_node (result, type);
ebde8a27 3824}
b6cc0a72 3825\f
ebde8a27
RK
3826/* Given EXP, a logical expression, set the range it is testing into
3827 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
6ac01510
ILT
3828 actually being tested. *PLOW and *PHIGH will be made of the same
3829 type as the returned expression. If EXP is not a comparison, we
3830 will most likely not be returning a useful value and range. Set
3831 *STRICT_OVERFLOW_P to true if the return value is only valid
3832 because signed overflow is undefined; otherwise, do not change
3833 *STRICT_OVERFLOW_P. */
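/* Illustrative sketch, for a hypothetical comparison X > 10 with X a
   signed int: make_range returns X with *PIN_P == 0, *PLOW left empty
   (no lower bound) and *PHIGH == 10, i.e. the range - [-, 10] in the
   notation above, meaning "X is outside [-, 10]", in other words
   X > 10.  */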
ef659ec0 3834
a243fb4a 3835tree
6ac01510
ILT
3836make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3837 bool *strict_overflow_p)
ef659ec0 3838{
ebde8a27 3839 enum tree_code code;
d1822754
EC
3840 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3841 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
ebde8a27
RK
3842 int in_p, n_in_p;
3843 tree low, high, n_low, n_high;
db3927fb 3844 location_t loc = EXPR_LOCATION (exp);
ef659ec0 3845
ebde8a27
RK
3846 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3847 and see if we can refine the range. Some of the cases below may not
3848 happen, but it doesn't seem worth worrying about this. We "continue"
3849 the outer loop when we've changed something; otherwise we "break"
3850 the switch, which will "break" the while. */
ef659ec0 3851
088414c1 3852 in_p = 0;
57decb7e 3853 low = high = build_int_cst (TREE_TYPE (exp), 0);
ebde8a27
RK
3854
3855 while (1)
ef659ec0 3856 {
ebde8a27 3857 code = TREE_CODE (exp);
d1822754 3858 exp_type = TREE_TYPE (exp);
30d68b86
MM
3859
3860 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3861 {
5039610b 3862 if (TREE_OPERAND_LENGTH (exp) > 0)
d17811fd 3863 arg0 = TREE_OPERAND (exp, 0);
6615c446
JO
3864 if (TREE_CODE_CLASS (code) == tcc_comparison
3865 || TREE_CODE_CLASS (code) == tcc_unary
3866 || TREE_CODE_CLASS (code) == tcc_binary)
d1822754 3867 arg0_type = TREE_TYPE (arg0);
6615c446
JO
3868 if (TREE_CODE_CLASS (code) == tcc_binary
3869 || TREE_CODE_CLASS (code) == tcc_comparison
3870 || (TREE_CODE_CLASS (code) == tcc_expression
5039610b 3871 && TREE_OPERAND_LENGTH (exp) > 1))
30d68b86
MM
3872 arg1 = TREE_OPERAND (exp, 1);
3873 }
ef659ec0 3874
ebde8a27
RK
3875 switch (code)
3876 {
3877 case TRUTH_NOT_EXPR:
3878 in_p = ! in_p, exp = arg0;
3879 continue;
3880
3881 case EQ_EXPR: case NE_EXPR:
3882 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3883 /* We can only do something if the range is testing for zero
3884 and if the second operand is an integer constant. Note that
3885 saying something is "in" the range we make is done by
3886 complementing IN_P since it will set in the initial case of
3887 being not equal to zero; "out" is leaving it alone. */
3888 if (low == 0 || high == 0
3889 || ! integer_zerop (low) || ! integer_zerop (high)
3890 || TREE_CODE (arg1) != INTEGER_CST)
3891 break;
ef659ec0 3892
ebde8a27
RK
3893 switch (code)
3894 {
3895 case NE_EXPR: /* - [c, c] */
3896 low = high = arg1;
3897 break;
3898 case EQ_EXPR: /* + [c, c] */
3899 in_p = ! in_p, low = high = arg1;
3900 break;
3901 case GT_EXPR: /* - [-, c] */
3902 low = 0, high = arg1;
3903 break;
3904 case GE_EXPR: /* + [c, -] */
3905 in_p = ! in_p, low = arg1, high = 0;
3906 break;
3907 case LT_EXPR: /* - [c, -] */
3908 low = arg1, high = 0;
3909 break;
3910 case LE_EXPR: /* + [-, c] */
3911 in_p = ! in_p, low = 0, high = arg1;
3912 break;
e9a25f70 3913 default:
0bccc606 3914 gcc_unreachable ();
ebde8a27 3915 }
ef659ec0 3916
7f423031 3917 /* If this is an unsigned comparison, we also know that EXP is
0e1c7fc7
RK
3918 greater than or equal to zero. We base the range tests we make
3919 on that fact, so we record it here so we can parse existing
d1822754
EC
3920 range tests. We test arg0_type since often the return type
3921 of, e.g. EQ_EXPR, is boolean. */
3922 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
ebde8a27 3923 {
e9ea8bd5
RS
3924 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3925 in_p, low, high, 1,
57decb7e 3926 build_int_cst (arg0_type, 0),
0e1c7fc7 3927 NULL_TREE))
ebde8a27 3928 break;
ef659ec0 3929
ebde8a27 3930 in_p = n_in_p, low = n_low, high = n_high;
0e1c7fc7 3931
368ebcd6 3932 /* If the high bound is missing, but we have a nonzero low
1358cdc5
RK
3933 bound, reverse the range so it goes from zero to the low bound
3934 minus 1. */
3935 if (high == 0 && low && ! integer_zerop (low))
0e1c7fc7
RK
3936 {
3937 in_p = ! in_p;
3938 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3939 integer_one_node, 0);
57decb7e 3940 low = build_int_cst (arg0_type, 0);
0e1c7fc7 3941 }
ebde8a27 3942 }
d1822754
EC
3943
3944 exp = arg0;
ebde8a27
RK
3945 continue;
3946
3947 case NEGATE_EXPR:
3948 /* (-x) IN [a,b] -> x in [-b, -a] */
d1822754 3949 n_low = range_binop (MINUS_EXPR, exp_type,
57decb7e 3950 build_int_cst (exp_type, 0),
088414c1 3951 0, high, 1);
d1822754 3952 n_high = range_binop (MINUS_EXPR, exp_type,
57decb7e 3953 build_int_cst (exp_type, 0),
088414c1 3954 0, low, 0);
a8c56818
JJ
3955 if (n_high != 0 && TREE_OVERFLOW (n_high))
3956 break;
3957 goto normalize;
ebde8a27
RK
3958
3959 case BIT_NOT_EXPR:
3960 /* ~ X -> -X - 1 */
c9019218
JJ
3961 exp = build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3962 build_int_cst (exp_type, 1));
ebde8a27
RK
3963 continue;
3964
3965 case PLUS_EXPR: case MINUS_EXPR:
3966 if (TREE_CODE (arg1) != INTEGER_CST)
3967 break;
3968
c078a437
KH
3969 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3970 move a constant to the other side. */
eeef0e45
ILT
3971 if (!TYPE_UNSIGNED (arg0_type)
3972 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
c078a437
KH
3973 break;
3974
ebde8a27
RK
3975 /* If EXP is signed, any overflow in the computation is undefined,
3976 so we don't worry about it so long as our computations on
3977 the bounds don't overflow. For unsigned, overflow is defined
3978 and this is exactly the right thing. */
3979 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
d1822754 3980 arg0_type, low, 0, arg1, 0);
ebde8a27 3981 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
d1822754 3982 arg0_type, high, 1, arg1, 0);
ebde8a27
RK
3983 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3984 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3985 break;
3986
6ac01510
ILT
3987 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3988 *strict_overflow_p = true;
3989
a8c56818 3990 normalize:
3c00684e
JL
3991 /* Check for an unsigned range which has wrapped around the maximum
3992 value thus making n_high < n_low, and normalize it. */
5a9d82a6 3993 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3c00684e 3994 {
d1822754 3995 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
0e1c7fc7 3996 integer_one_node, 0);
d1822754 3997 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
c2b63960
AO
3998 integer_one_node, 0);
3999
4000 /* If the range is of the form +/- [ x+1, x ], we won't
4001 be able to normalize it. But then, it represents the
4002 whole range or the empty set, so make it
4003 +/- [ -, - ]. */
4004 if (tree_int_cst_equal (n_low, low)
4005 && tree_int_cst_equal (n_high, high))
4006 low = high = 0;
4007 else
4008 in_p = ! in_p;
3c00684e 4009 }
5a9d82a6
JW
4010 else
4011 low = n_low, high = n_high;
27bae8e5 4012
ebde8a27
RK
4013 exp = arg0;
4014 continue;
4015
1043771b 4016 CASE_CONVERT: case NON_LVALUE_EXPR:
d1822754 4017 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
7d12cee1
JL
4018 break;
4019
d1822754
EC
4020 if (! INTEGRAL_TYPE_P (arg0_type)
4021 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4022 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
ebde8a27
RK
4023 break;
4024
ce2157a1 4025 n_low = low, n_high = high;
ebde8a27 4026
ce2157a1 4027 if (n_low != 0)
db3927fb 4028 n_low = fold_convert_loc (loc, arg0_type, n_low);
ce2157a1
JL
4029
4030 if (n_high != 0)
db3927fb 4031 n_high = fold_convert_loc (loc, arg0_type, n_high);
ce2157a1 4032
ce2157a1 4033
d1822754 4034 /* If we're converting arg0 from an unsigned type, to exp,
61ada8ae 4035 a signed type, we will be doing the comparison as unsigned.
d1822754
EC
4036 The tests above have already verified that LOW and HIGH
4037 are both positive.
4038
4039 So we have to ensure that we will handle large unsigned
4040 values the same way that the current signed bounds treat
4041 negative values. */
4042
4043 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
ce2157a1 4044 {
e1ee5cdc 4045 tree high_positive;
325217ed
CF
4046 tree equiv_type;
4047 /* For fixed-point modes, we need to pass the saturating flag
4048 as the 2nd parameter. */
4049 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4050 equiv_type = lang_hooks.types.type_for_mode
4051 (TYPE_MODE (arg0_type),
4052 TYPE_SATURATING (arg0_type));
4053 else
4054 equiv_type = lang_hooks.types.type_for_mode
4055 (TYPE_MODE (arg0_type), 1);
e1ee5cdc
RH
4056
4057 /* A range without an upper bound is, naturally, unbounded.
4058 Since convert would have cropped a very large value, use
14a774a9
RK
4059 the max value for the destination type. */
4060 high_positive
4061 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
d1822754 4062 : TYPE_MAX_VALUE (arg0_type);
e1ee5cdc 4063
d1822754 4064 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
db3927fb
AH
4065 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4066 fold_convert_loc (loc, arg0_type,
4067 high_positive),
000d8d44 4068 build_int_cst (arg0_type, 1));
b6cc0a72 4069
ce2157a1
JL
4070 /* If the low bound is specified, "and" the range with the
4071 range for which the original unsigned value will be
4072 positive. */
4073 if (low != 0)
4074 {
4075 if (! merge_ranges (&n_in_p, &n_low, &n_high,
088414c1 4076 1, n_low, n_high, 1,
db3927fb
AH
4077 fold_convert_loc (loc, arg0_type,
4078 integer_zero_node),
ce2157a1
JL
4079 high_positive))
4080 break;
4081
4082 in_p = (n_in_p == in_p);
4083 }
4084 else
4085 {
4086 /* Otherwise, "or" the range with the range of the input
4087 that will be interpreted as negative. */
4088 if (! merge_ranges (&n_in_p, &n_low, &n_high,
088414c1 4089 0, n_low, n_high, 1,
db3927fb
AH
4090 fold_convert_loc (loc, arg0_type,
4091 integer_zero_node),
ce2157a1
JL
4092 high_positive))
4093 break;
4094
4095 in_p = (in_p != n_in_p);
4096 }
4097 }
ebde8a27
RK
4098
4099 exp = arg0;
ce2157a1 4100 low = n_low, high = n_high;
ebde8a27 4101 continue;
ce2157a1
JL
4102
4103 default:
4104 break;
ef659ec0 4105 }
ebde8a27
RK
4106
4107 break;
ef659ec0 4108 }
ebde8a27 4109
80906567
RK
4110 /* If EXP is a constant, we can evaluate whether this is true or false. */
4111 if (TREE_CODE (exp) == INTEGER_CST)
4112 {
4113 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4114 exp, 0, low, 0))
4115 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4116 exp, 1, high, 1)));
4117 low = high = 0;
4118 exp = 0;
4119 }
4120
ebde8a27
RK
4121 *pin_p = in_p, *plow = low, *phigh = high;
4122 return exp;
4123}
4124\f
4125/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4126 type, TYPE, return an expression to test if EXP is in (or out of, depending
e1af8299 4127 on IN_P) the range. Return 0 if the test couldn't be created. */
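/* Illustrative sketch with hypothetical arguments: for an int expression X,
   build_range_check (loc, type, X, 1, 2, 5) produces the single unsigned
   comparison

     (unsigned int) (X - 2) <= 3

   via the (exp - low) <= (high - low) rewrite further below, while
   IN_P == 0 simply builds the check for the inside range and inverts
   it.  */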
ebde8a27 4128
a243fb4a 4129tree
db3927fb
AH
4130build_range_check (location_t loc, tree type, tree exp, int in_p,
4131 tree low, tree high)
ebde8a27 4132{
849d624b 4133 tree etype = TREE_TYPE (exp), value;
ebde8a27 4134
f60c951c
JDA
4135#ifdef HAVE_canonicalize_funcptr_for_compare
4136 /* Disable this optimization for function pointer expressions
4137 on targets that require function pointer canonicalization. */
4138 if (HAVE_canonicalize_funcptr_for_compare
4139 && TREE_CODE (etype) == POINTER_TYPE
4140 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4141 return NULL_TREE;
4142#endif
4143
e1af8299
JJ
4144 if (! in_p)
4145 {
db3927fb 4146 value = build_range_check (loc, type, exp, 1, low, high);
e1af8299 4147 if (value != 0)
db3927fb 4148 return invert_truthvalue_loc (loc, value);
e1af8299
JJ
4149
4150 return 0;
4151 }
ebde8a27 4152
dbfb1116 4153 if (low == 0 && high == 0)
57decb7e 4154 return build_int_cst (type, 1);
ebde8a27 4155
dbfb1116 4156 if (low == 0)
db3927fb
AH
4157 return fold_build2_loc (loc, LE_EXPR, type, exp,
4158 fold_convert_loc (loc, etype, high));
ebde8a27 4159
dbfb1116 4160 if (high == 0)
db3927fb
AH
4161 return fold_build2_loc (loc, GE_EXPR, type, exp,
4162 fold_convert_loc (loc, etype, low));
ebde8a27 4163
dbfb1116 4164 if (operand_equal_p (low, high, 0))
db3927fb
AH
4165 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4166 fold_convert_loc (loc, etype, low));
ebde8a27 4167
dbfb1116 4168 if (integer_zerop (low))
ef659ec0 4169 {
8df83eae 4170 if (! TYPE_UNSIGNED (etype))
dd3f0101 4171 {
ca5ba2a3 4172 etype = unsigned_type_for (etype);
db3927fb
AH
4173 high = fold_convert_loc (loc, etype, high);
4174 exp = fold_convert_loc (loc, etype, exp);
dd3f0101 4175 }
db3927fb 4176 return build_range_check (loc, type, exp, 1, 0, high);
ebde8a27 4177 }
ef659ec0 4178
dbfb1116
RS
4179 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4180 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4181 {
4182 unsigned HOST_WIDE_INT lo;
4183 HOST_WIDE_INT hi;
4184 int prec;
4185
4186 prec = TYPE_PRECISION (etype);
4187 if (prec <= HOST_BITS_PER_WIDE_INT)
dd3f0101
KH
4188 {
4189 hi = 0;
4190 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4191 }
dbfb1116 4192 else
dd3f0101
KH
4193 {
4194 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4195 lo = (unsigned HOST_WIDE_INT) -1;
4196 }
dbfb1116
RS
4197
4198 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
dd3f0101 4199 {
8df83eae 4200 if (TYPE_UNSIGNED (etype))
dd3f0101 4201 {
972afb58
JJ
4202 tree signed_etype = signed_type_for (etype);
4203 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4204 etype
4205 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4206 else
4207 etype = signed_etype;
db3927fb 4208 exp = fold_convert_loc (loc, etype, exp);
dd3f0101 4209 }
db3927fb 4210 return fold_build2_loc (loc, GT_EXPR, type, exp,
57decb7e 4211 build_int_cst (etype, 0));
dd3f0101 4212 }
dbfb1116
RS
4213 }
4214
f8fe0545 4215 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
84fb43a1
EB
4216 This requires wrap-around arithmetic for the type of the expression.
4217 First make sure that arithmetic in this type is valid, then make sure
4218 that it wraps around. */
4219 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4220 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4221 TYPE_UNSIGNED (etype));
f8fe0545 4222
84fb43a1 4223 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
e1af8299
JJ
4224 {
4225 tree utype, minv, maxv;
4226
4227 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4228 for the type in question, as we rely on this here. */
ca5ba2a3 4229 utype = unsigned_type_for (etype);
db3927fb 4230 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
f8fe0545
EB
4231 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4232 integer_one_node, 1);
db3927fb 4233 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
f8fe0545
EB
4234
4235 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4236 minv, 1, maxv, 1)))
4237 etype = utype;
4238 else
4239 return 0;
e1af8299
JJ
4240 }
4241
db3927fb
AH
4242 high = fold_convert_loc (loc, etype, high);
4243 low = fold_convert_loc (loc, etype, low);
4244 exp = fold_convert_loc (loc, etype, exp);
438090c3 4245
43a5d30b 4246 value = const_binop (MINUS_EXPR, high, low);
f8fe0545 4247
5be014d5
AP
4248
4249 if (POINTER_TYPE_P (etype))
4250 {
4251 if (value != 0 && !TREE_OVERFLOW (value))
4252 {
db3927fb
AH
4253 low = fold_convert_loc (loc, sizetype, low);
4254 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4255 return build_range_check (loc, type,
4256 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4257 etype, exp, low),
5be014d5
AP
4258 1, build_int_cst (etype, 0), value);
4259 }
4260 return 0;
4261 }
4262
f8fe0545 4263 if (value != 0 && !TREE_OVERFLOW (value))
db3927fb
AH
4264 return build_range_check (loc, type,
4265 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
f8fe0545 4266 1, build_int_cst (etype, 0), value);
dbfb1116
RS
4267
4268 return 0;
ebde8a27
RK
4269}
4270\f
2f96b754
EB
4271/* Return the predecessor of VAL in its type, handling the infinite case. */
4272
4273static tree
4274range_predecessor (tree val)
4275{
4276 tree type = TREE_TYPE (val);
4277
1464eeb8
EB
4278 if (INTEGRAL_TYPE_P (type)
4279 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
2f96b754
EB
4280 return 0;
4281 else
4282 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4283}
4284
4285/* Return the successor of VAL in its type, handling the infinite case. */
4286
4287static tree
4288range_successor (tree val)
4289{
4290 tree type = TREE_TYPE (val);
4291
1464eeb8
EB
4292 if (INTEGRAL_TYPE_P (type)
4293 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
2f96b754
EB
4294 return 0;
4295 else
4296 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4297}
4298
b6cc0a72 4299/* Given two ranges, see if we can merge them into one. Return 1 if we
ebde8a27 4300 can, 0 if we can't. Set the output range into the specified parameters. */
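/* Illustrative sketch with hypothetical ranges: merging + [2, 5] and
   + [4, 9] (both "in", as for the AND of two range tests) yields + [4, 5],
   the overlap; merging + [2, 5] with - [4, 9] yields + [2, 3], i.e. the
   part of the first range not excluded by the second.  */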
ef659ec0 4301
a243fb4a 4302bool
75040a04
AJ
4303merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4304 tree high0, int in1_p, tree low1, tree high1)
ebde8a27
RK
4305{
4306 int no_overlap;
4307 int subset;
4308 int temp;
4309 tree tem;
4310 int in_p;
4311 tree low, high;
ce2157a1
JL
4312 int lowequal = ((low0 == 0 && low1 == 0)
4313 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4314 low0, 0, low1, 0)));
4315 int highequal = ((high0 == 0 && high1 == 0)
4316 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4317 high0, 1, high1, 1)));
4318
4319 /* Make range 0 be the range that starts first, or ends last if they
4320 start at the same value. Swap them if it isn't. */
b6cc0a72 4321 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
ebde8a27 4322 low0, 0, low1, 0))
ce2157a1 4323 || (lowequal
ebde8a27 4324 && integer_onep (range_binop (GT_EXPR, integer_type_node,
ce2157a1 4325 high1, 1, high0, 1))))
ebde8a27
RK
4326 {
4327 temp = in0_p, in0_p = in1_p, in1_p = temp;
4328 tem = low0, low0 = low1, low1 = tem;
4329 tem = high0, high0 = high1, high1 = tem;
4330 }
ef659ec0 4331
ebde8a27
RK
4332 /* Now flag two cases, whether the ranges are disjoint or whether the
4333 second range is totally subsumed in the first. Note that the tests
4334 below are simplified by the ones above. */
4335 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4336 high0, 1, low1, 0));
5df8a1f2 4337 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
ebde8a27
RK
4338 high1, 1, high0, 1));
4339
4340 /* We now have four cases, depending on whether we are including or
4341 excluding the two ranges. */
4342 if (in0_p && in1_p)
4343 {
4344 /* If they don't overlap, the result is false. If the second range
4345 is a subset it is the result. Otherwise, the range is from the start
4346 of the second to the end of the first. */
4347 if (no_overlap)
4348 in_p = 0, low = high = 0;
4349 else if (subset)
4350 in_p = 1, low = low1, high = high1;
4351 else
4352 in_p = 1, low = low1, high = high0;
4353 }
ef659ec0 4354
ebde8a27
RK
4355 else if (in0_p && ! in1_p)
4356 {
ce2157a1
JL
4357 /* If they don't overlap, the result is the first range. If they are
4358 equal, the result is false. If the second range is a subset of the
4359 first, and the ranges begin at the same place, we go from just after
f8fe0545 4360 the end of the second range to the end of the first. If the second
ce2157a1
JL
4361 range is not a subset of the first, or if it is a subset and both
4362 ranges end at the same place, the range starts at the start of the
4363 first range and ends just before the second range.
4364 Otherwise, we can't describe this as a single range. */
ebde8a27
RK
4365 if (no_overlap)
4366 in_p = 1, low = low0, high = high0;
ce2157a1 4367 else if (lowequal && highequal)
405862dd 4368 in_p = 0, low = high = 0;
ce2157a1
JL
4369 else if (subset && lowequal)
4370 {
f8fe0545
EB
4371 low = range_successor (high1);
4372 high = high0;
39ac2ffc
ILT
4373 in_p = 1;
4374 if (low == 0)
4375 {
4376 /* We are in the weird situation where high0 > high1 but
4377 high1 has no successor. Punt. */
4378 return 0;
4379 }
ce2157a1
JL
4380 }
4381 else if (! subset || highequal)
ebde8a27 4382 {
f8fe0545
EB
4383 low = low0;
4384 high = range_predecessor (low1);
39ac2ffc
ILT
4385 in_p = 1;
4386 if (high == 0)
4387 {
4388 /* low0 < low1 but low1 has no predecessor. Punt. */
4389 return 0;
4390 }
ebde8a27 4391 }
ce2157a1
JL
4392 else
4393 return 0;
ebde8a27 4394 }
ef659ec0 4395
ebde8a27
RK
4396 else if (! in0_p && in1_p)
4397 {
4398 /* If they don't overlap, the result is the second range. If the second
4399 is a subset of the first, the result is false. Otherwise,
4400 the range starts just after the first range and ends at the
4401 end of the second. */
4402 if (no_overlap)
4403 in_p = 1, low = low1, high = high1;
14a774a9 4404 else if (subset || highequal)
ebde8a27
RK
4405 in_p = 0, low = high = 0;
4406 else
4407 {
f8fe0545
EB
4408 low = range_successor (high0);
4409 high = high1;
39ac2ffc
ILT
4410 in_p = 1;
4411 if (low == 0)
4412 {
4413 /* high1 > high0 but high0 has no successor. Punt. */
4414 return 0;
4415 }
ef659ec0
TW
4416 }
4417 }
4418
ebde8a27
RK
4419 else
4420 {
4421 /* The case where we are excluding both ranges. Here the complex case
4422 is if they don't overlap. In that case, the only time we have a
4423 range is if they are adjacent. If the second is a subset of the
4424 first, the result is the first. Otherwise, the range to exclude
4425 starts at the beginning of the first range and ends at the end of the
4426 second. */
4427 if (no_overlap)
4428 {
4429 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
f8fe0545 4430 range_successor (high0),
ebde8a27
RK
4431 1, low1, 0)))
4432 in_p = 0, low = low0, high = high1;
4433 else
e1af8299
JJ
4434 {
4435 /* Canonicalize - [min, x] into - [-, x]. */
4436 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4437 switch (TREE_CODE (TREE_TYPE (low0)))
4438 {
4439 case ENUMERAL_TYPE:
4440 if (TYPE_PRECISION (TREE_TYPE (low0))
4441 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4442 break;
4443 /* FALLTHROUGH */
4444 case INTEGER_TYPE:
e1af8299
JJ
4445 if (tree_int_cst_equal (low0,
4446 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4447 low0 = 0;
4448 break;
4449 case POINTER_TYPE:
4450 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4451 && integer_zerop (low0))
4452 low0 = 0;
4453 break;
4454 default:
4455 break;
4456 }
4457
4458 /* Canonicalize - [x, max] into - [x, -]. */
4459 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4460 switch (TREE_CODE (TREE_TYPE (high1)))
4461 {
4462 case ENUMERAL_TYPE:
4463 if (TYPE_PRECISION (TREE_TYPE (high1))
4464 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4465 break;
4466 /* FALLTHROUGH */
4467 case INTEGER_TYPE:
e1af8299
JJ
4468 if (tree_int_cst_equal (high1,
4469 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4470 high1 = 0;
4471 break;
4472 case POINTER_TYPE:
4473 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4474 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4475 high1, 1,
4476 integer_one_node, 1)))
4477 high1 = 0;
4478 break;
4479 default:
4480 break;
4481 }
4482
4483 /* The ranges might also be adjacent between the maximum and
4484 minimum values of the given type. For
4485 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4486 return + [x + 1, y - 1]. */
4487 if (low0 == 0 && high1 == 0)
4488 {
2f96b754
EB
4489 low = range_successor (high0);
4490 high = range_predecessor (low1);
e1af8299
JJ
4491 if (low == 0 || high == 0)
4492 return 0;
4493
4494 in_p = 1;
4495 }
4496 else
4497 return 0;
4498 }
ebde8a27
RK
4499 }
4500 else if (subset)
4501 in_p = 0, low = low0, high = high0;
4502 else
4503 in_p = 0, low = low0, high = high1;
4504 }
f5902869 4505
ebde8a27
RK
4506 *pin_p = in_p, *plow = low, *phigh = high;
4507 return 1;
4508}
2c486ea7
PB
4509\f
4510
4511/* Subroutine of fold, looking inside expressions of the form
2851dd68
PB
4512 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4513 of the COND_EXPR. This function is also used to optimize
4514 A op B ? C : A, by reversing the comparison first.
2c486ea7
PB
4515
4516 Return a folded expression whose code is not a COND_EXPR
4517 anymore, or NULL_TREE if no folding opportunity is found. */
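/* Illustrative sketches of the transformations below, for a hypothetical
   signed integer A with no signed zeros or NaNs involved:

     A >= 0 ? A : -A     becomes   ABS_EXPR <A>
     A >  B ? A : B      becomes   MAX_EXPR <B, A>
     A == B ? A : B      becomes   B

   The detailed comments in the body explain why signed zeros and NaNs
   restrict these rewrites for floating-point types.  */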
4518
4519static tree
db3927fb
AH
4520fold_cond_expr_with_comparison (location_t loc, tree type,
4521 tree arg0, tree arg1, tree arg2)
2c486ea7
PB
4522{
4523 enum tree_code comp_code = TREE_CODE (arg0);
4524 tree arg00 = TREE_OPERAND (arg0, 0);
4525 tree arg01 = TREE_OPERAND (arg0, 1);
2851dd68 4526 tree arg1_type = TREE_TYPE (arg1);
2c486ea7 4527 tree tem;
2851dd68
PB
4528
4529 STRIP_NOPS (arg1);
2c486ea7
PB
4530 STRIP_NOPS (arg2);
4531
4532 /* If we have A op 0 ? A : -A, consider applying the following
4533 transformations:
4534
4535 A == 0? A : -A same as -A
4536 A != 0? A : -A same as A
4537 A >= 0? A : -A same as abs (A)
4538 A > 0? A : -A same as abs (A)
4539 A <= 0? A : -A same as -abs (A)
4540 A < 0? A : -A same as -abs (A)
4541
4542 None of these transformations work for modes with signed
4543 zeros. If A is +/-0, the first two transformations will
4544 change the sign of the result (from +0 to -0, or vice
4545 versa). The last four will fix the sign of the result,
4546 even though the original expressions could be positive or
4547 negative, depending on the sign of A.
4548
4549 Note that all these transformations are correct if A is
4550 NaN, since the two alternatives (A and -A) are also NaNs. */
5ce0e197
UB
4551 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4552 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4553 ? real_zerop (arg01)
4554 : integer_zerop (arg01))
a10d70ba
PH
4555 && ((TREE_CODE (arg2) == NEGATE_EXPR
4556 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4557 /* In the case that A is of the form X-Y, '-A' (arg2) may
4558 have already been folded to Y-X, check for that. */
4559 || (TREE_CODE (arg1) == MINUS_EXPR
4560 && TREE_CODE (arg2) == MINUS_EXPR
4561 && operand_equal_p (TREE_OPERAND (arg1, 0),
4562 TREE_OPERAND (arg2, 1), 0)
4563 && operand_equal_p (TREE_OPERAND (arg1, 1),
4564 TREE_OPERAND (arg2, 0), 0))))
2c486ea7
PB
4565 switch (comp_code)
4566 {
4567 case EQ_EXPR:
3ae472c2 4568 case UNEQ_EXPR:
db3927fb
AH
4569 tem = fold_convert_loc (loc, arg1_type, arg1);
4570 return pedantic_non_lvalue_loc (loc,
4571 fold_convert_loc (loc, type,
4572 negate_expr (tem)));
2c486ea7 4573 case NE_EXPR:
3ae472c2 4574 case LTGT_EXPR:
db3927fb 4575 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
3ae472c2
RS
4576 case UNGE_EXPR:
4577 case UNGT_EXPR:
4578 if (flag_trapping_math)
4579 break;
4580 /* Fall through. */
2c486ea7
PB
4581 case GE_EXPR:
4582 case GT_EXPR:
2851dd68 4583 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
db3927fb 4584 arg1 = fold_convert_loc (loc, signed_type_for
2851dd68 4585 (TREE_TYPE (arg1)), arg1);
db3927fb
AH
4586 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4587 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
3ae472c2
RS
4588 case UNLE_EXPR:
4589 case UNLT_EXPR:
4590 if (flag_trapping_math)
4591 break;
2c486ea7
PB
4592 case LE_EXPR:
4593 case LT_EXPR:
2851dd68 4594 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
db3927fb 4595 arg1 = fold_convert_loc (loc, signed_type_for
2851dd68 4596 (TREE_TYPE (arg1)), arg1);
db3927fb
AH
4597 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4598 return negate_expr (fold_convert_loc (loc, type, tem));
2c486ea7 4599 default:
6615c446 4600 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
3ae472c2 4601 break;
2c486ea7
PB
4602 }
4603
4604 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4605 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4606 both transformations are correct when A is NaN: A != 0
4607 is then true, and A == 0 is false. */
4608
5ce0e197
UB
4609 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4610 && integer_zerop (arg01) && integer_zerop (arg2))
2c486ea7
PB
4611 {
4612 if (comp_code == NE_EXPR)
db3927fb 4613 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
2c486ea7 4614 else if (comp_code == EQ_EXPR)
57decb7e 4615 return build_int_cst (type, 0);
2c486ea7
PB
4616 }
4617
4618 /* Try some transformations of A op B ? A : B.
4619
4620 A == B? A : B same as B
4621 A != B? A : B same as A
4622 A >= B? A : B same as max (A, B)
4623 A > B? A : B same as max (B, A)
4624 A <= B? A : B same as min (A, B)
4625 A < B? A : B same as min (B, A)
4626
4627 As above, these transformations don't work in the presence
4628 of signed zeros. For example, if A and B are zeros of
4629 opposite sign, the first two transformations will change
4630 the sign of the result. In the last four, the original
4631 expressions give different results for (A=+0, B=-0) and
4632 (A=-0, B=+0), but the transformed expressions do not.
4633
4634 The first two transformations are correct if either A or B
4635 is a NaN. In the first transformation, the condition will
4636 be false, and B will indeed be chosen. In the case of the
4637 second transformation, the condition A != B will be true,
4638 and A will be chosen.
4639
4640 The conversions to max() and min() are not correct if B is
4641 a number and A is not. The conditions in the original
4642 expressions will be false, so all four give B. The min()
4643 and max() versions would give a NaN instead. */
5ce0e197
UB
4644 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4645 && operand_equal_for_comparison_p (arg01, arg2, arg00)
283da5df
RS
4646 /* Avoid these transformations if the COND_EXPR may be used
4647 as an lvalue in the C++ front-end. PR c++/19199. */
4648 && (in_gimple_form
6b4e9576
FJ
4649 || (strcmp (lang_hooks.name, "GNU C++") != 0
4650 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
283da5df
RS
4651 || ! maybe_lvalue_p (arg1)
4652 || ! maybe_lvalue_p (arg2)))
2c486ea7
PB
4653 {
4654 tree comp_op0 = arg00;
4655 tree comp_op1 = arg01;
4656 tree comp_type = TREE_TYPE (comp_op0);
4657
4658 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4659 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4660 {
4661 comp_type = type;
2851dd68 4662 comp_op0 = arg1;
2c486ea7
PB
4663 comp_op1 = arg2;
4664 }
4665
4666 switch (comp_code)
4667 {
4668 case EQ_EXPR:
db3927fb 4669 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
2c486ea7 4670 case NE_EXPR:
db3927fb 4671 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
2c486ea7
PB
4672 case LE_EXPR:
4673 case LT_EXPR:
3ae472c2
RS
4674 case UNLE_EXPR:
4675 case UNLT_EXPR:
2c486ea7
PB
4676 /* In C++ a ?: expression can be an lvalue, so put the
4677 operand which will be used if they are equal first
4678 so that we can convert this back to the
4679 corresponding COND_EXPR. */
2851dd68 4680 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
e9ea8bd5 4681 {
db3927fb
AH
4682 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4683 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
3ae472c2 4684 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
db3927fb
AH
4685 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4686 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4687 comp_op1, comp_op0);
4688 return pedantic_non_lvalue_loc (loc,
4689 fold_convert_loc (loc, type, tem));
e9ea8bd5 4690 }
2c486ea7
PB
4691 break;
4692 case GE_EXPR:
4693 case GT_EXPR:
3ae472c2
RS
4694 case UNGE_EXPR:
4695 case UNGT_EXPR:
2851dd68 4696 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
e9ea8bd5 4697 {
db3927fb
AH
4698 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4699 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
3ae472c2 4700 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
db3927fb
AH
4701 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4702 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4703 comp_op1, comp_op0);
4704 return pedantic_non_lvalue_loc (loc,
4705 fold_convert_loc (loc, type, tem));
e9ea8bd5 4706 }
2c486ea7 4707 break;
3ae472c2
RS
4708 case UNEQ_EXPR:
4709 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
db3927fb
AH
4710 return pedantic_non_lvalue_loc (loc,
4711 fold_convert_loc (loc, type, arg2));
3ae472c2
RS
4712 break;
4713 case LTGT_EXPR:
4714 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
db3927fb
AH
4715 return pedantic_non_lvalue_loc (loc,
4716 fold_convert_loc (loc, type, arg1));
3ae472c2 4717 break;
2c486ea7 4718 default:
6615c446 4719 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
3ae472c2 4720 break;
2c486ea7
PB
4721 }
4722 }
4723
4724 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4725 we might still be able to simplify this. For example,
4726 if C1 is one less or one more than C2, this might have started
4727 out as a MIN or MAX and been transformed by this function.
4728 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4729
4730 if (INTEGRAL_TYPE_P (type)
4731 && TREE_CODE (arg01) == INTEGER_CST
4732 && TREE_CODE (arg2) == INTEGER_CST)
4733 switch (comp_code)
4734 {
4735 case EQ_EXPR:
b9da76de
JJ
4736 if (TREE_CODE (arg1) == INTEGER_CST)
4737 break;
2c486ea7 4738 /* We can replace A with C1 in this case. */
db3927fb
AH
4739 arg1 = fold_convert_loc (loc, type, arg01);
4740 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
2c486ea7
PB
4741
4742 case LT_EXPR:
b4e4232d
JJ
4743 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4744 MIN_EXPR, to preserve the signedness of the comparison. */
2c486ea7
PB
4745 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4746 OEP_ONLY_CONST)
4747 && operand_equal_p (arg01,
4748 const_binop (PLUS_EXPR, arg2,
43a5d30b 4749 build_int_cst (type, 1)),
2c486ea7 4750 OEP_ONLY_CONST))
b4e4232d 4751 {
db3927fb
AH
4752 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4753 fold_convert_loc (loc, TREE_TYPE (arg00),
4754 arg2));
b8698a0f 4755 return pedantic_non_lvalue_loc (loc,
db3927fb 4756 fold_convert_loc (loc, type, tem));
b4e4232d 4757 }
2c486ea7
PB
4758 break;
4759
4760 case LE_EXPR:
b4e4232d
JJ
4761 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4762 as above. */
2c486ea7
PB
4763 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4764 OEP_ONLY_CONST)
4765 && operand_equal_p (arg01,
4766 const_binop (MINUS_EXPR, arg2,
43a5d30b 4767 build_int_cst (type, 1)),
2c486ea7 4768 OEP_ONLY_CONST))
b4e4232d 4769 {
db3927fb
AH
4770 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4771 fold_convert_loc (loc, TREE_TYPE (arg00),
4772 arg2));
4773 return pedantic_non_lvalue_loc (loc,
4774 fold_convert_loc (loc, type, tem));
b4e4232d 4775 }
2c486ea7
PB
4776 break;
4777
4778 case GT_EXPR:
30349c74
PB
4779 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4780 MAX_EXPR, to preserve the signedness of the comparison. */
2c486ea7
PB
4781 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4782 OEP_ONLY_CONST)
4783 && operand_equal_p (arg01,
4784 const_binop (MINUS_EXPR, arg2,
43a5d30b 4785 build_int_cst (type, 1)),
2c486ea7 4786 OEP_ONLY_CONST))
b4e4232d 4787 {
db3927fb
AH
4788 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4789 fold_convert_loc (loc, TREE_TYPE (arg00),
4790 arg2));
4791 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
b4e4232d 4792 }
2c486ea7
PB
4793 break;
4794
4795 case GE_EXPR:
30349c74 4796 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
2c486ea7
PB
4797 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4798 OEP_ONLY_CONST)
4799 && operand_equal_p (arg01,
4800 const_binop (PLUS_EXPR, arg2,
43a5d30b 4801 build_int_cst (type, 1)),
2c486ea7 4802 OEP_ONLY_CONST))
b4e4232d 4803 {
db3927fb
AH
4804 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4805 fold_convert_loc (loc, TREE_TYPE (arg00),
4806 arg2));
4807 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
b4e4232d 4808 }
2c486ea7
PB
4809 break;
4810 case NE_EXPR:
4811 break;
4812 default:
0bccc606 4813 gcc_unreachable ();
2c486ea7
PB
4814 }
4815
4816 return NULL_TREE;
4817}
4818
4819
ebde8a27 4820\f
b8610a53 4821#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
3a4fd356 4822#define LOGICAL_OP_NON_SHORT_CIRCUIT \
7f4b6d20 4823 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
3a4fd356 4824 false) >= 2)
85e50b6b
DE
4825#endif
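/* Added note (reading of the default above): when the target's branch cost is
   at least 2, it is assumed cheaper to evaluate both operands of &&/|| and
   combine them with a single non-short-circuit AND/OR than to emit another
   conditional branch; the #ifndef guard lets a target header predefine the
   macro instead.  */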
4826
ebde8a27
RK
4827/* EXP is some logical combination of boolean tests. See if we can
4828 merge it into some range test. Return the new tree if so. */
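/* Added illustrative example: for "c >= '0' && c <= '9'" both tests involve
   the same operand, so the two ranges merge and build_range_check can emit a
   single test, roughly "(unsigned) (c - '0') <= 9".  */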
ef659ec0 4829
ebde8a27 4830static tree
db3927fb
AH
4831fold_range_test (location_t loc, enum tree_code code, tree type,
4832 tree op0, tree op1)
ebde8a27 4833{
e1f04615
KH
4834 int or_op = (code == TRUTH_ORIF_EXPR
4835 || code == TRUTH_OR_EXPR);
ebde8a27
RK
4836 int in0_p, in1_p, in_p;
4837 tree low0, low1, low, high0, high1, high;
6ac01510
ILT
4838 bool strict_overflow_p = false;
4839 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4840 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
ebde8a27 4841 tree tem;
6ac01510
ILT
4842 const char * const warnmsg = G_("assuming signed overflow does not occur "
4843 "when simplifying range test");
ef659ec0 4844
ebde8a27
RK
4845 /* If this is an OR operation, invert both sides; we will invert
4846 again at the end. */
4847 if (or_op)
4848 in0_p = ! in0_p, in1_p = ! in1_p;
4849
4850 /* If both expressions are the same, if we can merge the ranges, and we
80906567
RK
4851 can build the range test, return it or it inverted. If one of the
4852 ranges is always true or always false, consider it to be the same
4853 expression as the other. */
4854 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
ebde8a27
RK
4855 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4856 in1_p, low1, high1)
db3927fb 4857 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
80906567
RK
4858 lhs != 0 ? lhs
4859 : rhs != 0 ? rhs : integer_zero_node,
ebde8a27 4860 in_p, low, high))))
6ac01510
ILT
4861 {
4862 if (strict_overflow_p)
4863 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
db3927fb 4864 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6ac01510 4865 }
ebde8a27
RK
4866
4867 /* On machines where the branch cost is expensive, if this is a
4868 short-circuited branch and the underlying object on both sides
4869 is the same, make a non-short-circuit operation. */
b8610a53 4870 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
7cf5c9e1 4871 && lhs != 0 && rhs != 0
e1f04615
KH
4872 && (code == TRUTH_ANDIF_EXPR
4873 || code == TRUTH_ORIF_EXPR)
ebde8a27 4874 && operand_equal_p (lhs, rhs, 0))
ef659ec0 4875 {
f0eebf28 4876 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
9ec36da5
JL
4877 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4878 which cases we can't do this. */
ebde8a27 4879 if (simple_operand_p (lhs))
c9019218
JJ
4880 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4881 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4882 type, op0, op1);
f0eebf28 4883
5785c7de 4884 else if (lang_hooks.decls.global_bindings_p () == 0
7a6cdb44 4885 && ! CONTAINS_PLACEHOLDER_P (lhs))
ebde8a27
RK
4886 {
4887 tree common = save_expr (lhs);
4888
db3927fb 4889 if (0 != (lhs = build_range_check (loc, type, common,
ebde8a27
RK
4890 or_op ? ! in0_p : in0_p,
4891 low0, high0))
db3927fb 4892 && (0 != (rhs = build_range_check (loc, type, common,
ebde8a27
RK
4893 or_op ? ! in1_p : in1_p,
4894 low1, high1))))
6ac01510
ILT
4895 {
4896 if (strict_overflow_p)
4897 fold_overflow_warning (warnmsg,
4898 WARN_STRICT_OVERFLOW_COMPARISON);
c9019218
JJ
4899 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4900 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4901 type, lhs, rhs);
6ac01510 4902 }
ebde8a27 4903 }
ef659ec0 4904 }
de153e82 4905
de153e82 4906 return 0;
ef659ec0
TW
4907}
4908\f
02103577 4909/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
25216284 4910 bit value. Arrange things so the extra bits will be set to zero if and
d4453ee5
RK
4911 only if C is sign-extended to its full width. If MASK is nonzero,
4912 it is an INTEGER_CST that should be AND'ed with the extra bits. */
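/* Added worked example (illustrative values): with a 32-bit type, P == 8,
   UNSIGNEDP == 0 and no MASK, C == 0x00000080 yields 0xffffff80, while an
   already sign-extended C == 0xffffff80 yields 0x00000080 -- the extra bits
   of the result are zero exactly when C was sign-extended to full width.  */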
02103577
RK
4913
4914static tree
fa8db1f7 4915unextend (tree c, int p, int unsignedp, tree mask)
02103577
RK
4916{
4917 tree type = TREE_TYPE (c);
4918 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4919 tree temp;
4920
4921 if (p == modesize || unsignedp)
4922 return c;
4923
02103577 4924 /* We work by getting just the sign bit into the low-order bit, then
9faa82d8 4925 into the high-order bit, then sign-extend. We then XOR that value
02103577 4926 with C. */
43a5d30b
AS
4927 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4928 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
cf85c69b
JW
4929
4930 /* We must use a signed type in order to get an arithmetic right shift.
4931 However, we must also avoid introducing accidental overflows, so that
b6cc0a72 4932 a subsequent call to integer_zerop will work. Hence we must
cf85c69b
JW
4933 do the type conversion here. At this point, the constant is either
4934 zero or one, and the conversion to a signed type can never overflow.
4935 We could get an overflow if this conversion is done anywhere else. */
8df83eae 4936 if (TYPE_UNSIGNED (type))
12753674 4937 temp = fold_convert (signed_type_for (type), temp);
cf85c69b 4938
43a5d30b
AS
4939 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4940 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
d4453ee5 4941 if (mask != 0)
088414c1 4942 temp = const_binop (BIT_AND_EXPR, temp,
43a5d30b 4943 fold_convert (TREE_TYPE (c), mask));
cf85c69b 4944 /* If necessary, convert the type back to match the type of C. */
8df83eae 4945 if (TYPE_UNSIGNED (type))
088414c1 4946 temp = fold_convert (type, temp);
d4453ee5 4947
43a5d30b 4948 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
02103577
RK
4949}
4950\f
27d0d96a
BS
4951/* For an expression that has the form
4952 (A && B) || ~B
4953 or
4954 (A || B) && ~B,
4955 we can drop one of the inner expressions and simplify to
4956 A || ~B
4957 or
4958 A && ~B
4959 LOC is the location of the resulting expression. OP is the inner
4960 logical operation; the left-hand side in the examples above, while CMPOP
4961 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4962 removing a condition that guards another, as in
4963 (A != NULL && A->...) || A == NULL
4964 which we must not transform. If RHS_ONLY is true, only eliminate the
4965 right-most operand of the inner logical operation. */
4966
4967static tree
4968merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4969 bool rhs_only)
4970{
4971 tree type = TREE_TYPE (cmpop);
4972 enum tree_code code = TREE_CODE (cmpop);
4973 enum tree_code truthop_code = TREE_CODE (op);
4974 tree lhs = TREE_OPERAND (op, 0);
4975 tree rhs = TREE_OPERAND (op, 1);
4976 tree orig_lhs = lhs, orig_rhs = rhs;
4977 enum tree_code rhs_code = TREE_CODE (rhs);
4978 enum tree_code lhs_code = TREE_CODE (lhs);
4979 enum tree_code inv_code;
4980
4981 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4982 return NULL_TREE;
4983
4984 if (TREE_CODE_CLASS (code) != tcc_comparison)
4985 return NULL_TREE;
4986
4987 if (rhs_code == truthop_code)
4988 {
4989 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4990 if (newrhs != NULL_TREE)
4991 {
4992 rhs = newrhs;
4993 rhs_code = TREE_CODE (rhs);
4994 }
4995 }
4996 if (lhs_code == truthop_code && !rhs_only)
4997 {
4998 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4999 if (newlhs != NULL_TREE)
5000 {
5001 lhs = newlhs;
5002 lhs_code = TREE_CODE (lhs);
5003 }
5004 }
5005
5006 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5007 if (inv_code == rhs_code
5008 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5009 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5010 return lhs;
5011 if (!rhs_only && inv_code == lhs_code
5012 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5013 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5014 return rhs;
5015 if (rhs != orig_rhs || lhs != orig_lhs)
5016 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5017 lhs, rhs);
5018 return NULL_TREE;
5019}
5020
b2215d83
TW
5021/* Find ways of folding logical expressions of LHS and RHS:
5022 Try to merge two comparisons to the same innermost item.
5023 Look for range tests like "ch >= '0' && ch <= '9'".
5024 Look for combinations of simple terms on machines with expensive branches
5025 and evaluate the RHS unconditionally.
6d716ca8
RS
5026
5027 For example, if we have p->a == 2 && p->b == 4 and we can make an
5028 object large enough to span both A and B, we can do this with a comparison
5029 against the object ANDed with a mask.
5030
5031 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5032 operations to do this with one comparison.
5033
5034 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5035 function and the one above.
5036
5037 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5038 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5039
5040 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5041 two operands.
5042
5043 We return the simplified tree or 0 if no optimization is possible. */
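/* Added illustrative sketch: given

     struct s { unsigned int a : 8; unsigned int b : 8; } *p;

   "p->a == 2 && p->b == 4" can, when both bit-fields fit in one machine mode,
   be folded into a single 16-bit load that is masked and compared against the
   merged constant (0x0402 or 0x0204 depending on endianness).  */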
5044
5045static tree
db3927fb
AH
5046fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5047 tree lhs, tree rhs)
6d716ca8 5048{
f42ef510 5049 /* If this is the "or" of two comparisons, we can do something if
6d716ca8 5050 the comparisons are NE_EXPR. If this is the "and", we can do something
b6cc0a72 5051 if the comparisons are EQ_EXPR. I.e.,
fa8db1f7 5052 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6d716ca8
RS
5053
5054 WANTED_CODE is this operation code. For single bit fields, we can
5055 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5056 comparison for one-bit fields. */
5057
b2215d83 5058 enum tree_code wanted_code;
6d716ca8 5059 enum tree_code lcode, rcode;
b2215d83 5060 tree ll_arg, lr_arg, rl_arg, rr_arg;
6d716ca8 5061 tree ll_inner, lr_inner, rl_inner, rr_inner;
770ae6cc
RK
5062 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5063 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
45dc13b9
JJ
5064 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5065 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6d716ca8
RS
5066 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5067 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
45dc13b9 5068 enum machine_mode lnmode, rnmode;
6d716ca8 5069 tree ll_mask, lr_mask, rl_mask, rr_mask;
d4453ee5 5070 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
b2215d83 5071 tree l_const, r_const;
45dc13b9
JJ
5072 tree lntype, rntype, result;
5073 HOST_WIDE_INT first_bit, end_bit;
b2215d83 5074 int volatilep;
47392a21
MM
5075 tree orig_lhs = lhs, orig_rhs = rhs;
5076 enum tree_code orig_code = code;
6d716ca8 5077
ebde8a27
RK
5078 /* Start by getting the comparison codes. Fail if anything is volatile.
5079 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5080 it were surrounded with a NE_EXPR. */
6d716ca8 5081
ebde8a27 5082 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
b2215d83
TW
5083 return 0;
5084
6d716ca8
RS
5085 lcode = TREE_CODE (lhs);
5086 rcode = TREE_CODE (rhs);
ef659ec0 5087
96d4cf0a 5088 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
59ce6d6b 5089 {
e9ea8bd5 5090 lhs = build2 (NE_EXPR, truth_type, lhs,
57decb7e 5091 build_int_cst (TREE_TYPE (lhs), 0));
59ce6d6b
RS
5092 lcode = NE_EXPR;
5093 }
96d4cf0a
RK
5094
5095 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
59ce6d6b 5096 {
e9ea8bd5 5097 rhs = build2 (NE_EXPR, truth_type, rhs,
57decb7e 5098 build_int_cst (TREE_TYPE (rhs), 0));
59ce6d6b
RS
5099 rcode = NE_EXPR;
5100 }
96d4cf0a 5101
6615c446
JO
5102 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5103 || TREE_CODE_CLASS (rcode) != tcc_comparison)
ef659ec0
TW
5104 return 0;
5105
b2215d83
TW
5106 ll_arg = TREE_OPERAND (lhs, 0);
5107 lr_arg = TREE_OPERAND (lhs, 1);
5108 rl_arg = TREE_OPERAND (rhs, 0);
5109 rr_arg = TREE_OPERAND (rhs, 1);
b6cc0a72 5110
8dcb27ed
RS
5111 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5112 if (simple_operand_p (ll_arg)
d1a7edaf 5113 && simple_operand_p (lr_arg))
8dcb27ed 5114 {
8dcb27ed
RS
5115 if (operand_equal_p (ll_arg, rl_arg, 0)
5116 && operand_equal_p (lr_arg, rr_arg, 0))
d1a7edaf 5117 {
db3927fb 5118 result = combine_comparisons (loc, code, lcode, rcode,
d1a7edaf
PB
5119 truth_type, ll_arg, lr_arg);
5120 if (result)
5121 return result;
5122 }
8dcb27ed
RS
5123 else if (operand_equal_p (ll_arg, rr_arg, 0)
5124 && operand_equal_p (lr_arg, rl_arg, 0))
d1a7edaf 5125 {
db3927fb 5126 result = combine_comparisons (loc, code, lcode,
d1a7edaf
PB
5127 swap_tree_comparison (rcode),
5128 truth_type, ll_arg, lr_arg);
5129 if (result)
5130 return result;
5131 }
8dcb27ed
RS
5132 }
5133
d1a7edaf
PB
5134 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5135 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5136
8227896c 5137 /* If the RHS can be evaluated unconditionally and its operands are
b2215d83
TW
5138 simple, it wins to evaluate the RHS unconditionally on machines
5139 with expensive branches. In this case, this isn't a comparison
1d691c53
RK
5140 that can be merged. Avoid doing this if the RHS is a floating-point
5141 comparison since those can trap. */
b2215d83 5142
7f4b6d20 5143 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
3a4fd356 5144 false) >= 2
1d691c53 5145 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
b2215d83 5146 && simple_operand_p (rl_arg)
8227896c 5147 && simple_operand_p (rr_arg))
01c58f26
RS
5148 {
5149 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5150 if (code == TRUTH_OR_EXPR
5151 && lcode == NE_EXPR && integer_zerop (lr_arg)
5152 && rcode == NE_EXPR && integer_zerop (rr_arg)
87a72aa8
AP
5153 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5154 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
c9019218 5155 return build2_loc (loc, NE_EXPR, truth_type,
db3927fb
AH
5156 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5157 ll_arg, rl_arg),
5158 build_int_cst (TREE_TYPE (ll_arg), 0));
01c58f26
RS
5159
5160 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5161 if (code == TRUTH_AND_EXPR
5162 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5163 && rcode == EQ_EXPR && integer_zerop (rr_arg)
87a72aa8
AP
5164 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5165 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
c9019218 5166 return build2_loc (loc, EQ_EXPR, truth_type,
db3927fb
AH
5167 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5168 ll_arg, rl_arg),
5169 build_int_cst (TREE_TYPE (ll_arg), 0));
01c58f26 5170
b8610a53 5171 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
47392a21
MM
5172 {
5173 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
c9019218 5174 return build2_loc (loc, code, truth_type, lhs, rhs);
47392a21
MM
5175 return NULL_TREE;
5176 }
01c58f26 5177 }
b2215d83 5178
ef659ec0
TW
5179 /* See if the comparisons can be merged. Then get all the parameters for
5180 each side. */
5181
6d716ca8 5182 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
ef659ec0 5183 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6d716ca8
RS
5184 return 0;
5185
b2215d83 5186 volatilep = 0;
db3927fb 5187 ll_inner = decode_field_reference (loc, ll_arg,
6d716ca8 5188 &ll_bitsize, &ll_bitpos, &ll_mode,
d4453ee5
RK
5189 &ll_unsignedp, &volatilep, &ll_mask,
5190 &ll_and_mask);
db3927fb 5191 lr_inner = decode_field_reference (loc, lr_arg,
6d716ca8 5192 &lr_bitsize, &lr_bitpos, &lr_mode,
d4453ee5
RK
5193 &lr_unsignedp, &volatilep, &lr_mask,
5194 &lr_and_mask);
db3927fb 5195 rl_inner = decode_field_reference (loc, rl_arg,
6d716ca8 5196 &rl_bitsize, &rl_bitpos, &rl_mode,
d4453ee5
RK
5197 &rl_unsignedp, &volatilep, &rl_mask,
5198 &rl_and_mask);
db3927fb 5199 rr_inner = decode_field_reference (loc, rr_arg,
6d716ca8 5200 &rr_bitsize, &rr_bitpos, &rr_mode,
d4453ee5
RK
5201 &rr_unsignedp, &volatilep, &rr_mask,
5202 &rr_and_mask);
6d716ca8
RS
5203
5204 /* It must be true that the inner operation on the lhs of each
5205 comparison must be the same if we are to be able to do anything.
5206 Then see if we have constants. If not, the same must be true for
5207 the rhs's. */
5208 if (volatilep || ll_inner == 0 || rl_inner == 0
5209 || ! operand_equal_p (ll_inner, rl_inner, 0))
5210 return 0;
5211
b2215d83
TW
5212 if (TREE_CODE (lr_arg) == INTEGER_CST
5213 && TREE_CODE (rr_arg) == INTEGER_CST)
5214 l_const = lr_arg, r_const = rr_arg;
6d716ca8
RS
5215 else if (lr_inner == 0 || rr_inner == 0
5216 || ! operand_equal_p (lr_inner, rr_inner, 0))
5217 return 0;
b2215d83
TW
5218 else
5219 l_const = r_const = 0;
6d716ca8
RS
5220
5221 /* If either comparison code is not correct for our logical operation,
5222 fail. However, we can convert a one-bit comparison against zero into
5223 the opposite comparison against that bit being set in the field. */
b2215d83 5224
9c0ae98b 5225 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6d716ca8
RS
5226 if (lcode != wanted_code)
5227 {
5228 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5a6b3365 5229 {
2bd21a02
AS
5230 /* Make the left operand unsigned, since we are only interested
5231 in the value of one bit. Otherwise we are doing the wrong
5232 thing below. */
5233 ll_unsignedp = 1;
71a874cd 5234 l_const = ll_mask;
5a6b3365 5235 }
6d716ca8
RS
5236 else
5237 return 0;
5238 }
5239
71a874cd 5240 /* This is analogous to the code for l_const above. */
6d716ca8
RS
5241 if (rcode != wanted_code)
5242 {
5243 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5a6b3365 5244 {
2bd21a02 5245 rl_unsignedp = 1;
71a874cd 5246 r_const = rl_mask;
5a6b3365 5247 }
6d716ca8
RS
5248 else
5249 return 0;
5250 }
5251
5252 /* See if we can find a mode that contains both fields being compared on
5253 the left. If we can't, fail. Otherwise, update all constants and masks
5254 to be relative to a field of that size. */
5255 first_bit = MIN (ll_bitpos, rl_bitpos);
5256 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5257 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5258 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5259 volatilep);
5260 if (lnmode == VOIDmode)
5261 return 0;
5262
5263 lnbitsize = GET_MODE_BITSIZE (lnmode);
5264 lnbitpos = first_bit & ~ (lnbitsize - 1);
5785c7de 5265 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6d716ca8
RS
5266 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5267
f76b9db2
ILT
5268 if (BYTES_BIG_ENDIAN)
5269 {
5270 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5271 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5272 }
6d716ca8 5273
db3927fb 5274 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
43a5d30b 5275 size_int (xll_bitpos));
db3927fb 5276 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
43a5d30b 5277 size_int (xrl_bitpos));
6d716ca8 5278
6d716ca8
RS
5279 if (l_const)
5280 {
db3927fb 5281 l_const = fold_convert_loc (loc, lntype, l_const);
b6cc0a72 5282 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
43a5d30b 5283 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
02103577 5284 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
db3927fb 5285 fold_build1_loc (loc, BIT_NOT_EXPR,
43a5d30b 5286 lntype, ll_mask))))
02103577 5287 {
d4ee4d25 5288 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
b6cc0a72 5289
1b0f3e79 5290 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
02103577 5291 }
6d716ca8
RS
5292 }
5293 if (r_const)
5294 {
db3927fb 5295 r_const = fold_convert_loc (loc, lntype, r_const);
d4453ee5 5296 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
43a5d30b 5297 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
02103577 5298 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
db3927fb 5299 fold_build1_loc (loc, BIT_NOT_EXPR,
43a5d30b 5300 lntype, rl_mask))))
02103577 5301 {
d4ee4d25 5302 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
ab87f8c8 5303
1b0f3e79 5304 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
02103577 5305 }
6d716ca8
RS
5306 }
5307
45dc13b9
JJ
5308 /* If the right sides are not constant, do the same for it. Also,
5309 disallow this optimization if a size or signedness mismatch occurs
5310 between the left and right sides. */
5311 if (l_const == 0)
5312 {
5313 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5314 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5315 /* Make sure the two fields on the right
5316 correspond to the left without being swapped. */
5317 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5318 return 0;
5319
5320 first_bit = MIN (lr_bitpos, rr_bitpos);
5321 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5322 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5323 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5324 volatilep);
5325 if (rnmode == VOIDmode)
5326 return 0;
5327
5328 rnbitsize = GET_MODE_BITSIZE (rnmode);
5329 rnbitpos = first_bit & ~ (rnbitsize - 1);
5330 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5331 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5332
5333 if (BYTES_BIG_ENDIAN)
5334 {
5335 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5336 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5337 }
5338
db3927fb
AH
5339 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5340 rntype, lr_mask),
43a5d30b 5341 size_int (xlr_bitpos));
db3927fb
AH
5342 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5343 rntype, rr_mask),
43a5d30b 5344 size_int (xrr_bitpos));
45dc13b9
JJ
5345
5346 /* Make a mask that corresponds to both fields being compared.
5347 Do this for both items being compared. If the operands are the
5348 same size and the bits being compared are in the same position
5349 then we can do this by masking both and comparing the masked
5350 results. */
43a5d30b
AS
5351 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5352 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
45dc13b9
JJ
5353 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5354 {
db3927fb 5355 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
45dc13b9
JJ
5356 ll_unsignedp || rl_unsignedp);
5357 if (! all_ones_mask_p (ll_mask, lnbitsize))
5358 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5359
db3927fb 5360 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
45dc13b9
JJ
5361 lr_unsignedp || rr_unsignedp);
5362 if (! all_ones_mask_p (lr_mask, rnbitsize))
5363 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5364
c9019218 5365 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
45dc13b9
JJ
5366 }
5367
5368 /* There is still another way we can do something: If both pairs of
5369 fields being compared are adjacent, we may be able to make a wider
5370 field containing them both.
5371
5372 Note that we still must mask the lhs/rhs expressions. Furthermore,
5373 the mask must be shifted to account for the shift done by
5374 make_bit_field_ref. */
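 /* Added illustrative case: with adjacent 8-bit fields a and b present in
    both objects, "p->a == q->a && p->b == q->b" can be done as one 16-bit
    load and compare of the field pair on each side.  */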
5375 if ((ll_bitsize + ll_bitpos == rl_bitpos
5376 && lr_bitsize + lr_bitpos == rr_bitpos)
5377 || (ll_bitpos == rl_bitpos + rl_bitsize
5378 && lr_bitpos == rr_bitpos + rr_bitsize))
5379 {
5380 tree type;
5381
db3927fb
AH
5382 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5383 ll_bitsize + rl_bitsize,
45dc13b9 5384 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
db3927fb
AH
5385 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5386 lr_bitsize + rr_bitsize,
45dc13b9
JJ
5387 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5388
5389 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
43a5d30b 5390 size_int (MIN (xll_bitpos, xrl_bitpos)));
45dc13b9 5391 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
43a5d30b 5392 size_int (MIN (xlr_bitpos, xrr_bitpos)));
45dc13b9
JJ
5393
5394 /* Convert to the smaller type before masking out unwanted bits. */
5395 type = lntype;
5396 if (lntype != rntype)
5397 {
5398 if (lnbitsize > rnbitsize)
5399 {
db3927fb
AH
5400 lhs = fold_convert_loc (loc, rntype, lhs);
5401 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
45dc13b9
JJ
5402 type = rntype;
5403 }
5404 else if (lnbitsize < rnbitsize)
5405 {
db3927fb
AH
5406 rhs = fold_convert_loc (loc, lntype, rhs);
5407 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
45dc13b9
JJ
5408 type = lntype;
5409 }
5410 }
5411
5412 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5413 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5414
5415 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5416 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5417
c9019218 5418 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
45dc13b9
JJ
5419 }
5420
5421 return 0;
5422 }
5423
6d716ca8
RS
5424 /* Handle the case of comparisons with constants. If there is something in
5425 common between the masks, those bits of the constants must be the same.
5426 If not, the condition is always false. Test for this to avoid generating
5427 incorrect code below. */
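 /* Added example (illustrative): for "(x & 3) == 1 && (x & 5) == 4" the masks
    share bit 0 but the two constants disagree in that bit, so the AND of the
    tests can never be true and is folded to 0, with the warning below.  */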
43a5d30b 5428 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6d716ca8 5429 if (! integer_zerop (result)
43a5d30b
AS
5430 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5431 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6d716ca8
RS
5432 {
5433 if (wanted_code == NE_EXPR)
5434 {
d4ee4d25 5435 warning (0, "%<or%> of unmatched not-equal tests is always 1");
1b0f3e79 5436 return constant_boolean_node (true, truth_type);
6d716ca8
RS
5437 }
5438 else
5439 {
d4ee4d25 5440 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
1b0f3e79 5441 return constant_boolean_node (false, truth_type);
6d716ca8
RS
5442 }
5443 }
5444
45dc13b9
JJ
5445 /* Construct the expression we will return. First get the component
5446 reference we will make. Unless the mask is all ones the width of
5447 that field, perform the mask operation. Then compare with the
5448 merged constant. */
db3927fb 5449 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
45dc13b9
JJ
5450 ll_unsignedp || rl_unsignedp);
5451
43a5d30b 5452 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
45dc13b9 5453 if (! all_ones_mask_p (ll_mask, lnbitsize))
c9019218 5454 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
45dc13b9 5455
c9019218
JJ
5456 return build2_loc (loc, wanted_code, truth_type, result,
5457 const_binop (BIT_IOR_EXPR, l_const, r_const));
6d716ca8
RS
5458}
5459\f
b6cc0a72 5460/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
14a774a9
RK
5461 constant. */
5462
5463static tree
db3927fb
AH
5464optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5465 tree op0, tree op1)
14a774a9 5466{
d7e5b287 5467 tree arg0 = op0;
14a774a9 5468 enum tree_code op_code;
c071e8bc 5469 tree comp_const;
14a774a9
RK
5470 tree minmax_const;
5471 int consts_equal, consts_lt;
5472 tree inner;
5473
5474 STRIP_SIGN_NOPS (arg0);
5475
5476 op_code = TREE_CODE (arg0);
5477 minmax_const = TREE_OPERAND (arg0, 1);
db3927fb 5478 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
14a774a9
RK
5479 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5480 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5481 inner = TREE_OPERAND (arg0, 0);
5482
5483 /* If something does not permit us to optimize, return the original tree. */
5484 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5485 || TREE_CODE (comp_const) != INTEGER_CST
455f14dd 5486 || TREE_OVERFLOW (comp_const)
14a774a9 5487 || TREE_CODE (minmax_const) != INTEGER_CST
455f14dd 5488 || TREE_OVERFLOW (minmax_const))
d7e5b287 5489 return NULL_TREE;
14a774a9
RK
5490
5491 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5492 and GT_EXPR, doing the rest with recursive calls using logical
5493 simplifications. */
d7e5b287 5494 switch (code)
14a774a9
RK
5495 {
5496 case NE_EXPR: case LT_EXPR: case LE_EXPR:
d7e5b287 5497 {
db3927fb
AH
5498 tree tem
5499 = optimize_minmax_comparison (loc,
5500 invert_tree_comparison (code, false),
5501 type, op0, op1);
d817ed3b 5502 if (tem)
db3927fb 5503 return invert_truthvalue_loc (loc, tem);
d817ed3b 5504 return NULL_TREE;
d7e5b287 5505 }
14a774a9
RK
5506
5507 case GE_EXPR:
5508 return
db3927fb 5509 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
7f20a5b7 5510 optimize_minmax_comparison
db3927fb 5511 (loc, EQ_EXPR, type, arg0, comp_const),
7f20a5b7 5512 optimize_minmax_comparison
db3927fb 5513 (loc, GT_EXPR, type, arg0, comp_const));
14a774a9
RK
5514
5515 case EQ_EXPR:
5516 if (op_code == MAX_EXPR && consts_equal)
5517 /* MAX (X, 0) == 0 -> X <= 0 */
db3927fb 5518 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
14a774a9
RK
5519
5520 else if (op_code == MAX_EXPR && consts_lt)
5521 /* MAX (X, 0) == 5 -> X == 5 */
db3927fb 5522 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
14a774a9
RK
5523
5524 else if (op_code == MAX_EXPR)
5525 /* MAX (X, 0) == -1 -> false */
db3927fb 5526 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
14a774a9
RK
5527
5528 else if (consts_equal)
5529 /* MIN (X, 0) == 0 -> X >= 0 */
db3927fb 5530 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
14a774a9
RK
5531
5532 else if (consts_lt)
5533 /* MIN (X, 0) == 5 -> false */
db3927fb 5534 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
14a774a9
RK
5535
5536 else
5537 /* MIN (X, 0) == -1 -> X == -1 */
db3927fb 5538 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
14a774a9
RK
5539
5540 case GT_EXPR:
5541 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5542 /* MAX (X, 0) > 0 -> X > 0
5543 MAX (X, 0) > 5 -> X > 5 */
db3927fb 5544 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
14a774a9
RK
5545
5546 else if (op_code == MAX_EXPR)
5547 /* MAX (X, 0) > -1 -> true */
db3927fb 5548 return omit_one_operand_loc (loc, type, integer_one_node, inner);
14a774a9
RK
5549
5550 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5551 /* MIN (X, 0) > 0 -> false
5552 MIN (X, 0) > 5 -> false */
db3927fb 5553 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
14a774a9
RK
5554
5555 else
5556 /* MIN (X, 0) > -1 -> X > -1 */
db3927fb 5557 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
14a774a9
RK
5558
5559 default:
d7e5b287 5560 return NULL_TREE;
14a774a9
RK
5561 }
5562}
5563\f
1baa375f
RK
5564/* T is an integer expression that is being multiplied, divided, or taken a
5565 modulus (CODE says which and what kind of divide or modulus) by a
5566 constant C. See if we can eliminate that operation by folding it with
5567 other operations already in T. WIDE_TYPE, if non-null, is a type that
5568 should be used for the computation if wider than our type.
5569
cff27795
EB
5570 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5571 (X * 2) + (Y * 4). We must, however, be assured that either the original
8e1ca098
RH
5572 expression would not overflow or that overflow is undefined for the type
5573 in the language in question.
5574
1baa375f 5575 If we return a non-null expression, it is an equivalent form of the
6ac01510
ILT
5576 original computation, but need not be in the original type.
5577
5578 We set *STRICT_OVERFLOW_P to true if the return values depends on
5579 signed overflow being undefined. Otherwise we do not change
5580 *STRICT_OVERFLOW_P. */
1baa375f
RK
5581
5582static tree
6ac01510
ILT
5583extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5584 bool *strict_overflow_p)
cdd4b0d4
AB
5585{
5586 /* To avoid exponential search depth, refuse to allow recursion past
5587 three levels. Beyond that (1) it's highly unlikely that we'll find
5588 something interesting and (2) we've probably processed it before
5589 when we built the inner expression. */
5590
5591 static int depth;
5592 tree ret;
5593
5594 if (depth > 3)
5595 return NULL;
5596
5597 depth++;
6ac01510 5598 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
cdd4b0d4
AB
5599 depth--;
5600
5601 return ret;
5602}
5603
5604static tree
6ac01510
ILT
5605extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5606 bool *strict_overflow_p)
1baa375f
RK
5607{
5608 tree type = TREE_TYPE (t);
5609 enum tree_code tcode = TREE_CODE (t);
b6cc0a72 5610 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
1baa375f
RK
5611 > GET_MODE_SIZE (TYPE_MODE (type)))
5612 ? wide_type : type);
5613 tree t1, t2;
5614 int same_p = tcode == code;
9d0878fd 5615 tree op0 = NULL_TREE, op1 = NULL_TREE;
6ac01510 5616 bool sub_strict_overflow_p;
1baa375f
RK
5617
5618 /* Don't deal with constants of zero here; they confuse the code below. */
5619 if (integer_zerop (c))
8e1ca098 5620 return NULL_TREE;
1baa375f 5621
6615c446 5622 if (TREE_CODE_CLASS (tcode) == tcc_unary)
1baa375f
RK
5623 op0 = TREE_OPERAND (t, 0);
5624
6615c446 5625 if (TREE_CODE_CLASS (tcode) == tcc_binary)
1baa375f
RK
5626 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5627
5628 /* Note that we need not handle conditional operations here since fold
5629 already handles those cases. So just do arithmetic here. */
5630 switch (tcode)
5631 {
5632 case INTEGER_CST:
5633 /* For a constant, we can always simplify if we are a multiply
5634 or (for divide and modulus) if it is a multiple of our constant. */
5635 if (code == MULT_EXPR
43a5d30b 5636 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
088414c1 5637 return const_binop (code, fold_convert (ctype, t),
43a5d30b 5638 fold_convert (ctype, c));
1baa375f
RK
5639 break;
5640
1043771b 5641 CASE_CONVERT: case NON_LVALUE_EXPR:
43e4a9d8 5642 /* If op0 is an expression ... */
6615c446
JO
5643 if ((COMPARISON_CLASS_P (op0)
5644 || UNARY_CLASS_P (op0)
5645 || BINARY_CLASS_P (op0)
5039610b 5646 || VL_EXP_CLASS_P (op0)
6615c446 5647 || EXPRESSION_CLASS_P (op0))
fcb4587e
RG
5648 /* ... and has wrapping overflow, and its type is smaller
5649 than ctype, then we cannot pass through as widening. */
5650 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
43e4a9d8
EB
5651 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5652 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
fcb4587e
RG
5653 && (TYPE_PRECISION (ctype)
5654 > TYPE_PRECISION (TREE_TYPE (op0))))
a0fac73d
RS
5655 /* ... or this is a truncation (t is narrower than op0),
5656 then we cannot pass through this narrowing. */
fcb4587e
RG
5657 || (TYPE_PRECISION (type)
5658 < TYPE_PRECISION (TREE_TYPE (op0)))
068d2c9d
MM
5659 /* ... or signedness changes for division or modulus,
5660 then we cannot pass through this conversion. */
5661 || (code != MULT_EXPR
8df83eae 5662 && (TYPE_UNSIGNED (ctype)
ac029795
RG
5663 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5664 /* ... or has undefined overflow while the converted to
5665 type has not, we cannot do the operation in the inner type
5666 as that would introduce undefined overflow. */
5667 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5668 && !TYPE_OVERFLOW_UNDEFINED (type))))
eff9c80d
RH
5669 break;
5670
1baa375f 5671 /* Pass the constant down and see if we can make a simplification. If
59adecfa
RK
5672 we can, replace this expression with the inner simplification for
5673 possible later conversion to our or some other type. */
088414c1 5674 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
3cd58fd7 5675 && TREE_CODE (t2) == INTEGER_CST
455f14dd 5676 && !TREE_OVERFLOW (t2)
3cd58fd7
OH
5677 && (0 != (t1 = extract_muldiv (op0, t2, code,
5678 code == MULT_EXPR
6ac01510
ILT
5679 ? ctype : NULL_TREE,
5680 strict_overflow_p))))
1baa375f
RK
5681 return t1;
5682 break;
5683
47d42ce2
JJ
5684 case ABS_EXPR:
5685 /* If widening the type changes it from signed to unsigned, then we
5686 must avoid building ABS_EXPR itself as unsigned. */
5687 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5688 {
12753674 5689 tree cstype = (*signed_type_for) (ctype);
6ac01510
ILT
5690 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5691 != 0)
47d42ce2 5692 {
7f20a5b7 5693 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
47d42ce2
JJ
5694 return fold_convert (ctype, t1);
5695 }
5696 break;
5697 }
a0857153
RG
5698 /* If the constant is negative, we cannot simplify this. */
5699 if (tree_int_cst_sgn (c) == -1)
5700 break;
47d42ce2
JJ
5701 /* FALLTHROUGH */
5702 case NEGATE_EXPR:
6ac01510
ILT
5703 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5704 != 0)
7f20a5b7 5705 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
1baa375f
RK
5706 break;
5707
5708 case MIN_EXPR: case MAX_EXPR:
13393c8a
JW
5709 /* If widening the type changes the signedness, then we can't perform
5710 this optimization as that changes the result. */
8df83eae 5711 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
13393c8a
JW
5712 break;
5713
1baa375f 5714 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6ac01510
ILT
5715 sub_strict_overflow_p = false;
5716 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5717 &sub_strict_overflow_p)) != 0
5718 && (t2 = extract_muldiv (op1, c, code, wide_type,
5719 &sub_strict_overflow_p)) != 0)
59adecfa
RK
5720 {
5721 if (tree_int_cst_sgn (c) < 0)
5722 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6ac01510
ILT
5723 if (sub_strict_overflow_p)
5724 *strict_overflow_p = true;
7f20a5b7
KH
5725 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5726 fold_convert (ctype, t2));
59adecfa 5727 }
1baa375f
RK
5728 break;
5729
1baa375f
RK
5730 case LSHIFT_EXPR: case RSHIFT_EXPR:
5731 /* If the second operand is constant, this is a multiplication
5732 or floor division, by a power of two, so we can treat it that
9e629a80
JM
5733 way unless the multiplier or divisor overflows. Signed
5734 left-shift overflow is implementation-defined rather than
5735 undefined in C90, so do not convert signed left shift into
5736 multiplication. */
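 /* Added illustrative note: for unsigned X, "X << 3" is treated below as
    X * 8 and "X >> 2" as X / 4 (a FLOOR_DIV_EXPR), so the multiply/divide
    logic of this function also applies to shifts by a small constant.  */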
1baa375f 5737 if (TREE_CODE (op1) == INTEGER_CST
9e629a80 5738 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
d08230fe
NC
5739 /* const_binop may not detect overflow correctly,
5740 so check for it explicitly here. */
5741 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5742 && TREE_INT_CST_HIGH (op1) == 0
088414c1
RS
5743 && 0 != (t1 = fold_convert (ctype,
5744 const_binop (LSHIFT_EXPR,
5745 size_one_node,
43a5d30b 5746 op1)))
455f14dd 5747 && !TREE_OVERFLOW (t1))
59ce6d6b
RS
5748 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5749 ? MULT_EXPR : FLOOR_DIV_EXPR,
db3927fb
AH
5750 ctype,
5751 fold_convert (ctype, op0),
5752 t1),
6ac01510 5753 c, code, wide_type, strict_overflow_p);
1baa375f
RK
5754 break;
5755
5756 case PLUS_EXPR: case MINUS_EXPR:
5757 /* See if we can eliminate the operation on both sides. If we can, we
5758 can return a new PLUS or MINUS. If we can't, the only remaining
5759 cases where we can do anything are if the second operand is a
5760 constant. */
6ac01510
ILT
5761 sub_strict_overflow_p = false;
5762 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5763 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
fba2c0cd
JJ
5764 if (t1 != 0 && t2 != 0
5765 && (code == MULT_EXPR
b77f3744
CE
5766 /* If not multiplication, we can only do this if both operands
5767 are divisible by c. */
5768 || (multiple_of_p (ctype, op0, c)
5769 && multiple_of_p (ctype, op1, c))))
6ac01510
ILT
5770 {
5771 if (sub_strict_overflow_p)
5772 *strict_overflow_p = true;
5773 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5774 fold_convert (ctype, t2));
5775 }
1baa375f 5776
59adecfa
RK
5777 /* If this was a subtraction, negate OP1 and set it to be an addition.
5778 This simplifies the logic below. */
5779 if (tcode == MINUS_EXPR)
ffaf6f25
EB
5780 {
5781 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5782 /* If OP1 was not easily negatable, the constant may be OP0. */
5783 if (TREE_CODE (op0) == INTEGER_CST)
5784 {
5785 tree tem = op0;
5786 op0 = op1;
5787 op1 = tem;
5788 tem = t1;
5789 t1 = t2;
5790 t2 = tem;
5791 }
5792 }
59adecfa 5793
f9011d04
RK
5794 if (TREE_CODE (op1) != INTEGER_CST)
5795 break;
5796
59adecfa
RK
5797 /* If either OP1 or C are negative, this optimization is not safe for
5798 some of the division and remainder types while for others we need
5799 to change the code. */
5800 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5801 {
5802 if (code == CEIL_DIV_EXPR)
5803 code = FLOOR_DIV_EXPR;
59adecfa
RK
5804 else if (code == FLOOR_DIV_EXPR)
5805 code = CEIL_DIV_EXPR;
0629440f
RK
5806 else if (code != MULT_EXPR
5807 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
59adecfa
RK
5808 break;
5809 }
5810
12644a9a
TM
5811 /* If it's a multiply or a division/modulus operation of a multiple
5812 of our constant, do the operation and verify it doesn't overflow. */
5813 if (code == MULT_EXPR
43a5d30b 5814 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
dd3f0101 5815 {
088414c1 5816 op1 = const_binop (code, fold_convert (ctype, op1),
43a5d30b 5817 fold_convert (ctype, c));
41ba7ed7
RS
5818 /* We allow the constant to overflow with wrapping semantics. */
5819 if (op1 == 0
eeef0e45 5820 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
dd3f0101
KH
5821 break;
5822 }
12644a9a 5823 else
dd3f0101 5824 break;
59adecfa 5825
23cdce68
RH
5826 /* If we have an unsigned type that is not a sizetype, we cannot widen
5827 the operation since it will change the result if the original
5828 computation overflowed. */
8df83eae 5829 if (TYPE_UNSIGNED (ctype)
7393c642 5830 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
23cdce68
RH
5831 && ctype != type)
5832 break;
5833
1baa375f 5834 /* If we were able to eliminate our operation from the first side,
59adecfa
RK
5835 apply our operation to the second side and reform the PLUS. */
5836 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
7f20a5b7 5837 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
1baa375f
RK
5838
5839 /* The last case is if we are a multiply. In that case, we can
5840 apply the distributive law to commute the multiply and addition
30f7a378 5841 if the multiplication of the constants doesn't overflow. */
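 /* Added illustrative example: extracting C == 4 from "(X + 5) * 4" when X
    itself cannot absorb the factor yields "(X * 4) + 20" here, the constant
    5 * 4 having already been folded into OP1 above.  */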
59adecfa 5842 if (code == MULT_EXPR)
7f20a5b7
KH
5843 return fold_build2 (tcode, ctype,
5844 fold_build2 (code, ctype,
5845 fold_convert (ctype, op0),
5846 fold_convert (ctype, c)),
5847 op1);
1baa375f
RK
5848
5849 break;
5850
5851 case MULT_EXPR:
5852 /* We have a special case here if we are doing something like
5853 (C * 8) % 4 since we know that's zero. */
5854 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5855 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
beeab17c
RG
5856 /* If the multiplication can overflow we cannot optimize this.
5857 ??? Until we can properly mark individual operations as
5858 not overflowing we need to treat sizetype special here as
5859 stor-layout relies on this optimization to make
5860 DECL_FIELD_BIT_OFFSET always a constant. */
5861 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5862 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5863 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
1baa375f 5864 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
43a5d30b 5865 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
beeab17c
RG
5866 {
5867 *strict_overflow_p = true;
5868 return omit_one_operand (type, integer_zero_node, op0);
5869 }
1baa375f 5870
30f7a378 5871 /* ... fall through ... */
1baa375f
RK
5872
5873 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5874 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5875 /* If we can extract our operation from the LHS, do so and return a
5876 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5877 do something only if the second operand is a constant. */
5878 if (same_p
6ac01510
ILT
5879 && (t1 = extract_muldiv (op0, c, code, wide_type,
5880 strict_overflow_p)) != 0)
7f20a5b7
KH
5881 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5882 fold_convert (ctype, op1));
1baa375f 5883 else if (tcode == MULT_EXPR && code == MULT_EXPR
6ac01510
ILT
5884 && (t1 = extract_muldiv (op1, c, code, wide_type,
5885 strict_overflow_p)) != 0)
7f20a5b7
KH
5886 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5887 fold_convert (ctype, t1));
1baa375f
RK
5888 else if (TREE_CODE (op1) != INTEGER_CST)
5889 return 0;
5890
5891 /* If these are the same operation types, we can associate them
5892 assuming no overflow. */
5893 if (tcode == code
db3927fb
AH
5894 && 0 != (t1 = int_const_binop (MULT_EXPR,
5895 fold_convert (ctype, op1),
81ad578e 5896 fold_convert (ctype, c), 1))
9589f23e 5897 && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
81ad578e
RG
5898 (TYPE_UNSIGNED (ctype)
5899 && tcode != MULT_EXPR) ? -1 : 1,
5900 TREE_OVERFLOW (t1)))
455f14dd 5901 && !TREE_OVERFLOW (t1))
7f20a5b7 5902 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
1baa375f
RK
5903
5904 /* If these operations "cancel" each other, we have the main
5905 optimizations of this pass, which occur when either constant is a
5906 multiple of the other, in which case we replace this with either an
b6cc0a72 5907 operation of CODE or TCODE.
8e1ca098 5908
f5143c46 5909 If we have an unsigned type that is not a sizetype, we cannot do
8e1ca098
RH
5910 this since it will change the result if the original computation
5911 overflowed. */
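 /* Added illustrative example: when overflow is undefined for the type,
    "(X * 8) / 4" reaches here with tcode == MULT_EXPR, code == TRUNC_DIV_EXPR
    and 8 % 4 == 0, so the first branch below reduces it to "X * 2".  */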
eeef0e45 5912 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
7393c642 5913 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
8e1ca098
RH
5914 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5915 || (tcode == MULT_EXPR
5916 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
e6ebd07f
ZD
5917 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5918 && code != MULT_EXPR)))
1baa375f 5919 {
43a5d30b 5920 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6ac01510
ILT
5921 {
5922 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5923 *strict_overflow_p = true;
5924 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5925 fold_convert (ctype,
5926 const_binop (TRUNC_DIV_EXPR,
43a5d30b 5927 op1, c)));
6ac01510 5928 }
43a5d30b 5929 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6ac01510
ILT
5930 {
5931 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5932 *strict_overflow_p = true;
5933 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5934 fold_convert (ctype,
5935 const_binop (TRUNC_DIV_EXPR,
43a5d30b 5936 c, op1)));
6ac01510 5937 }
1baa375f
RK
5938 }
5939 break;
5940
5941 default:
5942 break;
5943 }
5944
5945 return 0;
5946}
5947\f
f628873f
MM
5948/* Return a node which has the indicated constant VALUE (either 0 or
5949 1), and is of the indicated TYPE. */
5950
e9ea8bd5 5951tree
fa8db1f7 5952constant_boolean_node (int value, tree type)
f628873f
MM
5953{
5954 if (type == integer_type_node)
5955 return value ? integer_one_node : integer_zero_node;
9bb80bb2
RS
5956 else if (type == boolean_type_node)
5957 return value ? boolean_true_node : boolean_false_node;
b6cc0a72 5958 else
7d60be94 5959 return build_int_cst (type, value);
f628873f
MM
5960}
5961
020d90ee 5962
1f77b5da 5963/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
68626d4f
MM
5964 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5965 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
cc2902df 5966 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
68626d4f
MM
5967 COND is the first argument to CODE; otherwise (as in the example
5968 given here), it is the second argument. TYPE is the type of the
2b8a92de 5969 original expression. Return NULL_TREE if no simplification is
b3e65ebb 5970 possible. */
68626d4f
MM
5971
5972static tree
db3927fb
AH
5973fold_binary_op_with_conditional_arg (location_t loc,
5974 enum tree_code code,
e9da788c
KH
5975 tree type, tree op0, tree op1,
5976 tree cond, tree arg, int cond_first_p)
68626d4f 5977{
e9da788c 5978 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
92db3ec9 5979 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
68626d4f
MM
5980 tree test, true_value, false_value;
5981 tree lhs = NULL_TREE;
5982 tree rhs = NULL_TREE;
b3e65ebb 5983
68626d4f
MM
5984 if (TREE_CODE (cond) == COND_EXPR)
5985 {
5986 test = TREE_OPERAND (cond, 0);
5987 true_value = TREE_OPERAND (cond, 1);
5988 false_value = TREE_OPERAND (cond, 2);
5989 /* If this operand throws an expression, then it does not make
5990 sense to try to perform a logical or arithmetic operation
f4085d4c 5991 involving it. */
68626d4f 5992 if (VOID_TYPE_P (TREE_TYPE (true_value)))
f4085d4c 5993 lhs = true_value;
68626d4f 5994 if (VOID_TYPE_P (TREE_TYPE (false_value)))
f4085d4c 5995 rhs = false_value;
68626d4f
MM
5996 }
5997 else
5998 {
5999 tree testtype = TREE_TYPE (cond);
6000 test = cond;
1b0f3e79
RS
6001 true_value = constant_boolean_node (true, testtype);
6002 false_value = constant_boolean_node (false, testtype);
68626d4f 6003 }
dd3f0101 6004
9e9ef331
EB
6005 /* This transformation is only worthwhile if we don't have to wrap ARG
6006 in a SAVE_EXPR and the operation can be simplified on at least one
6007 of the branches once it is pushed inside the COND_EXPR. */
6008 if (!TREE_CONSTANT (arg)
6009 && (TREE_SIDE_EFFECTS (arg)
6010 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6011 return NULL_TREE;
6012
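 /* Added illustrative note: with a constant ARG, e.g. "(b ? x : y) + 5",
    no SAVE_EXPR is needed and the result built below is
    "b ? x + 5 : y + 5".  */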
db3927fb 6013 arg = fold_convert_loc (loc, arg_type, arg);
68626d4f 6014 if (lhs == 0)
3b70b82a 6015 {
db3927fb 6016 true_value = fold_convert_loc (loc, cond_type, true_value);
6405f32f 6017 if (cond_first_p)
db3927fb 6018 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6405f32f 6019 else
db3927fb 6020 lhs = fold_build2_loc (loc, code, type, arg, true_value);
3b70b82a 6021 }
68626d4f 6022 if (rhs == 0)
3b70b82a 6023 {
db3927fb 6024 false_value = fold_convert_loc (loc, cond_type, false_value);
6405f32f 6025 if (cond_first_p)
db3927fb 6026 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6405f32f 6027 else
db3927fb 6028 rhs = fold_build2_loc (loc, code, type, arg, false_value);
3b70b82a 6029 }
f4085d4c 6030
9e9ef331
EB
6031 /* Check that we have simplified at least one of the branches. */
6032 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6033 return NULL_TREE;
6034
6035 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
68626d4f
MM
6036}
6037
ab87f8c8 6038\f
71925bc0
RS
6039/* Subroutine of fold() that checks for the addition of +/- 0.0.
6040
6041 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6042 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6043 ADDEND is the same as X.
6044
cc2902df 6045 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
71925bc0
RS
6046 and finite. The problematic cases are when X is zero, and its mode
6047 has signed zeros. In the case of rounding towards -infinity,
6048 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6049 modes, X + 0 is not the same as X because -0 + 0 is 0. */
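/* Added illustrative restatement: with signed zeros honored, "x + 0.0" cannot
   be folded to x because (-0.0) + 0.0 is +0.0, and "x - 0.0" can only be
   folded when rounding towards -infinity is not in effect, since in that mode
   0.0 - 0.0 yields -0.0.  */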
6050
2dc0f633 6051bool
ac545c64 6052fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
71925bc0
RS
6053{
6054 if (!real_zerop (addend))
6055 return false;
6056
3bc400cd
RS
6057 /* Don't allow the fold with -fsignaling-nans. */
6058 if (HONOR_SNANS (TYPE_MODE (type)))
6059 return false;
6060
71925bc0
RS
6061 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6062 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6063 return true;
6064
6065 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6066 if (TREE_CODE (addend) == REAL_CST
6067 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6068 negate = !negate;
6069
6070 /* The mode has signed zeros, and we have to honor their sign.
6071 In this situation, there is only one case we can return true for.
6072 X - 0 is the same as X unless rounding towards -infinity is
6073 supported. */
6074 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6075}
6076
c876997f
RS
6077/* Subroutine of fold() that checks comparisons of built-in math
6078 functions against real constants.
6079
6080 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6081 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6082 is the type of the result and ARG0 and ARG1 are the operands of the
6083 comparison. ARG1 must be a TREE_REAL_CST.
6084
6085 The function returns the constant folded tree if a simplification
6086 can be made, and NULL_TREE otherwise. */
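/* Added illustrative example: "sqrt (x) > 2.0" is folded below to "x > 4.0"
   by squaring the constant, and a comparison against a negative constant
   folds to a constant result or to "x >= 0.0".  */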
6087
6088static tree
db3927fb
AH
6089fold_mathfn_compare (location_t loc,
6090 enum built_in_function fcode, enum tree_code code,
75040a04 6091 tree type, tree arg0, tree arg1)
c876997f
RS
6092{
6093 REAL_VALUE_TYPE c;
6094
82b4201f 6095 if (BUILTIN_SQRT_P (fcode))
c876997f 6096 {
5039610b 6097 tree arg = CALL_EXPR_ARG (arg0, 0);
c876997f
RS
6098 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6099
6100 c = TREE_REAL_CST (arg1);
6101 if (REAL_VALUE_NEGATIVE (c))
6102 {
6103 /* sqrt(x) < y is always false, if y is negative. */
6104 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
db3927fb 6105 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
c876997f
RS
6106
6107 /* sqrt(x) > y is always true, if y is negative and we
6108 don't care about NaNs, i.e. negative values of x. */
6109 if (code == NE_EXPR || !HONOR_NANS (mode))
db3927fb 6110 return omit_one_operand_loc (loc, type, integer_one_node, arg);
c876997f
RS
6111
6112 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
db3927fb 6113 return fold_build2_loc (loc, GE_EXPR, type, arg,
7f20a5b7 6114 build_real (TREE_TYPE (arg), dconst0));
c876997f
RS
6115 }
6116 else if (code == GT_EXPR || code == GE_EXPR)
6117 {
6118 REAL_VALUE_TYPE c2;
6119
6120 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6121 real_convert (&c2, mode, &c2);
6122
6123 if (REAL_VALUE_ISINF (c2))
6124 {
6125 /* sqrt(x) > y is x == +Inf, when y is very large. */
6126 if (HONOR_INFINITIES (mode))
db3927fb 6127 return fold_build2_loc (loc, EQ_EXPR, type, arg,
7f20a5b7 6128 build_real (TREE_TYPE (arg), c2));
c876997f
RS
6129
6130 /* sqrt(x) > y is always false, when y is very large
6131 and we don't care about infinities. */
db3927fb 6132 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
c876997f
RS
6133 }
6134
6135 /* sqrt(x) > c is the same as x > c*c. */
db3927fb 6136 return fold_build2_loc (loc, code, type, arg,
7f20a5b7 6137 build_real (TREE_TYPE (arg), c2));
c876997f
RS
6138 }
6139 else if (code == LT_EXPR || code == LE_EXPR)
6140 {
6141 REAL_VALUE_TYPE c2;
6142
6143 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6144 real_convert (&c2, mode, &c2);
6145
6146 if (REAL_VALUE_ISINF (c2))
6147 {
6148 /* sqrt(x) < y is always true, when y is a very large
6149 value and we don't care about NaNs or Infinities. */
6150 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
db3927fb 6151 return omit_one_operand_loc (loc, type, integer_one_node, arg);
c876997f
RS
6152
6153 /* sqrt(x) < y is x != +Inf when y is very large and we
6154 don't care about NaNs. */
6155 if (! HONOR_NANS (mode))
db3927fb 6156 return fold_build2_loc (loc, NE_EXPR, type, arg,
7f20a5b7 6157 build_real (TREE_TYPE (arg), c2));
c876997f
RS
6158
6159 /* sqrt(x) < y is x >= 0 when y is very large and we
6160 don't care about Infinities. */
6161 if (! HONOR_INFINITIES (mode))
db3927fb 6162 return fold_build2_loc (loc, GE_EXPR, type, arg,
7f20a5b7 6163 build_real (TREE_TYPE (arg), dconst0));
c876997f
RS
6164
6165 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5785c7de 6166 if (lang_hooks.decls.global_bindings_p () != 0
7a6cdb44 6167 || CONTAINS_PLACEHOLDER_P (arg))
c876997f
RS
6168 return NULL_TREE;
6169
6170 arg = save_expr (arg);
db3927fb
AH
6171 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6172 fold_build2_loc (loc, GE_EXPR, type, arg,
7f20a5b7
KH
6173 build_real (TREE_TYPE (arg),
6174 dconst0)),
db3927fb 6175 fold_build2_loc (loc, NE_EXPR, type, arg,
7f20a5b7
KH
6176 build_real (TREE_TYPE (arg),
6177 c2)));
c876997f
RS
6178 }
6179
6180 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6181 if (! HONOR_NANS (mode))
db3927fb 6182 return fold_build2_loc (loc, code, type, arg,
7f20a5b7 6183 build_real (TREE_TYPE (arg), c2));
c876997f
RS
6184
6185 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5785c7de 6186 if (lang_hooks.decls.global_bindings_p () == 0
7a6cdb44 6187 && ! CONTAINS_PLACEHOLDER_P (arg))
c876997f
RS
6188 {
6189 arg = save_expr (arg);
db3927fb
AH
6190 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6191 fold_build2_loc (loc, GE_EXPR, type, arg,
7f20a5b7
KH
6192 build_real (TREE_TYPE (arg),
6193 dconst0)),
db3927fb 6194 fold_build2_loc (loc, code, type, arg,
7f20a5b7
KH
6195 build_real (TREE_TYPE (arg),
6196 c2)));
c876997f
RS
6197 }
6198 }
6199 }
6200
6201 return NULL_TREE;
6202}
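/* Illustrative sketch, not part of the original fold-const.c: when unsafe
   math optimizations are enabled (e.g. by -ffast-math), the folding above
   rewrites a comparison of a sqrt call against a positive constant into a
   comparison of the squared constant, e.g. sqrt(x) > 2.0 becomes x > 4.0.
   Hypothetical before/after helpers using the GCC built-in.  */
static int example_sqrt_cmp_before (double x) { return __builtin_sqrt (x) > 2.0; }
static int example_sqrt_cmp_after (double x) { return x > 4.0; }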
6203
9ddae796
RS
6204/* Subroutine of fold() that optimizes comparisons against Infinities,
6205 either +Inf or -Inf.
6206
6207 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6208 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6209 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6210
6211 The function returns the constant folded tree if a simplification
6212 can be made, and NULL_TREE otherwise. */
6213
6214static tree
db3927fb
AH
6215fold_inf_compare (location_t loc, enum tree_code code, tree type,
6216 tree arg0, tree arg1)
9ddae796 6217{
18c2511c
RS
6218 enum machine_mode mode;
6219 REAL_VALUE_TYPE max;
6220 tree temp;
6221 bool neg;
6222
6223 mode = TYPE_MODE (TREE_TYPE (arg0));
6224
9ddae796 6225 /* For negative infinity swap the sense of the comparison. */
18c2511c
RS
6226 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6227 if (neg)
9ddae796
RS
6228 code = swap_tree_comparison (code);
6229
6230 switch (code)
6231 {
6232 case GT_EXPR:
 6233	      /* x > +Inf is always false, if we ignore sNaNs.  */

18c2511c 6234 if (HONOR_SNANS (mode))
9ddae796 6235 return NULL_TREE;
db3927fb 6236 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9ddae796
RS
6237
6238 case LE_EXPR:
 6239	      /* x <= +Inf is always true, if we don't care about NaNs.  */
18c2511c 6240 if (! HONOR_NANS (mode))
db3927fb 6241 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9ddae796
RS
6242
6243 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5785c7de 6244 if (lang_hooks.decls.global_bindings_p () == 0
7a6cdb44 6245 && ! CONTAINS_PLACEHOLDER_P (arg0))
9ddae796
RS
6246 {
6247 arg0 = save_expr (arg0);
db3927fb 6248 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
9ddae796
RS
6249 }
6250 break;
6251
18c2511c
RS
6252 case EQ_EXPR:
6253 case GE_EXPR:
6254 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6255 real_maxval (&max, neg, mode);
db3927fb 6256 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
7f20a5b7 6257 arg0, build_real (TREE_TYPE (arg0), max));
18c2511c
RS
6258
6259 case LT_EXPR:
6260 /* x < +Inf is always equal to x <= DBL_MAX. */
6261 real_maxval (&max, neg, mode);
db3927fb 6262 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7f20a5b7 6263 arg0, build_real (TREE_TYPE (arg0), max));
18c2511c
RS
6264
6265 case NE_EXPR:
6266 /* x != +Inf is always equal to !(x > DBL_MAX). */
6267 real_maxval (&max, neg, mode);
6268 if (! HONOR_NANS (mode))
db3927fb 6269 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7f20a5b7 6270 arg0, build_real (TREE_TYPE (arg0), max));
3100d647 6271
db3927fb 6272 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
7f20a5b7 6273 arg0, build_real (TREE_TYPE (arg0), max));
db3927fb 6274 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
9ddae796
RS
6275
6276 default:
6277 break;
6278 }
6279
6280 return NULL_TREE;
6281}
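/* Illustrative sketch, not part of the original fold-const.c: the
   source-level effect of fold_inf_compare.  A comparison against +Inf is
   rewritten in terms of the largest finite value, e.g. x < +Inf becomes
   x <= DBL_MAX and x >= +Inf becomes x > DBL_MAX.  Hypothetical helpers.  */
static int example_lt_inf_before (double x) { return x < __builtin_inf (); }
static int example_lt_inf_after (double x) { return x <= __DBL_MAX__; }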
71925bc0 6282
8dc2384c 6283/* Subroutine of fold() that optimizes comparisons of a division by
1ea7e6ad 6284 a nonzero integer constant against an integer constant, i.e.
8dc2384c
RS
6285 X/C1 op C2.
6286
6287 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6288 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
 6289   are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6290
6291 The function returns the constant folded tree if a simplification
6292 can be made, and NULL_TREE otherwise. */
6293
6294static tree
db3927fb
AH
6295fold_div_compare (location_t loc,
6296 enum tree_code code, tree type, tree arg0, tree arg1)
8dc2384c
RS
6297{
6298 tree prod, tmp, hi, lo;
6299 tree arg00 = TREE_OPERAND (arg0, 0);
6300 tree arg01 = TREE_OPERAND (arg0, 1);
9589f23e 6301 double_int val;
6b7283ac 6302 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
d56ee62b 6303 bool neg_overflow;
8dc2384c
RS
6304 int overflow;
6305
6306 /* We have to do this the hard way to detect unsigned overflow.
6307 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6b7283ac
EB
6308 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6309 TREE_INT_CST_HIGH (arg01),
6310 TREE_INT_CST_LOW (arg1),
6311 TREE_INT_CST_HIGH (arg1),
9589f23e
AS
6312 &val.low, &val.high, unsigned_p);
6313 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
d56ee62b 6314 neg_overflow = false;
8dc2384c 6315
6b7283ac 6316 if (unsigned_p)
8dc2384c 6317 {
000d8d44
RS
6318 tmp = int_const_binop (MINUS_EXPR, arg01,
6319 build_int_cst (TREE_TYPE (arg01), 1), 0);
8dc2384c
RS
6320 lo = prod;
6321
6322 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6b7283ac
EB
6323 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6324 TREE_INT_CST_HIGH (prod),
6325 TREE_INT_CST_LOW (tmp),
6326 TREE_INT_CST_HIGH (tmp),
9589f23e
AS
6327 &val.low, &val.high, unsigned_p);
6328 hi = force_fit_type_double (TREE_TYPE (arg00), val,
d95787e6 6329 -1, overflow | TREE_OVERFLOW (prod));
8dc2384c
RS
6330 }
6331 else if (tree_int_cst_sgn (arg01) >= 0)
6332 {
000d8d44
RS
6333 tmp = int_const_binop (MINUS_EXPR, arg01,
6334 build_int_cst (TREE_TYPE (arg01), 1), 0);
8dc2384c
RS
6335 switch (tree_int_cst_sgn (arg1))
6336 {
6337 case -1:
d56ee62b 6338 neg_overflow = true;
8dc2384c
RS
6339 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6340 hi = prod;
6341 break;
6342
6343 case 0:
6344 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6345 hi = tmp;
6346 break;
6347
6348 case 1:
6349 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6350 lo = prod;
6351 break;
6352
6353 default:
0bccc606 6354 gcc_unreachable ();
8dc2384c
RS
6355 }
6356 }
6357 else
6358 {
d2e74f6f
RS
6359 /* A negative divisor reverses the relational operators. */
6360 code = swap_tree_comparison (code);
6361
000d8d44
RS
6362 tmp = int_const_binop (PLUS_EXPR, arg01,
6363 build_int_cst (TREE_TYPE (arg01), 1), 0);
8dc2384c
RS
6364 switch (tree_int_cst_sgn (arg1))
6365 {
6366 case -1:
6367 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6368 lo = prod;
6369 break;
6370
6371 case 0:
6372 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6373 lo = tmp;
6374 break;
6375
6376 case 1:
d56ee62b
RS
6377 neg_overflow = true;
6378 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
8dc2384c
RS
6379 hi = prod;
6380 break;
6381
6382 default:
0bccc606 6383 gcc_unreachable ();
8dc2384c
RS
6384 }
6385 }
6386
6387 switch (code)
6388 {
6389 case EQ_EXPR:
6390 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
db3927fb 6391 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
8dc2384c 6392 if (TREE_OVERFLOW (hi))
db3927fb 6393 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
8dc2384c 6394 if (TREE_OVERFLOW (lo))
db3927fb
AH
6395 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6396 return build_range_check (loc, type, arg00, 1, lo, hi);
8dc2384c
RS
6397
6398 case NE_EXPR:
6399 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
db3927fb 6400 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
8dc2384c 6401 if (TREE_OVERFLOW (hi))
db3927fb 6402 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
8dc2384c 6403 if (TREE_OVERFLOW (lo))
db3927fb
AH
6404 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6405 return build_range_check (loc, type, arg00, 0, lo, hi);
8dc2384c
RS
6406
6407 case LT_EXPR:
6408 if (TREE_OVERFLOW (lo))
d56ee62b
RS
6409 {
6410 tmp = neg_overflow ? integer_zero_node : integer_one_node;
db3927fb 6411 return omit_one_operand_loc (loc, type, tmp, arg00);
d56ee62b 6412 }
db3927fb 6413 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
8dc2384c
RS
6414
6415 case LE_EXPR:
6416 if (TREE_OVERFLOW (hi))
d56ee62b
RS
6417 {
6418 tmp = neg_overflow ? integer_zero_node : integer_one_node;
db3927fb 6419 return omit_one_operand_loc (loc, type, tmp, arg00);
d56ee62b 6420 }
db3927fb 6421 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
8dc2384c
RS
6422
6423 case GT_EXPR:
6424 if (TREE_OVERFLOW (hi))
d56ee62b
RS
6425 {
6426 tmp = neg_overflow ? integer_one_node : integer_zero_node;
db3927fb 6427 return omit_one_operand_loc (loc, type, tmp, arg00);
d56ee62b 6428 }
db3927fb 6429 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
8dc2384c
RS
6430
6431 case GE_EXPR:
6432 if (TREE_OVERFLOW (lo))
d56ee62b
RS
6433 {
6434 tmp = neg_overflow ? integer_one_node : integer_zero_node;
db3927fb 6435 return omit_one_operand_loc (loc, type, tmp, arg00);
d56ee62b 6436 }
db3927fb 6437 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
8dc2384c
RS
6438
6439 default:
6440 break;
6441 }
6442
6443 return NULL_TREE;
6444}
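/* Illustrative sketch, not part of the original fold-const.c:
   fold_div_compare turns a comparison of a truncating division into a range
   check, e.g. for signed int x the test x / 3 == 4 holds exactly when
   12 <= x && x <= 14.  Hypothetical before/after helpers.  */
static int example_div_cmp_before (int x) { return x / 3 == 4; }
static int example_div_cmp_after (int x) { return 12 <= x && x <= 14; }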
6445
6446
7960bf22 6447/* If CODE with arguments ARG0 and ARG1 represents a single bit
a94400fd
KH
6448 equality/inequality test, then return a simplified form of the test
6449 using a sign testing. Otherwise return NULL. TYPE is the desired
6450 result type. */
d1822754 6451
a94400fd 6452static tree
db3927fb
AH
6453fold_single_bit_test_into_sign_test (location_t loc,
6454 enum tree_code code, tree arg0, tree arg1,
a94400fd 6455 tree result_type)
7960bf22 6456{
7960bf22
JL
6457 /* If this is testing a single bit, we can optimize the test. */
6458 if ((code == NE_EXPR || code == EQ_EXPR)
6459 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6460 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6461 {
7960bf22
JL
6462 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6463 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
a94400fd
KH
6464 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6465
1f7a8dcc
RS
6466 if (arg00 != NULL_TREE
6467 /* This is only a win if casting to a signed type is cheap,
6468 i.e. when arg00's type is not a partial mode. */
6469 && TYPE_PRECISION (TREE_TYPE (arg00))
6470 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
7960bf22 6471 {
12753674 6472 tree stype = signed_type_for (TREE_TYPE (arg00));
db3927fb
AH
6473 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6474 result_type,
6475 fold_convert_loc (loc, stype, arg00),
57decb7e 6476 build_int_cst (stype, 0));
7960bf22 6477 }
a94400fd
KH
6478 }
6479
6480 return NULL_TREE;
6481}
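/* Illustrative sketch, not part of the original fold-const.c: when the
   single bit tested is the sign bit, the test collapses to a sign
   comparison, e.g. for 32-bit int, (x & 0x80000000) != 0 becomes x < 0.
   Hypothetical helpers.  */
static int example_sign_test_before (int x) { return (x & 0x80000000) != 0; }
static int example_sign_test_after (int x) { return x < 0; }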
6482
6483/* If CODE with arguments ARG0 and ARG1 represents a single bit
6484 equality/inequality test, then return a simplified form of
6485 the test using shifts and logical operations. Otherwise return
6486 NULL. TYPE is the desired result type. */
6487
6488tree
db3927fb
AH
6489fold_single_bit_test (location_t loc, enum tree_code code,
6490 tree arg0, tree arg1, tree result_type)
a94400fd
KH
6491{
6492 /* If this is testing a single bit, we can optimize the test. */
6493 if ((code == NE_EXPR || code == EQ_EXPR)
6494 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6495 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6496 {
6497 tree inner = TREE_OPERAND (arg0, 0);
6498 tree type = TREE_TYPE (arg0);
6499 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6500 enum machine_mode operand_mode = TYPE_MODE (type);
6501 int ops_unsigned;
6502 tree signed_type, unsigned_type, intermediate_type;
000d8d44 6503 tree tem, one;
a94400fd
KH
6504
6505 /* First, see if we can fold the single bit test into a sign-bit
6506 test. */
db3927fb 6507 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
a94400fd
KH
6508 result_type);
6509 if (tem)
6510 return tem;
c87d821b 6511
d1822754 6512 /* Otherwise we have (A & C) != 0 where C is a single bit,
7960bf22
JL
6513 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6514 Similarly for (A & C) == 0. */
6515
6516 /* If INNER is a right shift of a constant and it plus BITNUM does
6517 not overflow, adjust BITNUM and INNER. */
6518 if (TREE_CODE (inner) == RSHIFT_EXPR
6519 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6520 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6521 && bitnum < TYPE_PRECISION (type)
6522 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6523 bitnum - TYPE_PRECISION (type)))
6524 {
6525 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6526 inner = TREE_OPERAND (inner, 0);
6527 }
6528
6529 /* If we are going to be able to omit the AND below, we must do our
6530 operations as unsigned. If we must use the AND, we have a choice.
6531 Normally unsigned is faster, but for some machines signed is. */
7960bf22 6532#ifdef LOAD_EXTEND_OP
b8698a0f 6533 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
2a1a3cd5 6534 && !flag_syntax_only) ? 0 : 1;
7960bf22 6535#else
c87d821b 6536 ops_unsigned = 1;
7960bf22 6537#endif
7960bf22 6538
5785c7de
RS
6539 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6540 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
e7824b3e 6541 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
db3927fb 6542 inner = fold_convert_loc (loc, intermediate_type, inner);
7960bf22
JL
6543
6544 if (bitnum != 0)
59ce6d6b
RS
6545 inner = build2 (RSHIFT_EXPR, intermediate_type,
6546 inner, size_int (bitnum));
7960bf22 6547
000d8d44
RS
6548 one = build_int_cst (intermediate_type, 1);
6549
7960bf22 6550 if (code == EQ_EXPR)
db3927fb 6551 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7960bf22
JL
6552
6553 /* Put the AND last so it can combine with more things. */
000d8d44 6554 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7960bf22
JL
6555
6556 /* Make sure to return the proper type. */
db3927fb 6557 inner = fold_convert_loc (loc, result_type, inner);
7960bf22
JL
6558
6559 return inner;
6560 }
6561 return NULL_TREE;
6562}
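/* Illustrative sketch, not part of the original fold-const.c: a single-bit
   test against a non-sign bit is rewritten with a shift and mask, e.g.
   (x & 8) != 0 becomes (x >> 3) & 1.  Hypothetical helpers.  */
static int example_bit_test_before (unsigned x) { return (x & 8) != 0; }
static int example_bit_test_after (unsigned x) { return (x >> 3) & 1; }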
5dfa45d0 6563
05d362b8
RS
6564/* Check whether we are allowed to reorder operands arg0 and arg1,
6565 such that the evaluation of arg1 occurs before arg0. */
6566
6567static bool
ac545c64 6568reorder_operands_p (const_tree arg0, const_tree arg1)
05d362b8
RS
6569{
6570 if (! flag_evaluation_order)
3e6688a7 6571 return true;
05d362b8
RS
6572 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6573 return true;
6574 return ! TREE_SIDE_EFFECTS (arg0)
6575 && ! TREE_SIDE_EFFECTS (arg1);
6576}
6577
37af03cb
RS
 6578/* Test whether it is preferable to swap two operands, ARG0 and
6579 ARG1, for example because ARG0 is an integer constant and ARG1
05d362b8
RS
6580 isn't. If REORDER is true, only recommend swapping if we can
6581 evaluate the operands in reverse order. */
37af03cb 6582
fd660b1b 6583bool
fa233e34 6584tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
37af03cb
RS
6585{
6586 STRIP_SIGN_NOPS (arg0);
6587 STRIP_SIGN_NOPS (arg1);
6588
6589 if (TREE_CODE (arg1) == INTEGER_CST)
6590 return 0;
6591 if (TREE_CODE (arg0) == INTEGER_CST)
6592 return 1;
6593
6594 if (TREE_CODE (arg1) == REAL_CST)
6595 return 0;
6596 if (TREE_CODE (arg0) == REAL_CST)
6597 return 1;
6598
325217ed
CF
6599 if (TREE_CODE (arg1) == FIXED_CST)
6600 return 0;
6601 if (TREE_CODE (arg0) == FIXED_CST)
6602 return 1;
6603
37af03cb
RS
6604 if (TREE_CODE (arg1) == COMPLEX_CST)
6605 return 0;
6606 if (TREE_CODE (arg0) == COMPLEX_CST)
6607 return 1;
6608
6609 if (TREE_CONSTANT (arg1))
6610 return 0;
6611 if (TREE_CONSTANT (arg0))
6612 return 1;
d1822754 6613
7f4b6d20 6614 if (optimize_function_for_size_p (cfun))
a352244f 6615 return 0;
37af03cb 6616
05d362b8
RS
6617 if (reorder && flag_evaluation_order
6618 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6619 return 0;
6620
fd660b1b
JL
6621 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6622 for commutative and comparison operators. Ensuring a canonical
6623 form allows the optimizers to find additional redundancies without
6624 having to explicitly check for both orderings. */
6625 if (TREE_CODE (arg0) == SSA_NAME
6626 && TREE_CODE (arg1) == SSA_NAME
6627 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6628 return 1;
6629
421076b5
RG
6630 /* Put SSA_NAMEs last. */
6631 if (TREE_CODE (arg1) == SSA_NAME)
6632 return 0;
6633 if (TREE_CODE (arg0) == SSA_NAME)
6634 return 1;
6635
6636 /* Put variables last. */
6637 if (DECL_P (arg1))
6638 return 0;
6639 if (DECL_P (arg0))
6640 return 1;
6641
37af03cb
RS
6642 return 0;
6643}
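/* Illustrative sketch, not part of the original fold-const.c: callers use
   tree_swap_operands_p to canonicalize commutative and comparison operands
   so that constants end up second, e.g. 5 < x is rewritten as x > 5.
   Hypothetical helpers showing the two source forms.  */
static int example_canon_before (int x) { return 5 < x; }
static int example_canon_after (int x) { return x > 5; }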
6644
18522563
ZD
6645/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6646 ARG0 is extended to a wider type. */
6647
6648static tree
db3927fb
AH
6649fold_widened_comparison (location_t loc, enum tree_code code,
6650 tree type, tree arg0, tree arg1)
18522563
ZD
6651{
6652 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6653 tree arg1_unw;
6654 tree shorter_type, outer_type;
6655 tree min, max;
6656 bool above, below;
6657
6658 if (arg0_unw == arg0)
6659 return NULL_TREE;
6660 shorter_type = TREE_TYPE (arg0_unw);
2a0958c5 6661
6c6d9d33
JDA
6662#ifdef HAVE_canonicalize_funcptr_for_compare
6663 /* Disable this optimization if we're casting a function pointer
6664 type on targets that require function pointer canonicalization. */
6665 if (HAVE_canonicalize_funcptr_for_compare
6666 && TREE_CODE (shorter_type) == POINTER_TYPE
6667 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6668 return NULL_TREE;
6669#endif
6670
2a0958c5
JJ
6671 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6672 return NULL_TREE;
6673
8f768a5a 6674 arg1_unw = get_unwidened (arg1, NULL_TREE);
18522563
ZD
6675
6676 /* If possible, express the comparison in the shorter mode. */
6677 if ((code == EQ_EXPR || code == NE_EXPR
6678 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6679 && (TREE_TYPE (arg1_unw) == shorter_type
02765a37 6680 || ((TYPE_PRECISION (shorter_type)
2e1d2474 6681 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
02765a37
RG
6682 && (TYPE_UNSIGNED (shorter_type)
6683 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
18522563 6684 || (TREE_CODE (arg1_unw) == INTEGER_CST
a7e1c928
AP
6685 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6686 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
18522563 6687 && int_fits_type_p (arg1_unw, shorter_type))))
db3927fb
AH
6688 return fold_build2_loc (loc, code, type, arg0_unw,
6689 fold_convert_loc (loc, shorter_type, arg1_unw));
18522563 6690
1630e763
AS
6691 if (TREE_CODE (arg1_unw) != INTEGER_CST
6692 || TREE_CODE (shorter_type) != INTEGER_TYPE
6693 || !int_fits_type_p (arg1_unw, shorter_type))
18522563
ZD
6694 return NULL_TREE;
6695
 6696  /* If we are comparing with an integer that does not fit into the range
6697 of the shorter type, the result is known. */
6698 outer_type = TREE_TYPE (arg1_unw);
6699 min = lower_bound_in_type (outer_type, shorter_type);
6700 max = upper_bound_in_type (outer_type, shorter_type);
6701
6702 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6703 max, arg1_unw));
6704 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6705 arg1_unw, min));
6706
6707 switch (code)
6708 {
6709 case EQ_EXPR:
6710 if (above || below)
db3927fb 6711 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
18522563
ZD
6712 break;
6713
6714 case NE_EXPR:
6715 if (above || below)
db3927fb 6716 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
18522563
ZD
6717 break;
6718
6719 case LT_EXPR:
6720 case LE_EXPR:
6721 if (above)
db3927fb 6722 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
18522563 6723 else if (below)
db3927fb 6724 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
18522563
ZD
6725
6726 case GT_EXPR:
6727 case GE_EXPR:
6728 if (above)
db3927fb 6729 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
18522563 6730 else if (below)
db3927fb 6731 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
18522563
ZD
6732
6733 default:
6734 break;
6735 }
6736
6737 return NULL_TREE;
6738}
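/* Illustrative sketch, not part of the original fold-const.c: when a narrow
   value is widened and compared against a constant outside the narrow
   type's range, the result is known, e.g. for signed char c,
   (int) c == 1000 is always false and (int) c < 1000 is always true.
   Hypothetical helpers.  */
static int example_widened_eq (signed char c) { return (int) c == 1000; }
static int example_widened_lt (signed char c) { return (int) c < 1000; }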
6739
6740/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6741 ARG0 just the signedness is changed. */
6742
6743static tree
db3927fb 6744fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
18522563
ZD
6745 tree arg0, tree arg1)
6746{
b8fca551 6747 tree arg0_inner;
18522563
ZD
6748 tree inner_type, outer_type;
6749
1043771b 6750 if (!CONVERT_EXPR_P (arg0))
18522563
ZD
6751 return NULL_TREE;
6752
6753 outer_type = TREE_TYPE (arg0);
6754 arg0_inner = TREE_OPERAND (arg0, 0);
6755 inner_type = TREE_TYPE (arg0_inner);
6756
6c6d9d33
JDA
6757#ifdef HAVE_canonicalize_funcptr_for_compare
6758 /* Disable this optimization if we're casting a function pointer
6759 type on targets that require function pointer canonicalization. */
6760 if (HAVE_canonicalize_funcptr_for_compare
6761 && TREE_CODE (inner_type) == POINTER_TYPE
6762 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6763 return NULL_TREE;
6764#endif
6765
18522563
ZD
6766 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6767 return NULL_TREE;
6768
6769 if (TREE_CODE (arg1) != INTEGER_CST
1043771b 6770 && !(CONVERT_EXPR_P (arg1)
18522563
ZD
6771 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6772 return NULL_TREE;
6773
8ebc39d8
RG
6774 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6775 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
18522563
ZD
6776 && code != NE_EXPR
6777 && code != EQ_EXPR)
6778 return NULL_TREE;
6779
6780 if (TREE_CODE (arg1) == INTEGER_CST)
9589f23e
AS
6781 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6782 0, TREE_OVERFLOW (arg1));
18522563 6783 else
db3927fb 6784 arg1 = fold_convert_loc (loc, inner_type, arg1);
18522563 6785
db3927fb 6786 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
18522563
ZD
6787}
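/* Illustrative sketch, not part of the original fold-const.c: an equality
   comparison is unaffected by a same-precision sign change, e.g. for int x,
   (unsigned) x == 5u folds back to x == 5.  Hypothetical helpers.  */
static int example_signchg_before (int x) { return (unsigned) x == 5u; }
static int example_signchg_after (int x) { return x == 5; }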
6788
5be014d5 6789/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
db3927fb
AH
 6790   the step of the array.  Reconstructs s and delta in the case of s *
6791 delta being an integer constant (and thus already folded). ADDR is
 6792   the address.  OP1 is the multiplicative expression.  If the
6793 function succeeds, the new address expression is returned.
6794 Otherwise NULL_TREE is returned. LOC is the location of the
6795 resulting expression. */
38b0dcb8
ZD
6796
6797static tree
db3927fb 6798try_move_mult_to_index (location_t loc, tree addr, tree op1)
38b0dcb8
ZD
6799{
6800 tree s, delta, step;
38b0dcb8
ZD
6801 tree ref = TREE_OPERAND (addr, 0), pref;
6802 tree ret, pos;
6803 tree itype;
713e3ec9 6804 bool mdim = false;
38b0dcb8 6805
5be014d5
AP
6806 /* Strip the nops that might be added when converting op1 to sizetype. */
6807 STRIP_NOPS (op1);
6808
c5542940
RG
6809 /* Canonicalize op1 into a possibly non-constant delta
6810 and an INTEGER_CST s. */
6811 if (TREE_CODE (op1) == MULT_EXPR)
38b0dcb8 6812 {
c5542940
RG
6813 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6814
6815 STRIP_NOPS (arg0);
6816 STRIP_NOPS (arg1);
b8698a0f 6817
c5542940
RG
6818 if (TREE_CODE (arg0) == INTEGER_CST)
6819 {
6820 s = arg0;
6821 delta = arg1;
6822 }
6823 else if (TREE_CODE (arg1) == INTEGER_CST)
6824 {
6825 s = arg1;
6826 delta = arg0;
6827 }
6828 else
6829 return NULL_TREE;
38b0dcb8 6830 }
c5542940 6831 else if (TREE_CODE (op1) == INTEGER_CST)
38b0dcb8 6832 {
c5542940
RG
6833 delta = op1;
6834 s = NULL_TREE;
38b0dcb8
ZD
6835 }
6836 else
c5542940
RG
6837 {
6838 /* Simulate we are delta * 1. */
6839 delta = op1;
6840 s = integer_one_node;
6841 }
38b0dcb8
ZD
6842
6843 for (;; ref = TREE_OPERAND (ref, 0))
6844 {
6845 if (TREE_CODE (ref) == ARRAY_REF)
6846 {
8e281a8d
RG
6847 tree domain;
6848
713e3ec9
RG
6849 /* Remember if this was a multi-dimensional array. */
6850 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6851 mdim = true;
6852
8e281a8d
RG
6853 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6854 if (! domain)
03b0db0a 6855 continue;
8e281a8d 6856 itype = TREE_TYPE (domain);
03b0db0a 6857
38b0dcb8 6858 step = array_ref_element_size (ref);
38b0dcb8
ZD
6859 if (TREE_CODE (step) != INTEGER_CST)
6860 continue;
6861
c5542940
RG
6862 if (s)
6863 {
6864 if (! tree_int_cst_equal (step, s))
6865 continue;
6866 }
6867 else
6868 {
6869 /* Try if delta is a multiple of step. */
194ac52a 6870 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
03b0db0a 6871 if (! tmp)
c5542940 6872 continue;
03b0db0a 6873 delta = tmp;
c5542940 6874 }
38b0dcb8 6875
713e3ec9
RG
6876 /* Only fold here if we can verify we do not overflow one
6877 dimension of a multi-dimensional array. */
6878 if (mdim)
6879 {
6880 tree tmp;
6881
6882 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
8e281a8d
RG
6883 || !TYPE_MAX_VALUE (domain)
6884 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
713e3ec9
RG
6885 continue;
6886
db3927fb 6887 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
8e281a8d
RG
6888 fold_convert_loc (loc, itype,
6889 TREE_OPERAND (ref, 1)),
6890 fold_convert_loc (loc, itype, delta));
713e3ec9
RG
6891 if (!tmp
6892 || TREE_CODE (tmp) != INTEGER_CST
8e281a8d 6893 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
713e3ec9
RG
6894 continue;
6895 }
6896
38b0dcb8
ZD
6897 break;
6898 }
713e3ec9
RG
6899 else
6900 mdim = false;
38b0dcb8
ZD
6901
6902 if (!handled_component_p (ref))
6903 return NULL_TREE;
6904 }
6905
6906 /* We found the suitable array reference. So copy everything up to it,
6907 and replace the index. */
6908
6909 pref = TREE_OPERAND (addr, 0);
6910 ret = copy_node (pref);
db3927fb 6911 SET_EXPR_LOCATION (ret, loc);
38b0dcb8
ZD
6912 pos = ret;
6913
6914 while (pref != ref)
6915 {
6916 pref = TREE_OPERAND (pref, 0);
6917 TREE_OPERAND (pos, 0) = copy_node (pref);
6918 pos = TREE_OPERAND (pos, 0);
6919 }
6920
db3927fb
AH
6921 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6922 fold_convert_loc (loc, itype,
6923 TREE_OPERAND (pos, 1)),
6924 fold_convert_loc (loc, itype, delta));
38b0dcb8 6925
db3927fb 6926 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
38b0dcb8
ZD
6927}
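/* Illustrative sketch, not part of the original fold-const.c: a byte offset
   that is a multiple of the array step can be folded back into the index,
   e.g. for int a[16] the address (char *) &a[i] + sizeof (int) * d is
   rewritten as &a[i + d].  Hypothetical helpers and array.  */
extern int example_array[16];
static int *example_idx_before (int i, int d)
{ return (int *) ((char *) &example_array[i] + sizeof (int) * d); }
static int *example_idx_after (int i, int d)
{ return &example_array[i + d]; }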
6928
1d481ba8
ZD
6929
6930/* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6931 means A >= Y && A != MAX, but in this case we know that
6932 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6933
6934static tree
db3927fb 6935fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
1d481ba8
ZD
6936{
6937 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6938
6939 if (TREE_CODE (bound) == LT_EXPR)
6940 a = TREE_OPERAND (bound, 0);
6941 else if (TREE_CODE (bound) == GT_EXPR)
6942 a = TREE_OPERAND (bound, 1);
6943 else
6944 return NULL_TREE;
6945
6946 typea = TREE_TYPE (a);
6947 if (!INTEGRAL_TYPE_P (typea)
6948 && !POINTER_TYPE_P (typea))
6949 return NULL_TREE;
6950
6951 if (TREE_CODE (ineq) == LT_EXPR)
6952 {
6953 a1 = TREE_OPERAND (ineq, 1);
6954 y = TREE_OPERAND (ineq, 0);
6955 }
6956 else if (TREE_CODE (ineq) == GT_EXPR)
6957 {
6958 a1 = TREE_OPERAND (ineq, 0);
6959 y = TREE_OPERAND (ineq, 1);
6960 }
6961 else
6962 return NULL_TREE;
6963
6964 if (TREE_TYPE (a1) != typea)
6965 return NULL_TREE;
6966
5be014d5
AP
6967 if (POINTER_TYPE_P (typea))
6968 {
6969 /* Convert the pointer types into integer before taking the difference. */
db3927fb
AH
6970 tree ta = fold_convert_loc (loc, ssizetype, a);
6971 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6972 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
5be014d5
AP
6973 }
6974 else
db3927fb 6975 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
5be014d5
AP
6976
6977 if (!diff || !integer_onep (diff))
6978 return NULL_TREE;
1d481ba8 6979
db3927fb 6980 return fold_build2_loc (loc, GE_EXPR, type, a, y);
1d481ba8
ZD
6981}
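/* Illustrative sketch, not part of the original fold-const.c: given the
   bound a < x, the non-sharp inequality a + 1 > y can be rewritten as
   a >= y, because a + 1 cannot wrap past the maximum when a < x holds.
   Hypothetical helpers showing the combined conditions.  */
static int example_ineq_before (int a, int x, int y) { return a < x && a + 1 > y; }
static int example_ineq_after (int a, int x, int y) { return a < x && a >= y; }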
6982
0ed9a3e3
RG
6983/* Fold a sum or difference of at least one multiplication.
6984 Returns the folded tree or NULL if no simplification could be made. */
6985
6986static tree
db3927fb
AH
6987fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6988 tree arg0, tree arg1)
0ed9a3e3
RG
6989{
6990 tree arg00, arg01, arg10, arg11;
6991 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6992
6993 /* (A * C) +- (B * C) -> (A+-B) * C.
6994 (A * C) +- A -> A * (C+-1).
6995 We are most concerned about the case where C is a constant,
6996 but other combinations show up during loop reduction. Since
6997 it is not difficult, try all four possibilities. */
6998
6999 if (TREE_CODE (arg0) == MULT_EXPR)
7000 {
7001 arg00 = TREE_OPERAND (arg0, 0);
7002 arg01 = TREE_OPERAND (arg0, 1);
7003 }
b462d62d
RG
7004 else if (TREE_CODE (arg0) == INTEGER_CST)
7005 {
7006 arg00 = build_one_cst (type);
7007 arg01 = arg0;
7008 }
0ed9a3e3
RG
7009 else
7010 {
325217ed
CF
7011 /* We cannot generate constant 1 for fract. */
7012 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7013 return NULL_TREE;
0ed9a3e3 7014 arg00 = arg0;
bfabddb6 7015 arg01 = build_one_cst (type);
0ed9a3e3
RG
7016 }
7017 if (TREE_CODE (arg1) == MULT_EXPR)
7018 {
7019 arg10 = TREE_OPERAND (arg1, 0);
7020 arg11 = TREE_OPERAND (arg1, 1);
7021 }
b462d62d
RG
7022 else if (TREE_CODE (arg1) == INTEGER_CST)
7023 {
7024 arg10 = build_one_cst (type);
cef158f9
RG
7025 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7026 the purpose of this canonicalization. */
7027 if (TREE_INT_CST_HIGH (arg1) == -1
7028 && negate_expr_p (arg1)
7029 && code == PLUS_EXPR)
7030 {
7031 arg11 = negate_expr (arg1);
7032 code = MINUS_EXPR;
7033 }
7034 else
7035 arg11 = arg1;
b462d62d 7036 }
0ed9a3e3
RG
7037 else
7038 {
325217ed
CF
7039 /* We cannot generate constant 1 for fract. */
7040 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7041 return NULL_TREE;
0ed9a3e3 7042 arg10 = arg1;
bfabddb6 7043 arg11 = build_one_cst (type);
0ed9a3e3
RG
7044 }
7045 same = NULL_TREE;
7046
7047 if (operand_equal_p (arg01, arg11, 0))
7048 same = arg01, alt0 = arg00, alt1 = arg10;
7049 else if (operand_equal_p (arg00, arg10, 0))
7050 same = arg00, alt0 = arg01, alt1 = arg11;
7051 else if (operand_equal_p (arg00, arg11, 0))
7052 same = arg00, alt0 = arg01, alt1 = arg10;
7053 else if (operand_equal_p (arg01, arg10, 0))
7054 same = arg01, alt0 = arg00, alt1 = arg11;
7055
7056 /* No identical multiplicands; see if we can find a common
7057 power-of-two factor in non-power-of-two multiplies. This
7058 can help in multi-dimensional array access. */
7059 else if (host_integerp (arg01, 0)
7060 && host_integerp (arg11, 0))
7061 {
7062 HOST_WIDE_INT int01, int11, tmp;
7063 bool swap = false;
7064 tree maybe_same;
7065 int01 = TREE_INT_CST_LOW (arg01);
7066 int11 = TREE_INT_CST_LOW (arg11);
7067
7068 /* Move min of absolute values to int11. */
7069 if ((int01 >= 0 ? int01 : -int01)
7070 < (int11 >= 0 ? int11 : -int11))
7071 {
7072 tmp = int01, int01 = int11, int11 = tmp;
7073 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7074 maybe_same = arg01;
7075 swap = true;
7076 }
7077 else
7078 maybe_same = arg11;
7079
299b87f8
RG
7080 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7081 /* The remainder should not be a constant, otherwise we
7082 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7083 increased the number of multiplications necessary. */
7084 && TREE_CODE (arg10) != INTEGER_CST)
0ed9a3e3 7085 {
db3927fb 7086 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
0ed9a3e3
RG
7087 build_int_cst (TREE_TYPE (arg00),
7088 int01 / int11));
7089 alt1 = arg10;
7090 same = maybe_same;
7091 if (swap)
7092 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7093 }
7094 }
7095
7096 if (same)
db3927fb
AH
7097 return fold_build2_loc (loc, MULT_EXPR, type,
7098 fold_build2_loc (loc, code, type,
7099 fold_convert_loc (loc, type, alt0),
7100 fold_convert_loc (loc, type, alt1)),
7101 fold_convert_loc (loc, type, same));
0ed9a3e3
RG
7102
7103 return NULL_TREE;
7104}
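/* Illustrative sketch, not part of the original fold-const.c: a sum or
   difference of multiplications with a common factor is refactored, e.g.
   a * 4 + b * 4 becomes (a + b) * 4 and a * 5 - a becomes a * 4.
   Hypothetical helpers.  */
static int example_factor_before (int a, int b) { return a * 4 + b * 4; }
static int example_factor_after (int a, int b) { return (a + b) * 4; }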
7105
78bf6e2f
RS
7106/* Subroutine of native_encode_expr. Encode the INTEGER_CST
7107 specified by EXPR into the buffer PTR of length LEN bytes.
7108 Return the number of bytes placed in the buffer, or zero
7109 upon failure. */
7110
7111static int
fa233e34 7112native_encode_int (const_tree expr, unsigned char *ptr, int len)
78bf6e2f
RS
7113{
7114 tree type = TREE_TYPE (expr);
7115 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7116 int byte, offset, word, words;
7117 unsigned char value;
7118
7119 if (total_bytes > len)
7120 return 0;
7121 words = total_bytes / UNITS_PER_WORD;
7122
7123 for (byte = 0; byte < total_bytes; byte++)
7124 {
7125 int bitpos = byte * BITS_PER_UNIT;
7126 if (bitpos < HOST_BITS_PER_WIDE_INT)
7127 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7128 else
7129 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7130 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7131
7132 if (total_bytes > UNITS_PER_WORD)
7133 {
7134 word = byte / UNITS_PER_WORD;
7135 if (WORDS_BIG_ENDIAN)
7136 word = (words - 1) - word;
7137 offset = word * UNITS_PER_WORD;
7138 if (BYTES_BIG_ENDIAN)
7139 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7140 else
7141 offset += byte % UNITS_PER_WORD;
7142 }
7143 else
7144 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7145 ptr[offset] = value;
7146 }
7147 return total_bytes;
7148}
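/* Illustrative sketch, not part of the original fold-const.c: a
   hypothetical use of native_encode_int for a 32-bit constant.  On a
   little-endian target the buffer receives {0x04, 0x03, 0x02, 0x01}; on a
   big-endian target it receives {0x01, 0x02, 0x03, 0x04}.  */
static int
example_encode_int (unsigned char buf[4])
{
  tree cst = build_int_cst (integer_type_node, 0x01020304);
  return native_encode_int (cst, buf, 4);
}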
7149
7150
7151/* Subroutine of native_encode_expr. Encode the REAL_CST
7152 specified by EXPR into the buffer PTR of length LEN bytes.
7153 Return the number of bytes placed in the buffer, or zero
7154 upon failure. */
7155
7156static int
fa233e34 7157native_encode_real (const_tree expr, unsigned char *ptr, int len)
78bf6e2f
RS
7158{
7159 tree type = TREE_TYPE (expr);
7160 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
0a9430a8 7161 int byte, offset, word, words, bitpos;
78bf6e2f
RS
7162 unsigned char value;
7163
7164 /* There are always 32 bits in each long, no matter the size of
 7165     the host's long.  We handle floating point representations with
7166 up to 192 bits. */
7167 long tmp[6];
7168
7169 if (total_bytes > len)
7170 return 0;
54193313 7171 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
78bf6e2f
RS
7172
7173 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7174
0a9430a8
JJ
7175 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7176 bitpos += BITS_PER_UNIT)
78bf6e2f 7177 {
0a9430a8 7178 byte = (bitpos / BITS_PER_UNIT) & 3;
78bf6e2f
RS
7179 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7180
0a9430a8 7181 if (UNITS_PER_WORD < 4)
78bf6e2f
RS
7182 {
7183 word = byte / UNITS_PER_WORD;
0a9430a8 7184 if (WORDS_BIG_ENDIAN)
78bf6e2f
RS
7185 word = (words - 1) - word;
7186 offset = word * UNITS_PER_WORD;
7187 if (BYTES_BIG_ENDIAN)
7188 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7189 else
7190 offset += byte % UNITS_PER_WORD;
7191 }
7192 else
0a9430a8
JJ
7193 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7194 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
78bf6e2f
RS
7195 }
7196 return total_bytes;
7197}
7198
7199/* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7200 specified by EXPR into the buffer PTR of length LEN bytes.
7201 Return the number of bytes placed in the buffer, or zero
7202 upon failure. */
7203
7204static int
fa233e34 7205native_encode_complex (const_tree expr, unsigned char *ptr, int len)
78bf6e2f
RS
7206{
7207 int rsize, isize;
7208 tree part;
7209
7210 part = TREE_REALPART (expr);
7211 rsize = native_encode_expr (part, ptr, len);
7212 if (rsize == 0)
7213 return 0;
7214 part = TREE_IMAGPART (expr);
7215 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7216 if (isize != rsize)
7217 return 0;
7218 return rsize + isize;
7219}
7220
7221
7222/* Subroutine of native_encode_expr. Encode the VECTOR_CST
7223 specified by EXPR into the buffer PTR of length LEN bytes.
7224 Return the number of bytes placed in the buffer, or zero
7225 upon failure. */
7226
7227static int
fa233e34 7228native_encode_vector (const_tree expr, unsigned char *ptr, int len)
78bf6e2f 7229{
15b1c12a 7230 int i, size, offset, count;
1000b34d 7231 tree itype, elem, elements;
78bf6e2f 7232
78bf6e2f
RS
7233 offset = 0;
7234 elements = TREE_VECTOR_CST_ELTS (expr);
7235 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
1000b34d
RS
7236 itype = TREE_TYPE (TREE_TYPE (expr));
7237 size = GET_MODE_SIZE (TYPE_MODE (itype));
78bf6e2f
RS
7238 for (i = 0; i < count; i++)
7239 {
7240 if (elements)
7241 {
7242 elem = TREE_VALUE (elements);
7243 elements = TREE_CHAIN (elements);
7244 }
7245 else
7246 elem = NULL_TREE;
7247
7248 if (elem)
7249 {
1000b34d 7250 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
78bf6e2f
RS
7251 return 0;
7252 }
1000b34d 7253 else
78bf6e2f
RS
7254 {
7255 if (offset + size > len)
7256 return 0;
7257 memset (ptr+offset, 0, size);
7258 }
78bf6e2f
RS
7259 offset += size;
7260 }
7261 return offset;
7262}
7263
7264
27a4e072
JJ
7265/* Subroutine of native_encode_expr. Encode the STRING_CST
7266 specified by EXPR into the buffer PTR of length LEN bytes.
7267 Return the number of bytes placed in the buffer, or zero
7268 upon failure. */
7269
7270static int
7271native_encode_string (const_tree expr, unsigned char *ptr, int len)
7272{
7273 tree type = TREE_TYPE (expr);
7274 HOST_WIDE_INT total_bytes;
7275
7276 if (TREE_CODE (type) != ARRAY_TYPE
7277 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7278 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7279 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7280 return 0;
7281 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7282 if (total_bytes > len)
7283 return 0;
7284 if (TREE_STRING_LENGTH (expr) < total_bytes)
7285 {
7286 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7287 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7288 total_bytes - TREE_STRING_LENGTH (expr));
7289 }
7290 else
7291 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7292 return total_bytes;
7293}
7294
7295
78bf6e2f
RS
7296/* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7297 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7298 buffer PTR of length LEN bytes. Return the number of bytes
7299 placed in the buffer, or zero upon failure. */
7300
db136335 7301int
fa233e34 7302native_encode_expr (const_tree expr, unsigned char *ptr, int len)
78bf6e2f
RS
7303{
7304 switch (TREE_CODE (expr))
7305 {
7306 case INTEGER_CST:
7307 return native_encode_int (expr, ptr, len);
7308
7309 case REAL_CST:
7310 return native_encode_real (expr, ptr, len);
7311
7312 case COMPLEX_CST:
7313 return native_encode_complex (expr, ptr, len);
7314
7315 case VECTOR_CST:
7316 return native_encode_vector (expr, ptr, len);
7317
27a4e072
JJ
7318 case STRING_CST:
7319 return native_encode_string (expr, ptr, len);
7320
78bf6e2f
RS
7321 default:
7322 return 0;
7323 }
7324}
7325
7326
7327/* Subroutine of native_interpret_expr. Interpret the contents of
7328 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7329 If the buffer cannot be interpreted, return NULL_TREE. */
7330
7331static tree
fa233e34 7332native_interpret_int (tree type, const unsigned char *ptr, int len)
78bf6e2f
RS
7333{
7334 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7335 int byte, offset, word, words;
7336 unsigned char value;
1961ffb8 7337 double_int result;
78bf6e2f
RS
7338
7339 if (total_bytes > len)
7340 return NULL_TREE;
7341 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7342 return NULL_TREE;
1961ffb8
AS
7343
7344 result = double_int_zero;
78bf6e2f
RS
7345 words = total_bytes / UNITS_PER_WORD;
7346
7347 for (byte = 0; byte < total_bytes; byte++)
7348 {
7349 int bitpos = byte * BITS_PER_UNIT;
7350 if (total_bytes > UNITS_PER_WORD)
7351 {
7352 word = byte / UNITS_PER_WORD;
7353 if (WORDS_BIG_ENDIAN)
7354 word = (words - 1) - word;
7355 offset = word * UNITS_PER_WORD;
7356 if (BYTES_BIG_ENDIAN)
7357 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7358 else
7359 offset += byte % UNITS_PER_WORD;
7360 }
7361 else
7362 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7363 value = ptr[offset];
7364
7365 if (bitpos < HOST_BITS_PER_WIDE_INT)
1961ffb8 7366 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
78bf6e2f 7367 else
1961ffb8
AS
7368 result.high |= (unsigned HOST_WIDE_INT) value
7369 << (bitpos - HOST_BITS_PER_WIDE_INT);
78bf6e2f
RS
7370 }
7371
1961ffb8 7372 return double_int_to_tree (type, result);
78bf6e2f
RS
7373}
7374
7375
7376/* Subroutine of native_interpret_expr. Interpret the contents of
7377 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7378 If the buffer cannot be interpreted, return NULL_TREE. */
7379
7380static tree
fa233e34 7381native_interpret_real (tree type, const unsigned char *ptr, int len)
78bf6e2f 7382{
15b1c12a
RS
7383 enum machine_mode mode = TYPE_MODE (type);
7384 int total_bytes = GET_MODE_SIZE (mode);
0a9430a8 7385 int byte, offset, word, words, bitpos;
78bf6e2f
RS
7386 unsigned char value;
7387 /* There are always 32 bits in each long, no matter the size of
 7388     the host's long.  We handle floating point representations with
7389 up to 192 bits. */
7390 REAL_VALUE_TYPE r;
7391 long tmp[6];
7392
7393 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7394 if (total_bytes > len || total_bytes > 24)
7395 return NULL_TREE;
54193313 7396 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
78bf6e2f
RS
7397
7398 memset (tmp, 0, sizeof (tmp));
0a9430a8
JJ
7399 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7400 bitpos += BITS_PER_UNIT)
78bf6e2f 7401 {
0a9430a8
JJ
7402 byte = (bitpos / BITS_PER_UNIT) & 3;
7403 if (UNITS_PER_WORD < 4)
78bf6e2f
RS
7404 {
7405 word = byte / UNITS_PER_WORD;
0a9430a8 7406 if (WORDS_BIG_ENDIAN)
78bf6e2f
RS
7407 word = (words - 1) - word;
7408 offset = word * UNITS_PER_WORD;
7409 if (BYTES_BIG_ENDIAN)
7410 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7411 else
7412 offset += byte % UNITS_PER_WORD;
7413 }
7414 else
0a9430a8
JJ
7415 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7416 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
78bf6e2f
RS
7417
7418 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7419 }
7420
7421 real_from_target (&r, tmp, mode);
7422 return build_real (type, r);
7423}
7424
7425
7426/* Subroutine of native_interpret_expr. Interpret the contents of
7427 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7428 If the buffer cannot be interpreted, return NULL_TREE. */
7429
7430static tree
fa233e34 7431native_interpret_complex (tree type, const unsigned char *ptr, int len)
78bf6e2f
RS
7432{
7433 tree etype, rpart, ipart;
7434 int size;
7435
7436 etype = TREE_TYPE (type);
7437 size = GET_MODE_SIZE (TYPE_MODE (etype));
7438 if (size * 2 > len)
7439 return NULL_TREE;
7440 rpart = native_interpret_expr (etype, ptr, size);
7441 if (!rpart)
7442 return NULL_TREE;
7443 ipart = native_interpret_expr (etype, ptr+size, size);
7444 if (!ipart)
7445 return NULL_TREE;
7446 return build_complex (type, rpart, ipart);
7447}
7448
7449
7450/* Subroutine of native_interpret_expr. Interpret the contents of
7451 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7452 If the buffer cannot be interpreted, return NULL_TREE. */
7453
7454static tree
fa233e34 7455native_interpret_vector (tree type, const unsigned char *ptr, int len)
78bf6e2f
RS
7456{
7457 tree etype, elem, elements;
7458 int i, size, count;
7459
7460 etype = TREE_TYPE (type);
7461 size = GET_MODE_SIZE (TYPE_MODE (etype));
7462 count = TYPE_VECTOR_SUBPARTS (type);
7463 if (size * count > len)
7464 return NULL_TREE;
7465
7466 elements = NULL_TREE;
7467 for (i = count - 1; i >= 0; i--)
7468 {
7469 elem = native_interpret_expr (etype, ptr+(i*size), size);
7470 if (!elem)
7471 return NULL_TREE;
7472 elements = tree_cons (NULL_TREE, elem, elements);
7473 }
7474 return build_vector (type, elements);
7475}
7476
7477
75c40d56 7478/* Subroutine of fold_view_convert_expr. Interpret the contents of
78bf6e2f
RS
7479 the buffer PTR of length LEN as a constant of type TYPE. For
7480 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7481 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7482 return NULL_TREE. */
7483
db136335 7484tree
fa233e34 7485native_interpret_expr (tree type, const unsigned char *ptr, int len)
78bf6e2f
RS
7486{
7487 switch (TREE_CODE (type))
7488 {
7489 case INTEGER_TYPE:
7490 case ENUMERAL_TYPE:
7491 case BOOLEAN_TYPE:
7492 return native_interpret_int (type, ptr, len);
7493
7494 case REAL_TYPE:
7495 return native_interpret_real (type, ptr, len);
7496
7497 case COMPLEX_TYPE:
7498 return native_interpret_complex (type, ptr, len);
7499
7500 case VECTOR_TYPE:
7501 return native_interpret_vector (type, ptr, len);
7502
7503 default:
7504 return NULL_TREE;
7505 }
7506}
7507
7508
7509/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7510 TYPE at compile-time. If we're unable to perform the conversion
7511 return NULL_TREE. */
7512
7513static tree
7514fold_view_convert_expr (tree type, tree expr)
7515{
7516 /* We support up to 512-bit values (for V8DFmode). */
7517 unsigned char buffer[64];
7518 int len;
7519
7520 /* Check that the host and target are sane. */
7521 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7522 return NULL_TREE;
7523
7524 len = native_encode_expr (expr, buffer, sizeof (buffer));
7525 if (len == 0)
7526 return NULL_TREE;
7527
7528 return native_interpret_expr (type, buffer, len);
7529}
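/* Illustrative sketch, not part of the original fold-const.c:
   fold_view_convert_expr evaluates a bit reinterpretation of a constant at
   compile time, e.g. viewing the float constant 1.0f as a 32-bit integer
   yields 0x3f800000 on an IEEE single-precision target.  Hypothetical
   helper showing the same reinterpretation at the source level via a
   union.  */
static int
example_view_as_int (void)
{
  union { float f; int i; } u = { 1.0f };
  return u.i;  /* 0x3f800000 on IEEE targets */
}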
7530
70826cbb 7531/* Build an expression for the address of T. Folds away INDIRECT_REF
628c189e 7532 to avoid confusing the gimplify process. */
70826cbb 7533
628c189e 7534tree
db3927fb 7535build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
70826cbb
SP
7536{
7537 /* The size of the object is not relevant when talking about its address. */
7538 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7539 t = TREE_OPERAND (t, 0);
7540
be1ac4ec 7541 if (TREE_CODE (t) == INDIRECT_REF)
70826cbb
SP
7542 {
7543 t = TREE_OPERAND (t, 0);
7544
7545 if (TREE_TYPE (t) != ptrtype)
c9019218 7546 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
70826cbb 7547 }
70f34814
RG
7548 else if (TREE_CODE (t) == MEM_REF
7549 && integer_zerop (TREE_OPERAND (t, 1)))
7550 return TREE_OPERAND (t, 0);
d98e8686
EB
7551 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7552 {
db3927fb 7553 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
d98e8686
EB
7554
7555 if (TREE_TYPE (t) != ptrtype)
db3927fb 7556 t = fold_convert_loc (loc, ptrtype, t);
d98e8686 7557 }
70826cbb 7558 else
c9019218 7559 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
70826cbb
SP
7560
7561 return t;
7562}
7563
628c189e 7564/* Build an expression for the address of T. */
70826cbb
SP
7565
7566tree
db3927fb 7567build_fold_addr_expr_loc (location_t loc, tree t)
70826cbb
SP
7568{
7569 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7570
db3927fb 7571 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
70826cbb 7572}
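/* Illustrative sketch, not part of the original fold-const.c: building the
   address of a dereference folds the indirection away, e.g. &*p is just p.
   Hypothetical helper.  */
static int *example_addr_of_deref (int *p) { return &*p; }  /* folds to p */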
78bf6e2f 7573
7107fa7c
KH
7574/* Fold a unary expression of code CODE and type TYPE with operand
7575 OP0. Return the folded expression if folding is successful.
7576 Otherwise, return NULL_TREE. */
659d8efa 7577
721425b6 7578tree
db3927fb 7579fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
659d8efa 7580{
659d8efa 7581 tree tem;
fbaa905c 7582 tree arg0;
659d8efa
KH
7583 enum tree_code_class kind = TREE_CODE_CLASS (code);
7584
7585 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7586 && TREE_CODE_LENGTH (code) == 1);
7587
fbaa905c 7588 arg0 = op0;
659d8efa
KH
7589 if (arg0)
7590 {
1a87cf0c 7591 if (CONVERT_EXPR_CODE_P (code)
b49ceb45 7592 || code == FLOAT_EXPR || code == ABS_EXPR)
659d8efa 7593 {
b49ceb45
JM
7594 /* Don't use STRIP_NOPS, because signedness of argument type
7595 matters. */
659d8efa
KH
7596 STRIP_SIGN_NOPS (arg0);
7597 }
7598 else
7599 {
7600 /* Strip any conversions that don't change the mode. This
7601 is safe for every expression, except for a comparison
7602 expression because its signedness is derived from its
7603 operands.
7604
7605 Note that this is done as an internal manipulation within
7606 the constant folder, in order to find the simplest
7607 representation of the arguments so that their form can be
 7608	     studied.  In any case, the appropriate type conversions
7609 should be put back in the tree that will get out of the
7610 constant folder. */
7611 STRIP_NOPS (arg0);
7612 }
7613 }
7614
7615 if (TREE_CODE_CLASS (code) == tcc_unary)
7616 {
7617 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7618 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
db3927fb
AH
7619 fold_build1_loc (loc, code, type,
7620 fold_convert_loc (loc, TREE_TYPE (op0),
7621 TREE_OPERAND (arg0, 1))));
659d8efa
KH
7622 else if (TREE_CODE (arg0) == COND_EXPR)
7623 {
7624 tree arg01 = TREE_OPERAND (arg0, 1);
7625 tree arg02 = TREE_OPERAND (arg0, 2);
7626 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
db3927fb
AH
7627 arg01 = fold_build1_loc (loc, code, type,
7628 fold_convert_loc (loc,
7629 TREE_TYPE (op0), arg01));
659d8efa 7630 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
db3927fb
AH
7631 arg02 = fold_build1_loc (loc, code, type,
7632 fold_convert_loc (loc,
7633 TREE_TYPE (op0), arg02));
7634 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7f20a5b7 7635 arg01, arg02);
659d8efa
KH
7636
7637 /* If this was a conversion, and all we did was to move into
7638 inside the COND_EXPR, bring it back out. But leave it if
7639 it is a conversion from integer to integer and the
7640 result precision is no wider than a word since such a
7641 conversion is cheap and may be optimized away by combine,
7642 while it couldn't if it were outside the COND_EXPR. Then return
7643 so we don't get into an infinite recursion loop taking the
7644 conversion out and then back in. */
7645
1a87cf0c 7646 if ((CONVERT_EXPR_CODE_P (code)
659d8efa
KH
7647 || code == NON_LVALUE_EXPR)
7648 && TREE_CODE (tem) == COND_EXPR
7649 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7650 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7651 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7652 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7653 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7654 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7655 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7656 && (INTEGRAL_TYPE_P
7657 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7658 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7659 || flag_syntax_only))
c9019218
JJ
7660 tem = build1_loc (loc, code, type,
7661 build3 (COND_EXPR,
7662 TREE_TYPE (TREE_OPERAND
7663 (TREE_OPERAND (tem, 1), 0)),
7664 TREE_OPERAND (tem, 0),
7665 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7666 TREE_OPERAND (TREE_OPERAND (tem, 2),
7667 0)));
659d8efa
KH
7668 return tem;
7669 }
7670 else if (COMPARISON_CLASS_P (arg0))
7671 {
7672 if (TREE_CODE (type) == BOOLEAN_TYPE)
7673 {
7674 arg0 = copy_node (arg0);
7675 TREE_TYPE (arg0) = type;
7676 return arg0;
7677 }
7678 else if (TREE_CODE (type) != INTEGER_TYPE)
db3927fb
AH
7679 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7680 fold_build1_loc (loc, code, type,
7f20a5b7 7681 integer_one_node),
db3927fb 7682 fold_build1_loc (loc, code, type,
7f20a5b7 7683 integer_zero_node));
659d8efa
KH
7684 }
7685 }
7686
7687 switch (code)
7688 {
dedd42d5
RG
7689 case PAREN_EXPR:
7690 /* Re-association barriers around constants and other re-association
7691 barriers can be removed. */
7692 if (CONSTANT_CLASS_P (op0)
7693 || TREE_CODE (op0) == PAREN_EXPR)
db3927fb 7694 return fold_convert_loc (loc, type, op0);
dedd42d5
RG
7695 return NULL_TREE;
7696
1043771b 7697 CASE_CONVERT:
659d8efa 7698 case FLOAT_EXPR:
659d8efa 7699 case FIX_TRUNC_EXPR:
4b58fc4d
KH
7700 if (TREE_TYPE (op0) == type)
7701 return op0;
b8698a0f 7702
6416ae7f 7703 /* If we have (type) (a CMP b) and type is an integral type, return
d998dd65
AP
7704 new expression involving the new type. */
7705 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
db3927fb 7706 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
d998dd65 7707 TREE_OPERAND (op0, 1));
659d8efa
KH
7708
7709 /* Handle cases of two conversions in a row. */
1043771b 7710 if (CONVERT_EXPR_P (op0))
659d8efa 7711 {
4b58fc4d
KH
7712 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7713 tree inter_type = TREE_TYPE (op0);
659d8efa
KH
7714 int inside_int = INTEGRAL_TYPE_P (inside_type);
7715 int inside_ptr = POINTER_TYPE_P (inside_type);
7716 int inside_float = FLOAT_TYPE_P (inside_type);
4b8d544b 7717 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
659d8efa
KH
7718 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7719 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7720 int inter_int = INTEGRAL_TYPE_P (inter_type);
7721 int inter_ptr = POINTER_TYPE_P (inter_type);
7722 int inter_float = FLOAT_TYPE_P (inter_type);
4b8d544b 7723 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
659d8efa
KH
7724 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7725 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7726 int final_int = INTEGRAL_TYPE_P (type);
7727 int final_ptr = POINTER_TYPE_P (type);
7728 int final_float = FLOAT_TYPE_P (type);
4b8d544b 7729 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
659d8efa
KH
7730 unsigned int final_prec = TYPE_PRECISION (type);
7731 int final_unsignedp = TYPE_UNSIGNED (type);
7732
7733 /* In addition to the cases of two conversions in a row
7734 handled below, if we are converting something to its own
7735 type via an object of identical or wider precision, neither
7736 conversion is needed. */
7737 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
497cfe24
RG
7738 && (((inter_int || inter_ptr) && final_int)
7739 || (inter_float && final_float))
659d8efa 7740 && inter_prec >= final_prec)
db3927fb 7741 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
659d8efa 7742
1803581d
EB
7743 /* Likewise, if the intermediate and initial types are either both
7744 float or both integer, we don't need the middle conversion if the
7745 former is wider than the latter and doesn't change the signedness
7746 (for integers). Avoid this if the final type is a pointer since
7747 then we sometimes need the middle conversion. Likewise if the
7748 final type has a precision not equal to the size of its mode. */
6aa12f4f 7749 if (((inter_int && inside_int)
4b8d544b
JJ
7750 || (inter_float && inside_float)
7751 || (inter_vec && inside_vec))
659d8efa 7752 && inter_prec >= inside_prec
4b8d544b
JJ
7753 && (inter_float || inter_vec
7754 || inter_unsignedp == inside_unsignedp)
659d8efa
KH
7755 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7756 && TYPE_MODE (type) == TYPE_MODE (inter_type))
4b8d544b
JJ
7757 && ! final_ptr
7758 && (! final_vec || inter_prec == inside_prec))
db3927fb 7759 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
659d8efa
KH
7760
7761 /* If we have a sign-extension of a zero-extended value, we can
7762 replace that by a single zero-extension. */
7763 if (inside_int && inter_int && final_int
7764 && inside_prec < inter_prec && inter_prec < final_prec
7765 && inside_unsignedp && !inter_unsignedp)
db3927fb 7766 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
659d8efa
KH
7767
7768 /* Two conversions in a row are not needed unless:
7769 - some conversion is floating-point (overstrict for now), or
4b8d544b 7770 - some conversion is a vector (overstrict for now), or
659d8efa
KH
7771 - the intermediate type is narrower than both initial and
7772 final, or
7773 - the intermediate type and innermost type differ in signedness,
7774 and the outermost type is wider than the intermediate, or
7775 - the initial type is a pointer type and the precisions of the
7776 intermediate and final types differ, or
7777 - the final type is a pointer type and the precisions of the
c4e5b5a8 7778 initial and intermediate types differ. */
659d8efa 7779 if (! inside_float && ! inter_float && ! final_float
4b8d544b 7780 && ! inside_vec && ! inter_vec && ! final_vec
497cfe24 7781 && (inter_prec >= inside_prec || inter_prec >= final_prec)
659d8efa
KH
7782 && ! (inside_int && inter_int
7783 && inter_unsignedp != inside_unsignedp
7784 && inter_prec < final_prec)
7785 && ((inter_unsignedp && inter_prec > inside_prec)
7786 == (final_unsignedp && final_prec > inter_prec))
7787 && ! (inside_ptr && inter_prec != final_prec)
7788 && ! (final_ptr && inside_prec != inter_prec)
7789 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
c4e5b5a8 7790 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
db3927fb 7791 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
659d8efa
KH
7792 }
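      /* Illustrative example (editorial sketch, not part of the original
	 source): with `unsigned char c', the nested conversion
	 (long long) (int) c is a sign-extension of a zero-extended value,
	 so it is folded to the single conversion (long long) c, which
	 zero-extends directly.  */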
7793
46c0a59d 7794 /* Handle (T *)&A.B.C for A being of type T and B and C
a4174ebf 7795 living at offset zero. This occurs frequently in
46c0a59d
RG
7796 C++ upcasting and then accessing the base. */
7797 if (TREE_CODE (op0) == ADDR_EXPR
7798 && POINTER_TYPE_P (type)
7799 && handled_component_p (TREE_OPERAND (op0, 0)))
7800 {
7801 HOST_WIDE_INT bitsize, bitpos;
7802 tree offset;
7803 enum machine_mode mode;
7804 int unsignedp, volatilep;
7805 tree base = TREE_OPERAND (op0, 0);
7806 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7807 &mode, &unsignedp, &volatilep, false);
7808 /* If the reference was to a (constant) zero offset, we can use
7809 the address of the base if it has the same base type
2ea9dc64 7810 as the result type and the pointer type is unqualified. */
46c0a59d 7811 if (! offset && bitpos == 0
2ea9dc64 7812 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
46c0a59d 7813 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
2ea9dc64 7814 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
db3927fb
AH
7815 return fold_convert_loc (loc, type,
7816 build_fold_addr_expr_loc (loc, base));
46c0a59d
RG
7817 }
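      /* Illustrative example (editorial sketch, not part of the original
	 source): for
	   struct A { struct B b; } a;
	 the cast (struct A *) &a.b addresses a component at offset zero
	 whose enclosing object already has the target type, so it folds
	 to plain &a.  */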
7818
726a989a
RB
7819 if (TREE_CODE (op0) == MODIFY_EXPR
7820 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
659d8efa 7821 /* Detect assigning a bitfield. */
726a989a 7822 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
07beea0d 7823 && DECL_BIT_FIELD
726a989a 7824 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
659d8efa
KH
7825 {
7826 /* Don't leave an assignment inside a conversion
7827 unless assigning a bitfield. */
db3927fb 7828 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
659d8efa 7829 /* First do the assignment, then return converted constant. */
c9019218 7830 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
659d8efa
KH
7831 TREE_NO_WARNING (tem) = 1;
7832 TREE_USED (tem) = 1;
7833 return tem;
7834 }
7835
7836 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7837	 constant (if x has signed type, the sign bit cannot be set
bfab40f8
EB
7838 in c). This folds extension into the BIT_AND_EXPR.
7839 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7840 very likely don't have maximal range for their precision and this
7841 transformation effectively doesn't preserve non-maximal ranges. */
1e17e15a 7842 if (TREE_CODE (type) == INTEGER_TYPE
4b58fc4d 7843 && TREE_CODE (op0) == BIT_AND_EXPR
84fb43a1 7844 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
659d8efa 7845 {
3d8b2a98
ILT
7846 tree and_expr = op0;
7847 tree and0 = TREE_OPERAND (and_expr, 0);
7848 tree and1 = TREE_OPERAND (and_expr, 1);
659d8efa
KH
7849 int change = 0;
7850
3d8b2a98 7851 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
659d8efa 7852 || (TYPE_PRECISION (type)
3d8b2a98 7853 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
659d8efa
KH
7854 change = 1;
7855 else if (TYPE_PRECISION (TREE_TYPE (and1))
7856 <= HOST_BITS_PER_WIDE_INT
7857 && host_integerp (and1, 1))
7858 {
7859 unsigned HOST_WIDE_INT cst;
7860
7861 cst = tree_low_cst (and1, 1);
7862 cst &= (HOST_WIDE_INT) -1
7863 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7864 change = (cst == 0);
7865#ifdef LOAD_EXTEND_OP
7866 if (change
7867 && !flag_syntax_only
7868 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7869 == ZERO_EXTEND))
7870 {
ca5ba2a3 7871 tree uns = unsigned_type_for (TREE_TYPE (and0));
db3927fb
AH
7872 and0 = fold_convert_loc (loc, uns, and0);
7873 and1 = fold_convert_loc (loc, uns, and1);
659d8efa
KH
7874 }
7875#endif
7876 }
7877 if (change)
7878 {
9589f23e
AS
7879 tem = force_fit_type_double (type, tree_to_double_int (and1),
7880 0, TREE_OVERFLOW (and1));
db3927fb
AH
7881 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7882 fold_convert_loc (loc, type, and0), tem);
659d8efa
KH
7883 }
7884 }
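      /* Illustrative example (editorial sketch, not part of the original
	 source): with `int i', the expression (long) (i & 15) is folded to
	 ((long) i) & 15; the mask leaves the sign bit clear, so extending
	 before or after the AND yields the same value.  */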
7885
5be014d5 7886 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
ac5a28a6 7887 when one of the new casts will fold away. Conservatively we assume
5be014d5
AP
7888 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7889 if (POINTER_TYPE_P (type)
7890 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
ac5a28a6
JH
7891 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7892 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7893 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
659d8efa
KH
7894 {
7895 tree arg00 = TREE_OPERAND (arg0, 0);
ac5a28a6
JH
7896 tree arg01 = TREE_OPERAND (arg0, 1);
7897
db3927fb
AH
7898 return fold_build2_loc (loc,
7899 TREE_CODE (arg0), type,
7900 fold_convert_loc (loc, type, arg00),
7901 fold_convert_loc (loc, sizetype, arg01));
659d8efa
KH
7902 }
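      /* Illustrative example (editorial sketch, not part of the original
	 source): with `int *p', the source expression (char *) (p + 1) is
	 a conversion of a POINTER_PLUS_EXPR with byte offset 4 and is
	 folded to ((char *) p) p+ 4, i.e. the cast is pushed onto the
	 pointer operand.  */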
7903
e8206491 7904 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
110abdbc 7905 of the same precision, and X is an integer type not narrower than
e8206491
RS
7906 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7907 if (INTEGRAL_TYPE_P (type)
7908 && TREE_CODE (op0) == BIT_NOT_EXPR
7909 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
1043771b 7910 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
e8206491
RS
7911 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7912 {
7913 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7914 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7915 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
db3927fb
AH
7916 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7917 fold_convert_loc (loc, type, tem));
e8206491
RS
7918 }
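      /* Illustrative example (editorial sketch, not part of the original
	 source): with `int x', the expression (int) ~(unsigned int) x
	 involves two integral types of equal precision and is folded
	 to ~x.  */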
7919
c83bd37c
PB
7920 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7921 type of X and Y (integer types only). */
7922 if (INTEGRAL_TYPE_P (type)
7923 && TREE_CODE (op0) == MULT_EXPR
7924 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7925 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7926 {
7927 /* Be careful not to introduce new overflows. */
7928 tree mult_type;
7929 if (TYPE_OVERFLOW_WRAPS (type))
7930 mult_type = type;
7931 else
7932 mult_type = unsigned_type_for (type);
b7785654
JJ
7933
7934 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7935 {
db3927fb
AH
7936 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7937 fold_convert_loc (loc, mult_type,
7938 TREE_OPERAND (op0, 0)),
7939 fold_convert_loc (loc, mult_type,
7940 TREE_OPERAND (op0, 1)));
7941 return fold_convert_loc (loc, type, tem);
b7785654 7942 }
c83bd37c
PB
7943 }
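      /* Illustrative example (editorial sketch, not part of the original
	 source): with `int x, y' and a signed `short' result that does not
	 wrap, (short) (x * y) is folded to
	 (short) ((unsigned short) x * (unsigned short) y), doing the
	 multiplication in the narrower unsigned type so that no new signed
	 overflow is introduced.  */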
7944
84ece8ef 7945 tem = fold_convert_const (code, type, op0);
62ab45cc 7946 return tem ? tem : NULL_TREE;
659d8efa 7947
09e881c9
BE
7948 case ADDR_SPACE_CONVERT_EXPR:
7949 if (integer_zerop (arg0))
7950 return fold_convert_const (code, type, arg0);
7951 return NULL_TREE;
7952
325217ed
CF
7953 case FIXED_CONVERT_EXPR:
7954 tem = fold_convert_const (code, type, arg0);
7955 return tem ? tem : NULL_TREE;
7956
659d8efa 7957 case VIEW_CONVERT_EXPR:
f85242f0
RS
7958 if (TREE_TYPE (op0) == type)
7959 return op0;
9a327766 7960 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
db3927fb
AH
7961 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7962 type, TREE_OPERAND (op0, 0));
70f34814
RG
7963 if (TREE_CODE (op0) == MEM_REF)
7964 return fold_build2_loc (loc, MEM_REF, type,
7965 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9a327766
RG
7966
7967 /* For integral conversions with the same precision or pointer
7968 conversions use a NOP_EXPR instead. */
3d45dd59
RG
7969 if ((INTEGRAL_TYPE_P (type)
7970 || POINTER_TYPE_P (type))
7971 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7972 || POINTER_TYPE_P (TREE_TYPE (op0)))
84fb43a1 7973 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
db3927fb 7974 return fold_convert_loc (loc, type, op0);
9a327766
RG
7975
7976 /* Strip inner integral conversions that do not change the precision. */
1043771b 7977 if (CONVERT_EXPR_P (op0)
3d45dd59
RG
7978 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7979 || POINTER_TYPE_P (TREE_TYPE (op0)))
7980 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7981 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
9a327766
RG
7982 && (TYPE_PRECISION (TREE_TYPE (op0))
7983 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
db3927fb
AH
7984 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7985 type, TREE_OPERAND (op0, 0));
9a327766 7986
78bf6e2f 7987 return fold_view_convert_expr (type, op0);
659d8efa
KH
7988
7989 case NEGATE_EXPR:
db3927fb 7990 tem = fold_negate_expr (loc, arg0);
1af8dcbf 7991 if (tem)
db3927fb 7992 return fold_convert_loc (loc, type, tem);
62ab45cc 7993 return NULL_TREE;
659d8efa
KH
7994
7995 case ABS_EXPR:
7996 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7997 return fold_abs_const (arg0, type);
7998 else if (TREE_CODE (arg0) == NEGATE_EXPR)
db3927fb 7999 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
659d8efa
KH
8000 /* Convert fabs((double)float) into (double)fabsf(float). */
8001 else if (TREE_CODE (arg0) == NOP_EXPR
8002 && TREE_CODE (type) == REAL_TYPE)
8003 {
8004 tree targ0 = strip_float_extensions (arg0);
8005 if (targ0 != arg0)
db3927fb
AH
8006 return fold_convert_loc (loc, type,
8007 fold_build1_loc (loc, ABS_EXPR,
8008 TREE_TYPE (targ0),
8009 targ0));
659d8efa 8010 }
1ade5842 8011 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
6ac01510
ILT
8012 else if (TREE_CODE (arg0) == ABS_EXPR)
8013 return arg0;
8014 else if (tree_expr_nonnegative_p (arg0))
659d8efa
KH
8015 return arg0;
8016
8017 /* Strip sign ops from argument. */
8018 if (TREE_CODE (type) == REAL_TYPE)
8019 {
8020 tem = fold_strip_sign_ops (arg0);
8021 if (tem)
db3927fb
AH
8022 return fold_build1_loc (loc, ABS_EXPR, type,
8023 fold_convert_loc (loc, type, tem));
659d8efa 8024 }
62ab45cc 8025 return NULL_TREE;
659d8efa
KH
8026
8027 case CONJ_EXPR:
8028 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
db3927fb 8029 return fold_convert_loc (loc, type, arg0);
9734ebaf
RS
8030 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8031 {
8032 tree itype = TREE_TYPE (type);
db3927fb
AH
8033 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8034 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8035 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8036 negate_expr (ipart));
9734ebaf
RS
8037 }
8038 if (TREE_CODE (arg0) == COMPLEX_CST)
8039 {
8040 tree itype = TREE_TYPE (type);
db3927fb
AH
8041 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8042 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
9734ebaf
RS
8043 return build_complex (type, rpart, negate_expr (ipart));
8044 }
8045 if (TREE_CODE (arg0) == CONJ_EXPR)
db3927fb 8046 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
62ab45cc 8047 return NULL_TREE;
659d8efa
KH
8048
8049 case BIT_NOT_EXPR:
8050 if (TREE_CODE (arg0) == INTEGER_CST)
8051 return fold_not_const (arg0, type);
8052 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
db3927fb 8053 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
659d8efa
KH
8054 /* Convert ~ (-A) to A - 1. */
8055 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
db3927fb
AH
8056 return fold_build2_loc (loc, MINUS_EXPR, type,
8057 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
7f20a5b7 8058 build_int_cst (type, 1));
659d8efa
KH
8059 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8060 else if (INTEGRAL_TYPE_P (type)
8061 && ((TREE_CODE (arg0) == MINUS_EXPR
8062 && integer_onep (TREE_OPERAND (arg0, 1)))
8063 || (TREE_CODE (arg0) == PLUS_EXPR
8064 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
db3927fb
AH
8065 return fold_build1_loc (loc, NEGATE_EXPR, type,
8066 fold_convert_loc (loc, type,
8067 TREE_OPERAND (arg0, 0)));
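      /* Editorial note (not part of the original source): both folds follow
	 from the two's-complement identity ~X == -X - 1; e.g. for A == 5,
	 ~(-A) == ~(-5) == 4 == A - 1.  */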
f242e769
JM
8068 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8069 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
db3927fb
AH
8070 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8071 fold_convert_loc (loc, type,
8072 TREE_OPERAND (arg0, 0)))))
8073 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8074 fold_convert_loc (loc, type,
8075 TREE_OPERAND (arg0, 1)));
f242e769 8076 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
db3927fb
AH
8077 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8078 fold_convert_loc (loc, type,
8079 TREE_OPERAND (arg0, 1)))))
8080 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8081 fold_convert_loc (loc, type,
8082 TREE_OPERAND (arg0, 0)), tem);
c01ee935
JJ
8083 /* Perform BIT_NOT_EXPR on each element individually. */
8084 else if (TREE_CODE (arg0) == VECTOR_CST)
8085 {
8086 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8087 int count = TYPE_VECTOR_SUBPARTS (type), i;
8088
8089 for (i = 0; i < count; i++)
8090 {
8091 if (elements)
8092 {
8093 elem = TREE_VALUE (elements);
db3927fb 8094 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
c01ee935
JJ
8095 if (elem == NULL_TREE)
8096 break;
8097 elements = TREE_CHAIN (elements);
8098 }
8099 else
8100 elem = build_int_cst (TREE_TYPE (type), -1);
8101 list = tree_cons (NULL_TREE, elem, list);
8102 }
8103 if (i == count)
8104 return build_vector (type, nreverse (list));
8105 }
f242e769 8106
62ab45cc 8107 return NULL_TREE;
659d8efa
KH
8108
8109 case TRUTH_NOT_EXPR:
8110 /* The argument to invert_truthvalue must have Boolean type. */
8111 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
db3927fb 8112 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
659d8efa
KH
8113
8114 /* Note that the operand of this must be an int
8115 and its values must be 0 or 1.
8116 ("true" is a fixed value perhaps depending on the language,
8117 but we don't handle values other than 1 correctly yet.) */
db3927fb 8118 tem = fold_truth_not_expr (loc, arg0);
d817ed3b 8119 if (!tem)
62ab45cc 8120 return NULL_TREE;
db3927fb 8121 return fold_convert_loc (loc, type, tem);
659d8efa
KH
8122
8123 case REALPART_EXPR:
8124 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
db3927fb 8125 return fold_convert_loc (loc, type, arg0);
9734ebaf 8126 if (TREE_CODE (arg0) == COMPLEX_EXPR)
db3927fb 8127 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
659d8efa 8128 TREE_OPERAND (arg0, 1));
9734ebaf 8129 if (TREE_CODE (arg0) == COMPLEX_CST)
db3927fb 8130 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
9734ebaf
RS
8131 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8132 {
8133 tree itype = TREE_TYPE (TREE_TYPE (arg0));
db3927fb
AH
8134 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8135 fold_build1_loc (loc, REALPART_EXPR, itype,
9734ebaf 8136 TREE_OPERAND (arg0, 0)),
db3927fb 8137 fold_build1_loc (loc, REALPART_EXPR, itype,
9734ebaf 8138 TREE_OPERAND (arg0, 1)));
db3927fb 8139 return fold_convert_loc (loc, type, tem);
9734ebaf
RS
8140 }
8141 if (TREE_CODE (arg0) == CONJ_EXPR)
8142 {
8143 tree itype = TREE_TYPE (TREE_TYPE (arg0));
db3927fb
AH
8144 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8145 TREE_OPERAND (arg0, 0));
8146 return fold_convert_loc (loc, type, tem);
9734ebaf 8147 }
85aef79f
RG
8148 if (TREE_CODE (arg0) == CALL_EXPR)
8149 {
8150 tree fn = get_callee_fndecl (arg0);
111f1fca 8151 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
85aef79f
RG
8152 switch (DECL_FUNCTION_CODE (fn))
8153 {
8154 CASE_FLT_FN (BUILT_IN_CEXPI):
8155 fn = mathfn_built_in (type, BUILT_IN_COS);
2d38026b 8156 if (fn)
db3927fb 8157 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
2d38026b 8158 break;
85aef79f 8159
2d38026b
RS
8160 default:
8161 break;
85aef79f
RG
8162 }
8163 }
62ab45cc 8164 return NULL_TREE;
659d8efa
KH
8165
8166 case IMAGPART_EXPR:
8167 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
e8160c9a 8168 return build_zero_cst (type);
9734ebaf 8169 if (TREE_CODE (arg0) == COMPLEX_EXPR)
db3927fb 8170 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
659d8efa 8171 TREE_OPERAND (arg0, 0));
9734ebaf 8172 if (TREE_CODE (arg0) == COMPLEX_CST)
db3927fb 8173 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
9734ebaf
RS
8174 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8175 {
8176 tree itype = TREE_TYPE (TREE_TYPE (arg0));
db3927fb
AH
8177 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8178 fold_build1_loc (loc, IMAGPART_EXPR, itype,
9734ebaf 8179 TREE_OPERAND (arg0, 0)),
db3927fb 8180 fold_build1_loc (loc, IMAGPART_EXPR, itype,
9734ebaf 8181 TREE_OPERAND (arg0, 1)));
db3927fb 8182 return fold_convert_loc (loc, type, tem);
9734ebaf
RS
8183 }
8184 if (TREE_CODE (arg0) == CONJ_EXPR)
8185 {
8186 tree itype = TREE_TYPE (TREE_TYPE (arg0));
db3927fb
AH
8187 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8188 return fold_convert_loc (loc, type, negate_expr (tem));
9734ebaf 8189 }
85aef79f
RG
8190 if (TREE_CODE (arg0) == CALL_EXPR)
8191 {
8192 tree fn = get_callee_fndecl (arg0);
111f1fca 8193 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
85aef79f
RG
8194 switch (DECL_FUNCTION_CODE (fn))
8195 {
8196 CASE_FLT_FN (BUILT_IN_CEXPI):
8197 fn = mathfn_built_in (type, BUILT_IN_SIN);
2d38026b 8198 if (fn)
db3927fb 8199 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
2d38026b 8200 break;
85aef79f 8201
2d38026b
RS
8202 default:
8203 break;
85aef79f
RG
8204 }
8205 }
62ab45cc 8206 return NULL_TREE;
659d8efa 8207
48f30f62
RG
8208 case INDIRECT_REF:
8209 /* Fold *&X to X if X is an lvalue. */
8210 if (TREE_CODE (op0) == ADDR_EXPR)
8211 {
8212 tree op00 = TREE_OPERAND (op0, 0);
8213 if ((TREE_CODE (op00) == VAR_DECL
8214 || TREE_CODE (op00) == PARM_DECL
8215 || TREE_CODE (op00) == RESULT_DECL)
8216 && !TREE_READONLY (op00))
8217 return op00;
8218 }
8219 return NULL_TREE;
8220
659d8efa 8221 default:
62ab45cc 8222 return NULL_TREE;
659d8efa
KH
8223 } /* switch (code) */
8224}
8225
9bacafeb
PB
8226
8227 /* If the operation was a conversion, do _not_ mark a resulting constant
8228 with TREE_OVERFLOW if the original constant was not. These conversions
8229 have implementation defined behavior and retaining the TREE_OVERFLOW
8230 flag here would confuse later passes such as VRP. */
8231tree
db3927fb
AH
8232fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8233 tree type, tree op0)
9bacafeb 8234{
db3927fb 8235 tree res = fold_unary_loc (loc, code, type, op0);
9bacafeb
PB
8236 if (res
8237 && TREE_CODE (res) == INTEGER_CST
8238 && TREE_CODE (op0) == INTEGER_CST
8239 && CONVERT_EXPR_CODE_P (code))
8240 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8241
8242 return res;
8243}
8244
292f30c5
EB
8245/* Fold a binary expression of code CODE and type TYPE with operands
8246 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8247 Return the folded expression if folding is successful. Otherwise,
8248 return NULL_TREE. */
8249
8250static tree
db3927fb 8251fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
292f30c5
EB
8252{
8253 enum tree_code compl_code;
8254
8255 if (code == MIN_EXPR)
8256 compl_code = MAX_EXPR;
8257 else if (code == MAX_EXPR)
8258 compl_code = MIN_EXPR;
8259 else
5f180d36 8260 gcc_unreachable ();
292f30c5 8261
f0dbdfbb 8262 /* MIN (MAX (a, b), b) == b. */
292f30c5
EB
8263 if (TREE_CODE (op0) == compl_code
8264 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
db3927fb 8265 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
292f30c5 8266
f0dbdfbb 8267 /* MIN (MAX (b, a), b) == b. */
292f30c5
EB
8268 if (TREE_CODE (op0) == compl_code
8269 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8270 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
db3927fb 8271 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
292f30c5 8272
f0dbdfbb 8273 /* MIN (a, MAX (a, b)) == a. */
292f30c5
EB
8274 if (TREE_CODE (op1) == compl_code
8275 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8276 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
db3927fb 8277 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
292f30c5 8278
f0dbdfbb 8279 /* MIN (a, MAX (b, a)) == a. */
292f30c5
EB
8280 if (TREE_CODE (op1) == compl_code
8281 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8282 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
db3927fb 8283 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
292f30c5
EB
8284
8285 return NULL_TREE;
8286}
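/* Illustrative example (editorial sketch, not part of the original source):
   for MIN (MAX (a, b), b) the result is b in either ordering: if a <= b the
   MAX yields b and MIN (b, b) == b, otherwise the MAX yields a > b and
   MIN (a, b) == b.  The remaining identities above follow by symmetry and
   by exchanging MIN and MAX.  */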
8287
e73dbcae
RG
8288/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8289 by changing CODE to reduce the magnitude of constants involved in
8290 ARG0 of the comparison.
8291 Returns a canonicalized comparison tree if a simplification was
6ac01510
ILT
8292 possible, otherwise returns NULL_TREE.
8293 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8294 valid if signed overflow is undefined. */
e73dbcae
RG
8295
8296static tree
db3927fb 8297maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
6ac01510
ILT
8298 tree arg0, tree arg1,
8299 bool *strict_overflow_p)
e73dbcae
RG
8300{
8301 enum tree_code code0 = TREE_CODE (arg0);
8302 tree t, cst0 = NULL_TREE;
8303 int sgn0;
8304 bool swap = false;
8305
0b45fd7a
RG
8306 /* Match A +- CST code arg1 and CST code arg1. We can change the
8307 first form only if overflow is undefined. */
8308 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8309 /* In principle pointers also have undefined overflow behavior,
8310 but that causes problems elsewhere. */
8311 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8312 && (code0 == MINUS_EXPR
8313 || code0 == PLUS_EXPR)
e73dbcae
RG
8314 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8315 || code0 == INTEGER_CST))
8316 return NULL_TREE;
8317
8318 /* Identify the constant in arg0 and its sign. */
8319 if (code0 == INTEGER_CST)
8320 cst0 = arg0;
8321 else
8322 cst0 = TREE_OPERAND (arg0, 1);
8323 sgn0 = tree_int_cst_sgn (cst0);
8324
8325 /* Overflowed constants and zero will cause problems. */
8326 if (integer_zerop (cst0)
8327 || TREE_OVERFLOW (cst0))
8328 return NULL_TREE;
8329
2f8e468b 8330 /* See if we can reduce the magnitude of the constant in
e73dbcae
RG
8331 arg0 by changing the comparison code. */
8332 if (code0 == INTEGER_CST)
8333 {
8334 /* CST <= arg1 -> CST-1 < arg1. */
8335 if (code == LE_EXPR && sgn0 == 1)
8336 code = LT_EXPR;
8337 /* -CST < arg1 -> -CST-1 <= arg1. */
8338 else if (code == LT_EXPR && sgn0 == -1)
8339 code = LE_EXPR;
8340 /* CST > arg1 -> CST-1 >= arg1. */
8341 else if (code == GT_EXPR && sgn0 == 1)
8342 code = GE_EXPR;
8343 /* -CST >= arg1 -> -CST-1 > arg1. */
8344 else if (code == GE_EXPR && sgn0 == -1)
8345 code = GT_EXPR;
8346 else
8347 return NULL_TREE;
8348 /* arg1 code' CST' might be more canonical. */
8349 swap = true;
8350 }
8351 else
8352 {
8353 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8354 if (code == LT_EXPR
8355 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8356 code = LE_EXPR;
8357 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8358 else if (code == GT_EXPR
8359 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8360 code = GE_EXPR;
8361 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8362 else if (code == LE_EXPR
8363 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8364 code = LT_EXPR;
8365 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8366 else if (code == GE_EXPR
8367 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8368 code = GT_EXPR;
8369 else
8370 return NULL_TREE;
6ac01510 8371 *strict_overflow_p = true;
e73dbcae
RG
8372 }
8373
0b45fd7a
RG
8374 /* Now build the constant reduced in magnitude. But not if that
8375	 would produce one outside of its type's range.  */
8376 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8377 && ((sgn0 == 1
8378 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8379 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8380 || (sgn0 == -1
8381 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8382 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8383 /* We cannot swap the comparison here as that would cause us to
8384 endlessly recurse. */
8385 return NULL_TREE;
8386
e73dbcae 8387 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
0b45fd7a 8388 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
e73dbcae 8389 if (code0 != INTEGER_CST)
db3927fb 8390 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
e73dbcae
RG
8391
8392 /* If swapping might yield to a more canonical form, do so. */
8393 if (swap)
db3927fb 8394 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
e73dbcae 8395 else
db3927fb 8396 return fold_build2_loc (loc, code, type, t, arg1);
e73dbcae
RG
8397}
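/* Illustrative examples (editorial sketch, not part of the original source,
   assuming signed overflow is undefined): "x + 2 > y" is canonicalized to
   "x + 1 >= y", reducing the magnitude of the constant, and a sole constant
   such as "3 <= y" becomes "y > 2" after the final swap.  */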
8398
8399/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8400 overflow further. Try to decrease the magnitude of constants involved
8401 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8402 and put sole constants at the second argument position.
8403 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8404
8405static tree
db3927fb 8406maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
e73dbcae
RG
8407 tree arg0, tree arg1)
8408{
8409 tree t;
6ac01510
ILT
8410 bool strict_overflow_p;
8411 const char * const warnmsg = G_("assuming signed overflow does not occur "
8412 "when reducing constant in comparison");
e73dbcae 8413
e73dbcae 8414 /* Try canonicalization by simplifying arg0. */
6ac01510 8415 strict_overflow_p = false;
db3927fb 8416 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
6ac01510 8417 &strict_overflow_p);
e73dbcae 8418 if (t)
6ac01510
ILT
8419 {
8420 if (strict_overflow_p)
8421 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8422 return t;
8423 }
e73dbcae
RG
8424
8425 /* Try canonicalization by simplifying arg1 using the swapped
2f8e468b 8426 comparison. */
e73dbcae 8427 code = swap_tree_comparison (code);
6ac01510 8428 strict_overflow_p = false;
db3927fb 8429 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
6ac01510
ILT
8430 &strict_overflow_p);
8431 if (t && strict_overflow_p)
8432 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8433 return t;
e73dbcae
RG
8434}
8435
6e3c5c30
ILT
8436/* Return whether BASE + OFFSET + BITPOS may wrap around the address
8437 space. This is used to avoid issuing overflow warnings for
8438	 expressions like &p->x which cannot wrap.  */
8439
8440static bool
8441pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8442{
6e3c5c30 8443 unsigned HOST_WIDE_INT offset_low, total_low;
b2f06c39 8444 HOST_WIDE_INT size, offset_high, total_high;
6e3c5c30
ILT
8445
8446 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8447 return true;
8448
8449 if (bitpos < 0)
8450 return true;
8451
6e3c5c30
ILT
8452 if (offset == NULL_TREE)
8453 {
8454 offset_low = 0;
8455 offset_high = 0;
8456 }
8457 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8458 return true;
8459 else
8460 {
8461 offset_low = TREE_INT_CST_LOW (offset);
8462 offset_high = TREE_INT_CST_HIGH (offset);
8463 }
8464
8465 if (add_double_with_sign (offset_low, offset_high,
8466 bitpos / BITS_PER_UNIT, 0,
8467 &total_low, &total_high,
8468 true))
8469 return true;
8470
b2f06c39 8471 if (total_high != 0)
6e3c5c30 8472 return true;
b2f06c39
ILT
8473
8474 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8475 if (size <= 0)
8476 return true;
8477
8478 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8479 array. */
8480 if (TREE_CODE (base) == ADDR_EXPR)
8481 {
8482 HOST_WIDE_INT base_size;
8483
8484 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8485 if (base_size > 0 && size < base_size)
8486 size = base_size;
8487 }
8488
8489 return total_low > (unsigned HOST_WIDE_INT) size;
6e3c5c30
ILT
8490}
8491
e26ec0bb
RS
8492/* Subroutine of fold_binary. This routine performs all of the
8493 transformations that are common to the equality/inequality
8494 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8495 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8496	 fold_binary should call fold_binary instead.  Fold a comparison with
8497 tree code CODE and type TYPE with operands OP0 and OP1. Return
8498 the folded comparison or NULL_TREE. */
8499
8500static tree
db3927fb
AH
8501fold_comparison (location_t loc, enum tree_code code, tree type,
8502 tree op0, tree op1)
e26ec0bb
RS
8503{
8504 tree arg0, arg1, tem;
8505
8506 arg0 = op0;
8507 arg1 = op1;
8508
8509 STRIP_SIGN_NOPS (arg0);
8510 STRIP_SIGN_NOPS (arg1);
8511
8512 tem = fold_relational_const (code, type, arg0, arg1);
8513 if (tem != NULL_TREE)
8514 return tem;
8515
8516 /* If one arg is a real or integer constant, put it last. */
8517 if (tree_swap_operands_p (arg0, arg1, true))
db3927fb 8518 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
e26ec0bb 8519
e26ec0bb
RS
8520	 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1.  */
8521 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8522 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8523 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
eeef0e45 8524 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
e26ec0bb
RS
8525 && (TREE_CODE (arg1) == INTEGER_CST
8526 && !TREE_OVERFLOW (arg1)))
8527 {
8528 tree const1 = TREE_OPERAND (arg0, 1);
8529 tree const2 = arg1;
8530 tree variable = TREE_OPERAND (arg0, 0);
8531 tree lhs;
8532 int lhs_add;
8533 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8534
db3927fb 8535 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
e26ec0bb 8536 TREE_TYPE (arg1), const2, const1);
b44e7f07
ZD
8537
8538 /* If the constant operation overflowed this can be
8539 simplified as a comparison against INT_MAX/INT_MIN. */
8540 if (TREE_CODE (lhs) == INTEGER_CST
8541 && TREE_OVERFLOW (lhs))
8542 {
8543 int const1_sgn = tree_int_cst_sgn (const1);
8544 enum tree_code code2 = code;
8545
8546 /* Get the sign of the constant on the lhs if the
8547 operation were VARIABLE + CONST1. */
8548 if (TREE_CODE (arg0) == MINUS_EXPR)
8549 const1_sgn = -const1_sgn;
8550
8551 /* The sign of the constant determines if we overflowed
8552 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8553 Canonicalize to the INT_MIN overflow by swapping the comparison
8554 if necessary. */
8555 if (const1_sgn == -1)
8556 code2 = swap_tree_comparison (code);
8557
8558 /* We now can look at the canonicalized case
8559 VARIABLE + 1 CODE2 INT_MIN
8560 and decide on the result. */
8561 if (code2 == LT_EXPR
8562 || code2 == LE_EXPR
8563 || code2 == EQ_EXPR)
db3927fb 8564 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
b44e7f07
ZD
8565 else if (code2 == NE_EXPR
8566 || code2 == GE_EXPR
8567 || code2 == GT_EXPR)
db3927fb 8568 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
b44e7f07
ZD
8569 }
8570
e26ec0bb
RS
8571 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8572 && (TREE_CODE (lhs) != INTEGER_CST
8573 || !TREE_OVERFLOW (lhs)))
6ac01510 8574 {
49c8958b 8575 fold_overflow_warning ("assuming signed overflow does not occur "
6ac01510 8576 "when changing X +- C1 cmp C2 to "
49c8958b 8577 "X cmp C1 +- C2",
6ac01510 8578 WARN_STRICT_OVERFLOW_COMPARISON);
db3927fb 8579 return fold_build2_loc (loc, code, type, variable, lhs);
6ac01510 8580 }
e26ec0bb
RS
8581 }
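  /* Illustrative example (editorial sketch, not part of the original
     source): with undefined signed overflow, "x + 10 < 30" is rewritten
     here as "x < 20"; the constant is moved to the other side and combined,
     and the transformation is flagged for a strict-overflow warning.  */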
8582
e015f578
RG
8583	 /* For comparisons of pointers we can decompose them into a compile-time
8584 comparison of the base objects and the offsets into the object.
3e0de255
RG
8585 This requires at least one operand being an ADDR_EXPR or a
8586 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
e015f578
RG
8587 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8588 && (TREE_CODE (arg0) == ADDR_EXPR
3e0de255
RG
8589 || TREE_CODE (arg1) == ADDR_EXPR
8590 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8591 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
e015f578
RG
8592 {
8593 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8594 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8595 enum machine_mode mode;
8596 int volatilep, unsignedp;
bd03c084 8597 bool indirect_base0 = false, indirect_base1 = false;
e015f578
RG
8598
8599 /* Get base and offset for the access. Strip ADDR_EXPR for
8600 get_inner_reference, but put it back by stripping INDIRECT_REF
bd03c084
RG
8601 off the base object if possible. indirect_baseN will be true
8602 if baseN is not an address but refers to the object itself. */
e015f578
RG
8603 base0 = arg0;
8604 if (TREE_CODE (arg0) == ADDR_EXPR)
8605 {
8606 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8607 &bitsize, &bitpos0, &offset0, &mode,
8608 &unsignedp, &volatilep, false);
8609 if (TREE_CODE (base0) == INDIRECT_REF)
8610 base0 = TREE_OPERAND (base0, 0);
8611 else
8612 indirect_base0 = true;
8613 }
3e0de255
RG
8614 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8615 {
8616 base0 = TREE_OPERAND (arg0, 0);
743ad76e 8617 STRIP_SIGN_NOPS (base0);
70f34814
RG
8618 if (TREE_CODE (base0) == ADDR_EXPR)
8619 {
8620 base0 = TREE_OPERAND (base0, 0);
8621 indirect_base0 = true;
8622 }
3e0de255
RG
8623 offset0 = TREE_OPERAND (arg0, 1);
8624 }
e015f578
RG
8625
8626 base1 = arg1;
8627 if (TREE_CODE (arg1) == ADDR_EXPR)
8628 {
8629 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8630 &bitsize, &bitpos1, &offset1, &mode,
8631 &unsignedp, &volatilep, false);
bd03c084 8632 if (TREE_CODE (base1) == INDIRECT_REF)
e015f578 8633 base1 = TREE_OPERAND (base1, 0);
bd03c084
RG
8634 else
8635 indirect_base1 = true;
e015f578 8636 }
3e0de255
RG
8637 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8638 {
8639 base1 = TREE_OPERAND (arg1, 0);
743ad76e 8640 STRIP_SIGN_NOPS (base1);
70f34814
RG
8641 if (TREE_CODE (base1) == ADDR_EXPR)
8642 {
8643 base1 = TREE_OPERAND (base1, 0);
8644 indirect_base1 = true;
8645 }
3e0de255
RG
8646 offset1 = TREE_OPERAND (arg1, 1);
8647 }
e015f578 8648
94e85e0a
XDL
8649 /* A local variable can never be pointed to by
8650 the default SSA name of an incoming parameter. */
8651 if ((TREE_CODE (arg0) == ADDR_EXPR
8652 && indirect_base0
8653 && TREE_CODE (base0) == VAR_DECL
8654 && auto_var_in_fn_p (base0, current_function_decl)
8655 && !indirect_base1
8656 && TREE_CODE (base1) == SSA_NAME
8657 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8658 && SSA_NAME_IS_DEFAULT_DEF (base1))
8659 || (TREE_CODE (arg1) == ADDR_EXPR
8660 && indirect_base1
8661 && TREE_CODE (base1) == VAR_DECL
8662 && auto_var_in_fn_p (base1, current_function_decl)
8663 && !indirect_base0
8664 && TREE_CODE (base0) == SSA_NAME
8665 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8666 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8667 {
8668 if (code == NE_EXPR)
8669 return constant_boolean_node (1, type);
8670 else if (code == EQ_EXPR)
8671 return constant_boolean_node (0, type);
8672 }
e015f578 8673 /* If we have equivalent bases we might be able to simplify. */
94e85e0a
XDL
8674 else if (indirect_base0 == indirect_base1
8675 && operand_equal_p (base0, base1, 0))
e015f578
RG
8676 {
8677 /* We can fold this expression to a constant if the non-constant
8678 offset parts are equal. */
6e3c5c30
ILT
8679 if ((offset0 == offset1
8680 || (offset0 && offset1
8681 && operand_equal_p (offset0, offset1, 0)))
8682 && (code == EQ_EXPR
8683 || code == NE_EXPR
8684 || POINTER_TYPE_OVERFLOW_UNDEFINED))
b8698a0f 8685
e015f578 8686 {
6e3c5c30
ILT
8687 if (code != EQ_EXPR
8688 && code != NE_EXPR
8689 && bitpos0 != bitpos1
8690 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8691 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8692 fold_overflow_warning (("assuming pointer wraparound does not "
8693 "occur when comparing P +- C1 with "
8694 "P +- C2"),
8695 WARN_STRICT_OVERFLOW_CONDITIONAL);
8696
e015f578
RG
8697 switch (code)
8698 {
8699 case EQ_EXPR:
b0331ccb 8700 return constant_boolean_node (bitpos0 == bitpos1, type);
e015f578 8701 case NE_EXPR:
b0331ccb 8702 return constant_boolean_node (bitpos0 != bitpos1, type);
e015f578 8703 case LT_EXPR:
b0331ccb 8704 return constant_boolean_node (bitpos0 < bitpos1, type);
e015f578 8705 case LE_EXPR:
b0331ccb 8706 return constant_boolean_node (bitpos0 <= bitpos1, type);
e015f578 8707 case GE_EXPR:
b0331ccb 8708 return constant_boolean_node (bitpos0 >= bitpos1, type);
e015f578 8709 case GT_EXPR:
b0331ccb 8710 return constant_boolean_node (bitpos0 > bitpos1, type);
e015f578
RG
8711 default:;
8712 }
8713 }
8714 /* We can simplify the comparison to a comparison of the variable
8715 offset parts if the constant offset parts are equal.
8716 Be careful to use signed size type here because otherwise we
8717 mess with array offsets in the wrong way. This is possible
8718 because pointer arithmetic is restricted to retain within an
8719 object and overflow on pointer differences is undefined as of
8720 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
4c9db6e0
ILT
8721 else if (bitpos0 == bitpos1
8722 && ((code == EQ_EXPR || code == NE_EXPR)
8723 || POINTER_TYPE_OVERFLOW_UNDEFINED))
e015f578 8724 {
e015f578
RG
8725 /* By converting to signed size type we cover middle-end pointer
8726 arithmetic which operates on unsigned pointer types of size
8727 type size and ARRAY_REF offsets which are properly sign or
8728 zero extended from their type in case it is narrower than
8729 size type. */
8730 if (offset0 == NULL_TREE)
3b9e5d95 8731 offset0 = build_int_cst (ssizetype, 0);
e015f578 8732 else
3b9e5d95 8733 offset0 = fold_convert_loc (loc, ssizetype, offset0);
e015f578 8734 if (offset1 == NULL_TREE)
3b9e5d95 8735 offset1 = build_int_cst (ssizetype, 0);
e015f578 8736 else
3b9e5d95 8737 offset1 = fold_convert_loc (loc, ssizetype, offset1);
e015f578 8738
6e3c5c30
ILT
8739 if (code != EQ_EXPR
8740 && code != NE_EXPR
8741 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8742 || pointer_may_wrap_p (base1, offset1, bitpos1)))
4c9db6e0
ILT
8743 fold_overflow_warning (("assuming pointer wraparound does not "
8744 "occur when comparing P +- C1 with "
8745 "P +- C2"),
8746 WARN_STRICT_OVERFLOW_COMPARISON);
8747
db3927fb 8748 return fold_build2_loc (loc, code, type, offset0, offset1);
e015f578
RG
8749 }
8750 }
bd03c084
RG
8751 /* For non-equal bases we can simplify if they are addresses
8752 of local binding decls or constants. */
8753 else if (indirect_base0 && indirect_base1
8754 /* We know that !operand_equal_p (base0, base1, 0)
ffd837fe
RG
8755 because the if condition was false. But make
8756 sure two decls are not the same. */
8757 && base0 != base1
bd03c084
RG
8758 && TREE_CODE (arg0) == ADDR_EXPR
8759 && TREE_CODE (arg1) == ADDR_EXPR
ffd837fe
RG
8760 && (((TREE_CODE (base0) == VAR_DECL
8761 || TREE_CODE (base0) == PARM_DECL)
bd03c084
RG
8762 && (targetm.binds_local_p (base0)
8763 || CONSTANT_CLASS_P (base1)))
8764 || CONSTANT_CLASS_P (base0))
ffd837fe
RG
8765 && (((TREE_CODE (base1) == VAR_DECL
8766 || TREE_CODE (base1) == PARM_DECL)
bd03c084
RG
8767 && (targetm.binds_local_p (base1)
8768 || CONSTANT_CLASS_P (base0)))
8769 || CONSTANT_CLASS_P (base1)))
8770 {
8771 if (code == EQ_EXPR)
db3927fb
AH
8772 return omit_two_operands_loc (loc, type, boolean_false_node,
8773 arg0, arg1);
bd03c084 8774 else if (code == NE_EXPR)
db3927fb
AH
8775 return omit_two_operands_loc (loc, type, boolean_true_node,
8776 arg0, arg1);
bd03c084
RG
8777 }
8778 /* For equal offsets we can simplify to a comparison of the
8779 base addresses. */
8780 else if (bitpos0 == bitpos1
8781 && (indirect_base0
8782 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8783 && (indirect_base1
8784 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8785 && ((offset0 == offset1)
8786 || (offset0 && offset1
8787 && operand_equal_p (offset0, offset1, 0))))
8788 {
8789 if (indirect_base0)
db3927fb 8790 base0 = build_fold_addr_expr_loc (loc, base0);
bd03c084 8791 if (indirect_base1)
db3927fb
AH
8792 base1 = build_fold_addr_expr_loc (loc, base1);
8793 return fold_build2_loc (loc, code, type, base0, base1);
bd03c084 8794 }
e015f578
RG
8795 }
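  /* Illustrative example (editorial sketch, not part of the original
     source, assuming a 32-bit int): for
       struct S { int f0, f1; } s;
     the comparison &s.f0 < &s.f1 decomposes to the same base `s' with bit
     positions 0 and 32, so (for equality tests, or when pointer overflow is
     undefined) it folds to the constant 1.  */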
8796
8a1eca08
RG
8797 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8798 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8799 the resulting offset is smaller in absolute value than the
8800 original one. */
eeef0e45 8801 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8a1eca08
RG
8802 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8803 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8804 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8805 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8806 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8807 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8808 {
8809 tree const1 = TREE_OPERAND (arg0, 1);
8810 tree const2 = TREE_OPERAND (arg1, 1);
8811 tree variable1 = TREE_OPERAND (arg0, 0);
8812 tree variable2 = TREE_OPERAND (arg1, 0);
8813 tree cst;
6ac01510
ILT
8814 const char * const warnmsg = G_("assuming signed overflow does not "
8815 "occur when combining constants around "
8816 "a comparison");
8a1eca08
RG
8817
8818 /* Put the constant on the side where it doesn't overflow and is
8819 of lower absolute value than before. */
8820 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8821 ? MINUS_EXPR : PLUS_EXPR,
8822 const2, const1, 0);
8823 if (!TREE_OVERFLOW (cst)
8824 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
6ac01510
ILT
8825 {
8826 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
db3927fb 8827 return fold_build2_loc (loc, code, type,
6ac01510 8828 variable1,
db3927fb
AH
8829 fold_build2_loc (loc,
8830 TREE_CODE (arg1), TREE_TYPE (arg1),
6ac01510
ILT
8831 variable2, cst));
8832 }
8a1eca08
RG
8833
8834 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8835 ? MINUS_EXPR : PLUS_EXPR,
8836 const1, const2, 0);
8837 if (!TREE_OVERFLOW (cst)
8838 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
6ac01510
ILT
8839 {
8840 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
db3927fb
AH
8841 return fold_build2_loc (loc, code, type,
8842 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
6ac01510
ILT
8843 variable1, cst),
8844 variable2);
8845 }
8a1eca08
RG
8846 }
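  /* Illustrative example (editorial sketch, not part of the original
     source, assuming signed overflow is undefined): "x + 9 < y + 10" is
     rewritten as "x < y + 1"; the combined constant 1 is smaller in
     absolute value than either original constant, so this is the side on
     which it is placed.  */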
8847
6b074ef6
RK
8848 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8849 signed arithmetic case. That form is created by the compiler
8850 often enough for folding it to be of value. One example is in
8851 computing loop trip counts after Operator Strength Reduction. */
eeef0e45 8852 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
6b074ef6
RK
8853 && TREE_CODE (arg0) == MULT_EXPR
8854 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8855 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8856 && integer_zerop (arg1))
8857 {
8858 tree const1 = TREE_OPERAND (arg0, 1);
8859 tree const2 = arg1; /* zero */
8860 tree variable1 = TREE_OPERAND (arg0, 0);
8861 enum tree_code cmp_code = code;
8862
eb12d0ae
RG
8863 /* Handle unfolded multiplication by zero. */
8864 if (integer_zerop (const1))
8865 return fold_build2_loc (loc, cmp_code, type, const1, const2);
6b074ef6 8866
6ac01510
ILT
8867 fold_overflow_warning (("assuming signed overflow does not occur when "
8868 "eliminating multiplication in comparison "
8869 "with zero"),
8870 WARN_STRICT_OVERFLOW_COMPARISON);
8871
6b074ef6
RK
8872 /* If const1 is negative we swap the sense of the comparison. */
8873 if (tree_int_cst_sgn (const1) < 0)
8874 cmp_code = swap_tree_comparison (cmp_code);
8875
db3927fb 8876 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
6b074ef6
RK
8877 }
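  /* Illustrative example (editorial sketch, not part of the original
     source, assuming signed overflow is undefined): "x * 4 > 0" is folded
     to "x > 0", and "x * -4 > 0" to "x < 0", since a negative multiplier
     flips the sense of the comparison.  */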
8878
db3927fb 8879 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
e73dbcae
RG
8880 if (tem)
8881 return tem;
8882
e26ec0bb
RS
8883 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8884 {
8885 tree targ0 = strip_float_extensions (arg0);
8886 tree targ1 = strip_float_extensions (arg1);
8887 tree newtype = TREE_TYPE (targ0);
8888
8889 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8890 newtype = TREE_TYPE (targ1);
8891
8892 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8893 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
db3927fb
AH
8894 return fold_build2_loc (loc, code, type,
8895 fold_convert_loc (loc, newtype, targ0),
8896 fold_convert_loc (loc, newtype, targ1));
e26ec0bb
RS
8897
8898 /* (-a) CMP (-b) -> b CMP a */
8899 if (TREE_CODE (arg0) == NEGATE_EXPR
8900 && TREE_CODE (arg1) == NEGATE_EXPR)
db3927fb 8901 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
e26ec0bb
RS
8902 TREE_OPERAND (arg0, 0));
8903
8904 if (TREE_CODE (arg1) == REAL_CST)
8905 {
8906 REAL_VALUE_TYPE cst;
8907 cst = TREE_REAL_CST (arg1);
8908
8909 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8910 if (TREE_CODE (arg0) == NEGATE_EXPR)
db3927fb 8911 return fold_build2_loc (loc, swap_tree_comparison (code), type,
e26ec0bb
RS
8912 TREE_OPERAND (arg0, 0),
8913 build_real (TREE_TYPE (arg1),
d49b6e1e 8914 real_value_negate (&cst)));
e26ec0bb
RS
8915
8916 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8917 /* a CMP (-0) -> a CMP 0 */
8918 if (REAL_VALUE_MINUS_ZERO (cst))
db3927fb 8919 return fold_build2_loc (loc, code, type, arg0,
e26ec0bb
RS
8920 build_real (TREE_TYPE (arg1), dconst0));
8921
8922 /* x != NaN is always true, other ops are always false. */
8923 if (REAL_VALUE_ISNAN (cst)
8924 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8925 {
8926 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
db3927fb 8927 return omit_one_operand_loc (loc, type, tem, arg0);
e26ec0bb
RS
8928 }
8929
8930 /* Fold comparisons against infinity. */
dc215785
UW
8931 if (REAL_VALUE_ISINF (cst)
8932 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
e26ec0bb 8933 {
db3927fb 8934 tem = fold_inf_compare (loc, code, type, arg0, arg1);
e26ec0bb
RS
8935 if (tem != NULL_TREE)
8936 return tem;
8937 }
8938 }
8939
8940 /* If this is a comparison of a real constant with a PLUS_EXPR
8941 or a MINUS_EXPR of a real constant, we can convert it into a
8942 comparison with a revised real constant as long as no overflow
8943 occurs when unsafe_math_optimizations are enabled. */
8944 if (flag_unsafe_math_optimizations
8945 && TREE_CODE (arg1) == REAL_CST
8946 && (TREE_CODE (arg0) == PLUS_EXPR
8947 || TREE_CODE (arg0) == MINUS_EXPR)
8948 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8949 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8950 ? MINUS_EXPR : PLUS_EXPR,
43a5d30b 8951 arg1, TREE_OPERAND (arg0, 1)))
455f14dd 8952 && !TREE_OVERFLOW (tem))
db3927fb 8953 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
e26ec0bb
RS
8954
8955 /* Likewise, we can simplify a comparison of a real constant with
8956 a MINUS_EXPR whose first operand is also a real constant, i.e.
b8698a0f 8957 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
a1a82611
RE
8958 floating-point types only if -fassociative-math is set. */
8959 if (flag_associative_math
e26ec0bb
RS
8960 && TREE_CODE (arg1) == REAL_CST
8961 && TREE_CODE (arg0) == MINUS_EXPR
8962 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8963 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
43a5d30b 8964 arg1))
455f14dd 8965 && !TREE_OVERFLOW (tem))
db3927fb 8966 return fold_build2_loc (loc, swap_tree_comparison (code), type,
e26ec0bb
RS
8967 TREE_OPERAND (arg0, 1), tem);
8968
8969 /* Fold comparisons against built-in math functions. */
8970 if (TREE_CODE (arg1) == REAL_CST
8971 && flag_unsafe_math_optimizations
8972 && ! flag_errno_math)
8973 {
8974 enum built_in_function fcode = builtin_mathfn_code (arg0);
8975
8976 if (fcode != END_BUILTINS)
8977 {
db3927fb 8978 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
e26ec0bb
RS
8979 if (tem != NULL_TREE)
8980 return tem;
8981 }
8982 }
8983 }
8984
e26ec0bb 8985 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
1043771b 8986 && CONVERT_EXPR_P (arg0))
e26ec0bb
RS
8987 {
8988 /* If we are widening one operand of an integer comparison,
8989 see if the other operand is similarly being widened. Perhaps we
8990 can do the comparison in the narrower type. */
db3927fb 8991 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
e26ec0bb
RS
8992 if (tem)
8993 return tem;
8994
8995 /* Or if we are changing signedness. */
db3927fb 8996 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
e26ec0bb
RS
8997 if (tem)
8998 return tem;
8999 }
9000
9001 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9002 constant, we can simplify it. */
9003 if (TREE_CODE (arg1) == INTEGER_CST
9004 && (TREE_CODE (arg0) == MIN_EXPR
9005 || TREE_CODE (arg0) == MAX_EXPR)
9006 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9007 {
db3927fb 9008 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
e26ec0bb
RS
9009 if (tem)
9010 return tem;
9011 }
9012
9013 /* Simplify comparison of something with itself. (For IEEE
9014 floating-point, we can only do some of these simplifications.) */
9015 if (operand_equal_p (arg0, arg1, 0))
9016 {
9017 switch (code)
9018 {
9019 case EQ_EXPR:
9020 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9021 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9022 return constant_boolean_node (1, type);
9023 break;
9024
9025 case GE_EXPR:
9026 case LE_EXPR:
9027 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9028 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9029 return constant_boolean_node (1, type);
db3927fb 9030 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
e26ec0bb
RS
9031
9032 case NE_EXPR:
9033 /* For NE, we can only do this simplification if integer
9034 or we don't honor IEEE floating point NaNs. */
9035 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9036 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9037 break;
9038 /* ... fall through ... */
9039 case GT_EXPR:
9040 case LT_EXPR:
9041 return constant_boolean_node (0, type);
9042 default:
9043 gcc_unreachable ();
9044 }
9045 }
9046
9047 /* If we are comparing an expression that just has comparisons
9048 of two integer values, arithmetic expressions of those comparisons,
9049 and constants, we can simplify it. There are only three cases
9050 to check: the two values can either be equal, the first can be
9051 greater, or the second can be greater. Fold the expression for
9052 those three values. Since each value must be 0 or 1, we have
9053 eight possibilities, each of which corresponds to the constant 0
9054 or 1 or one of the six possible comparisons.
9055
9056 This handles common cases like (a > b) == 0 but also handles
9057 expressions like ((x > y) - (y > x)) > 0, which supposedly
9058 occur in macroized code. */
9059
9060 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9061 {
9062 tree cval1 = 0, cval2 = 0;
9063 int save_p = 0;
9064
9065 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9066 /* Don't handle degenerate cases here; they should already
9067 have been handled anyway. */
9068 && cval1 != 0 && cval2 != 0
9069 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9070 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9071 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9072 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9073 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9074 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9075 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9076 {
9077 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9078 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9079
9080 /* We can't just pass T to eval_subst in case cval1 or cval2
9081 was the same as ARG1. */
9082
9083 tree high_result
db3927fb
AH
9084 = fold_build2_loc (loc, code, type,
9085 eval_subst (loc, arg0, cval1, maxval,
e26ec0bb
RS
9086 cval2, minval),
9087 arg1);
9088 tree equal_result
db3927fb
AH
9089 = fold_build2_loc (loc, code, type,
9090 eval_subst (loc, arg0, cval1, maxval,
e26ec0bb
RS
9091 cval2, maxval),
9092 arg1);
9093 tree low_result
db3927fb
AH
9094 = fold_build2_loc (loc, code, type,
9095 eval_subst (loc, arg0, cval1, minval,
e26ec0bb
RS
9096 cval2, maxval),
9097 arg1);
9098
9099 /* All three of these results should be 0 or 1. Confirm they are.
9100 Then use those values to select the proper code to use. */
9101
9102 if (TREE_CODE (high_result) == INTEGER_CST
9103 && TREE_CODE (equal_result) == INTEGER_CST
9104 && TREE_CODE (low_result) == INTEGER_CST)
9105 {
9106 /* Make a 3-bit mask with the high-order bit being the
9107 value for `>', the next for '=', and the low for '<'. */
9108 switch ((integer_onep (high_result) * 4)
9109 + (integer_onep (equal_result) * 2)
9110 + integer_onep (low_result))
9111 {
9112 case 0:
9113 /* Always false. */
db3927fb 9114 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
e26ec0bb
RS
9115 case 1:
9116 code = LT_EXPR;
9117 break;
9118 case 2:
9119 code = EQ_EXPR;
9120 break;
9121 case 3:
9122 code = LE_EXPR;
9123 break;
9124 case 4:
9125 code = GT_EXPR;
9126 break;
9127 case 5:
9128 code = NE_EXPR;
9129 break;
9130 case 6:
9131 code = GE_EXPR;
9132 break;
9133 case 7:
9134 /* Always true. */
db3927fb 9135 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
e26ec0bb
RS
9136 }
9137
9138 if (save_p)
db3927fb
AH
9139 {
9140 tem = save_expr (build2 (code, type, cval1, cval2));
9141 SET_EXPR_LOCATION (tem, loc);
9142 return tem;
9143 }
9144 return fold_build2_loc (loc, code, type, cval1, cval2);
e26ec0bb
RS
9145 }
9146 }
9147 }
9148
e26ec0bb
RS
9149 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9150 into a single range test. */
9151 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9152 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9153 && TREE_CODE (arg1) == INTEGER_CST
9154 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9155 && !integer_zerop (TREE_OPERAND (arg0, 1))
9156 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9157 && !TREE_OVERFLOW (arg1))
9158 {
db3927fb 9159 tem = fold_div_compare (loc, code, type, arg0, arg1);
e26ec0bb
RS
9160 if (tem != NULL_TREE)
9161 return tem;
9162 }
9163
c159ffe7
RS
9164 /* Fold ~X op ~Y as Y op X. */
9165 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9166 && TREE_CODE (arg1) == BIT_NOT_EXPR)
270d43bf
RS
9167 {
9168 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
db3927fb
AH
9169 return fold_build2_loc (loc, code, type,
9170 fold_convert_loc (loc, cmp_type,
9171 TREE_OPERAND (arg1, 0)),
270d43bf
RS
9172 TREE_OPERAND (arg0, 0));
9173 }
c159ffe7
RS
9174
9175 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9176 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9177 && TREE_CODE (arg1) == INTEGER_CST)
270d43bf
RS
9178 {
9179 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
db3927fb 9180 return fold_build2_loc (loc, swap_tree_comparison (code), type,
270d43bf 9181 TREE_OPERAND (arg0, 0),
db3927fb
AH
9182 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9183 fold_convert_loc (loc, cmp_type, arg1)));
270d43bf 9184 }
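  /* Illustrative examples (editorial sketch, not part of the original
     source): "~x == ~y" becomes "y == x", and "~x < 5" becomes "x > ~5",
     i.e. "x > -6" for ordinary two's-complement int.  */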
c159ffe7 9185
e26ec0bb
RS
9186 return NULL_TREE;
9187}
9188
99b25753
RS
9189
9190/* Subroutine of fold_binary. Optimize complex multiplications of the
9191 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9192 argument EXPR represents the expression "z" of type TYPE. */
9193
9194static tree
db3927fb 9195fold_mult_zconjz (location_t loc, tree type, tree expr)
99b25753
RS
9196{
9197 tree itype = TREE_TYPE (type);
9198 tree rpart, ipart, tem;
9199
9200 if (TREE_CODE (expr) == COMPLEX_EXPR)
9201 {
9202 rpart = TREE_OPERAND (expr, 0);
9203 ipart = TREE_OPERAND (expr, 1);
9204 }
9205 else if (TREE_CODE (expr) == COMPLEX_CST)
9206 {
9207 rpart = TREE_REALPART (expr);
9208 ipart = TREE_IMAGPART (expr);
9209 }
9210 else
9211 {
9212 expr = save_expr (expr);
db3927fb
AH
9213 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9214 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
99b25753
RS
9215 }
9216
9217 rpart = save_expr (rpart);
9218 ipart = save_expr (ipart);
db3927fb
AH
9219 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9220 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9221 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9222 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
e8160c9a 9223 build_zero_cst (itype));
99b25753
RS
9224}
9225
9226
e5901cad
OW
9227/* Subroutine of fold_binary. If P is the value of EXPR, computes
9228 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9229 guarantees that P and N have the same least significant log2(M) bits.
9230 N is not otherwise constrained. In particular, N is not normalized to
9231 0 <= N < M as is common. In general, the precise value of P is unknown.
9232 M is chosen as large as possible such that constant N can be determined.
9233
617f3897
MJ
9234 Returns M and sets *RESIDUE to N.
9235
9236 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9237 account. This is not always possible due to PR 35705.
9238 */
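/* For instance, if EXPR is the address of a field at byte offset 4
   within a declaration whose DECL_ALIGN_UNIT is 8, the function
   returns M = 8 and sets *RESIDUE to 4, i.e. the pointer value P is
   congruent to 4 modulo 8.  */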
e5901cad
OW
9239
9240static unsigned HOST_WIDE_INT
617f3897
MJ
9241get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9242 bool allow_func_align)
e5901cad
OW
9243{
9244 enum tree_code code;
9245
9246 *residue = 0;
9247
9248 code = TREE_CODE (expr);
9249 if (code == ADDR_EXPR)
9250 {
9251 expr = TREE_OPERAND (expr, 0);
9252 if (handled_component_p (expr))
9253 {
9254 HOST_WIDE_INT bitsize, bitpos;
9255 tree offset;
9256 enum machine_mode mode;
9257 int unsignedp, volatilep;
9258
9259 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9260 &mode, &unsignedp, &volatilep, false);
9261 *residue = bitpos / BITS_PER_UNIT;
9262 if (offset)
9263 {
9264 if (TREE_CODE (offset) == INTEGER_CST)
9265 *residue += TREE_INT_CST_LOW (offset);
9266 else
9267 /* We don't handle more complicated offset expressions. */
9268 return 1;
9269 }
9270 }
9271
617f3897
MJ
9272 if (DECL_P (expr)
9273 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
e5901cad
OW
9274 return DECL_ALIGN_UNIT (expr);
9275 }
9276 else if (code == POINTER_PLUS_EXPR)
9277 {
9278 tree op0, op1;
9279 unsigned HOST_WIDE_INT modulus;
9280 enum tree_code inner_code;
b8698a0f 9281
e5901cad
OW
9282 op0 = TREE_OPERAND (expr, 0);
9283 STRIP_NOPS (op0);
617f3897
MJ
9284 modulus = get_pointer_modulus_and_residue (op0, residue,
9285 allow_func_align);
e5901cad
OW
9286
9287 op1 = TREE_OPERAND (expr, 1);
9288 STRIP_NOPS (op1);
9289 inner_code = TREE_CODE (op1);
9290 if (inner_code == INTEGER_CST)
9291 {
9292 *residue += TREE_INT_CST_LOW (op1);
9293 return modulus;
9294 }
9295 else if (inner_code == MULT_EXPR)
9296 {
9297 op1 = TREE_OPERAND (op1, 1);
9298 if (TREE_CODE (op1) == INTEGER_CST)
9299 {
9300 unsigned HOST_WIDE_INT align;
b8698a0f 9301
e5901cad
OW
9302 /* Compute the greatest power-of-2 divisor of op1. */
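                  /* In two's complement, op1 & -op1 isolates the least
                     significant set bit of op1, which is exactly that
                     divisor.  */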
9303 align = TREE_INT_CST_LOW (op1);
9304 align &= -align;
9305
9306 /* If align is non-zero and less than *modulus, replace
9307 *modulus with align. If align is 0, then either op1 is 0
9308 or the greatest power-of-2 divisor of op1 doesn't fit in an
9309 unsigned HOST_WIDE_INT. In either case, no additional
9310 constraint is imposed. */
9311 if (align)
9312 modulus = MIN (modulus, align);
9313
9314 return modulus;
9315 }
9316 }
9317 }
9318
9319 /* If we get here, we were unable to determine anything useful about the
9320 expression. */
9321 return 1;
9322}
9323
9324
7107fa7c 9325/* Fold a binary expression of code CODE and type TYPE with operands
db3927fb
AH
9326 OP0 and OP1. LOC is the location of the resulting expression.
9327 Return the folded expression if folding is successful. Otherwise,
9328 return NULL_TREE. */
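/* A typical caller goes through the fold_build2_loc wrapper; e.g. for
   an integral TYPE, fold_build2_loc (loc, PLUS_EXPR, type, x,
   build_int_cst (type, 0)) folds back to x (converted to TYPE).  */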
0aee4751 9329
721425b6 9330tree
db3927fb
AH
9331fold_binary_loc (location_t loc,
9332 enum tree_code code, tree type, tree op0, tree op1)
0aee4751 9333{
0aee4751 9334 enum tree_code_class kind = TREE_CODE_CLASS (code);
e26ec0bb
RS
9335 tree arg0, arg1, tem;
9336 tree t1 = NULL_TREE;
6ac01510 9337 bool strict_overflow_p;
0aee4751 9338
726a989a 9339 gcc_assert (IS_EXPR_CODE_CLASS (kind)
fd6c76f4
RS
9340 && TREE_CODE_LENGTH (code) == 2
9341 && op0 != NULL_TREE
9342 && op1 != NULL_TREE);
0aee4751 9343
fbaa905c
KH
9344 arg0 = op0;
9345 arg1 = op1;
1eaea409 9346
fd6c76f4
RS
9347 /* Strip any conversions that don't change the mode. This is
9348 safe for every expression, except for a comparison expression
9349 because its signedness is derived from its operands. So, in
9350 the latter case, only strip conversions that don't change the
f61edbf6
JJ
9351 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9352 preserved.
0aee4751 9353
fd6c76f4
RS
9354 Note that this is done as an internal manipulation within the
9355 constant folder, in order to find the simplest representation
9356 of the arguments so that their form can be studied. In any
9357 cases, the appropriate type conversions should be put back in
9358 the tree that will get out of the constant folder. */
0aee4751 9359
f61edbf6 9360 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
fd6c76f4
RS
9361 {
9362 STRIP_SIGN_NOPS (arg0);
9363 STRIP_SIGN_NOPS (arg1);
1eaea409 9364 }
fd6c76f4 9365 else
1eaea409 9366 {
fd6c76f4
RS
9367 STRIP_NOPS (arg0);
9368 STRIP_NOPS (arg1);
9369 }
0aee4751 9370
fd6c76f4
RS
9371 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9372 constant but we can't do arithmetic on them. */
9373 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9374 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
325217ed
CF
9375 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9376 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
fd6c76f4
RS
9377 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9378 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9379 {
9380 if (kind == tcc_binary)
325217ed
CF
9381 {
9382 /* Make sure type and arg0 have the same saturating flag. */
9383 gcc_assert (TYPE_SATURATING (type)
9384 == TYPE_SATURATING (TREE_TYPE (arg0)));
43a5d30b 9385 tem = const_binop (code, arg0, arg1);
325217ed 9386 }
fd6c76f4
RS
9387 else if (kind == tcc_comparison)
9388 tem = fold_relational_const (code, type, arg0, arg1);
1eaea409 9389 else
fd6c76f4 9390 tem = NULL_TREE;
1eaea409 9391
fd6c76f4
RS
9392 if (tem != NULL_TREE)
9393 {
9394 if (TREE_TYPE (tem) != type)
db3927fb 9395 tem = fold_convert_loc (loc, type, tem);
fd6c76f4
RS
9396 return tem;
9397 }
0aee4751
KH
9398 }
9399
9400 /* If this is a commutative operation, and ARG0 is a constant, move it
9401 to ARG1 to reduce the number of tests below. */
9402 if (commutative_tree_code (code)
9403 && tree_swap_operands_p (arg0, arg1, true))
db3927fb 9404 return fold_build2_loc (loc, code, type, op1, op0);
0aee4751 9405
fd6c76f4 9406 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
0aee4751
KH
9407
9408 First check for cases where an arithmetic operation is applied to a
9409 compound, conditional, or comparison operation. Push the arithmetic
9410 operation inside the compound or conditional to see if any folding
9411 can then be done. Convert comparison to conditional for this purpose.
9412 This also optimizes non-constant cases that used to be done in
9413 expand_expr.
9414
9415 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9416 one of the operands is a comparison and the other is a comparison, a
9417 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9418 code below would make the expression more complex. Change it to a
9419 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9420 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9421
9422 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9423 || code == EQ_EXPR || code == NE_EXPR)
9424 && ((truth_value_p (TREE_CODE (arg0))
9425 && (truth_value_p (TREE_CODE (arg1))
9426 || (TREE_CODE (arg1) == BIT_AND_EXPR
9427 && integer_onep (TREE_OPERAND (arg1, 1)))))
9428 || (truth_value_p (TREE_CODE (arg1))
9429 && (truth_value_p (TREE_CODE (arg0))
9430 || (TREE_CODE (arg0) == BIT_AND_EXPR
9431 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9432 {
db3927fb 9433 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7f20a5b7
KH
9434 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9435 : TRUTH_XOR_EXPR,
9436 boolean_type_node,
db3927fb
AH
9437 fold_convert_loc (loc, boolean_type_node, arg0),
9438 fold_convert_loc (loc, boolean_type_node, arg1));
0aee4751
KH
9439
9440 if (code == EQ_EXPR)
db3927fb 9441 tem = invert_truthvalue_loc (loc, tem);
0aee4751 9442
db3927fb 9443 return fold_convert_loc (loc, type, tem);
0aee4751
KH
9444 }
9445
4c17e288
RG
9446 if (TREE_CODE_CLASS (code) == tcc_binary
9447 || TREE_CODE_CLASS (code) == tcc_comparison)
0aee4751
KH
9448 {
9449 if (TREE_CODE (arg0) == COMPOUND_EXPR)
db3927fb
AH
9450 {
9451 tem = fold_build2_loc (loc, code, type,
9452 fold_convert_loc (loc, TREE_TYPE (op0),
9453 TREE_OPERAND (arg0, 1)), op1);
c9019218
JJ
9454 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9455 tem);
db3927fb 9456 }
0aee4751
KH
9457 if (TREE_CODE (arg1) == COMPOUND_EXPR
9458 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
db3927fb
AH
9459 {
9460 tem = fold_build2_loc (loc, code, type, op0,
9461 fold_convert_loc (loc, TREE_TYPE (op1),
9462 TREE_OPERAND (arg1, 1)));
c9019218
JJ
9463 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9464 tem);
db3927fb 9465 }
0aee4751
KH
9466
9467 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9468 {
db3927fb 9469 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
b8698a0f 9470 arg0, arg1,
0aee4751
KH
9471 /*cond_first_p=*/1);
9472 if (tem != NULL_TREE)
9473 return tem;
9474 }
9475
9476 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9477 {
db3927fb 9478 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
b8698a0f 9479 arg1, arg0,
0aee4751
KH
9480 /*cond_first_p=*/0);
9481 if (tem != NULL_TREE)
9482 return tem;
9483 }
9484 }
9485
9486 switch (code)
9487 {
70f34814
RG
9488 case MEM_REF:
9489 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9490 if (TREE_CODE (arg0) == ADDR_EXPR
9491 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9492 {
9493 tree iref = TREE_OPERAND (arg0, 0);
9494 return fold_build2 (MEM_REF, type,
9495 TREE_OPERAND (iref, 0),
9496 int_const_binop (PLUS_EXPR, arg1,
9497 TREE_OPERAND (iref, 1), 0));
9498 }
9499
9500 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9501 if (TREE_CODE (arg0) == ADDR_EXPR
9502 && handled_component_p (TREE_OPERAND (arg0, 0)))
9503 {
9504 tree base;
9505 HOST_WIDE_INT coffset;
9506 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9507 &coffset);
9508 if (!base)
9509 return NULL_TREE;
9510 return fold_build2 (MEM_REF, type,
9511 build_fold_addr_expr (base),
9512 int_const_binop (PLUS_EXPR, arg1,
9513 size_int (coffset), 0));
9514 }
9515
9516 return NULL_TREE;
9517
5be014d5
AP
9518 case POINTER_PLUS_EXPR:
9519 /* 0 +p index -> (type)index */
9520 if (integer_zerop (arg0))
db3927fb 9521 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5be014d5
AP
9522
9523 /* PTR +p 0 -> PTR */
9524 if (integer_zerop (arg1))
db3927fb 9525 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
5be014d5
AP
9526
9527 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9528 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9529 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
db3927fb
AH
9530 return fold_convert_loc (loc, type,
9531 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9532 fold_convert_loc (loc, sizetype,
9533 arg1),
9534 fold_convert_loc (loc, sizetype,
9535 arg0)));
5be014d5 9536
f7d1e0c6
RG
9537 /* index +p PTR -> PTR +p index */
9538 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9539 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
db3927fb
AH
9540 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9541 fold_convert_loc (loc, type, arg1),
9542 fold_convert_loc (loc, sizetype, arg0));
f7d1e0c6 9543
5be014d5
AP
9544 /* (PTR +p B) +p A -> PTR +p (B + A) */
9545 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9546 {
9547 tree inner;
db3927fb 9548 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
5be014d5 9549 tree arg00 = TREE_OPERAND (arg0, 0);
db3927fb
AH
9550 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9551 arg01, fold_convert_loc (loc, sizetype, arg1));
9552 return fold_convert_loc (loc, type,
9553 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9554 TREE_TYPE (arg00),
9555 arg00, inner));
5be014d5
AP
9556 }
9557
9558 /* PTR_CST +p CST -> CST1 */
9559 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
db3927fb
AH
9560 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9561 fold_convert_loc (loc, type, arg1));
5be014d5
AP
9562
9563 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9564 of the array. The loop optimizer sometimes produces this type of
9565 expression. */
9566 if (TREE_CODE (arg0) == ADDR_EXPR)
9567 {
db3927fb
AH
9568 tem = try_move_mult_to_index (loc, arg0,
9569 fold_convert_loc (loc, sizetype, arg1));
5be014d5 9570 if (tem)
db3927fb 9571 return fold_convert_loc (loc, type, tem);
5be014d5
AP
9572 }
9573
9574 return NULL_TREE;
8015455a 9575
0aee4751
KH
9576 case PLUS_EXPR:
9577 /* A + (-B) -> A - B */
9578 if (TREE_CODE (arg1) == NEGATE_EXPR)
db3927fb
AH
9579 return fold_build2_loc (loc, MINUS_EXPR, type,
9580 fold_convert_loc (loc, type, arg0),
9581 fold_convert_loc (loc, type,
9582 TREE_OPERAND (arg1, 0)));
0aee4751
KH
9583 /* (-A) + B -> B - A */
9584 if (TREE_CODE (arg0) == NEGATE_EXPR
9585 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
db3927fb
AH
9586 return fold_build2_loc (loc, MINUS_EXPR, type,
9587 fold_convert_loc (loc, type, arg1),
9588 fold_convert_loc (loc, type,
9589 TREE_OPERAND (arg0, 0)));
0ed9a3e3 9590
c22f6d33 9591 if (INTEGRAL_TYPE_P (type))
0aee4751 9592 {
c22f6d33
UB
9593 /* Convert ~A + 1 to -A. */
9594 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9595 && integer_onep (arg1))
db3927fb
AH
9596 return fold_build1_loc (loc, NEGATE_EXPR, type,
9597 fold_convert_loc (loc, type,
9598 TREE_OPERAND (arg0, 0)));
0aee4751 9599
870aa1eb
RS
9600 /* ~X + X is -1. */
9601 if (TREE_CODE (arg0) == BIT_NOT_EXPR
eeef0e45 9602 && !TYPE_OVERFLOW_TRAPS (type))
870aa1eb 9603 {
a49c5793
SP
9604 tree tem = TREE_OPERAND (arg0, 0);
9605
9606 STRIP_NOPS (tem);
9607 if (operand_equal_p (tem, arg1, 0))
9608 {
9609 t1 = build_int_cst_type (type, -1);
db3927fb 9610 return omit_one_operand_loc (loc, type, t1, arg1);
a49c5793 9611 }
870aa1eb
RS
9612 }
9613
9614 /* X + ~X is -1. */
9615 if (TREE_CODE (arg1) == BIT_NOT_EXPR
eeef0e45 9616 && !TYPE_OVERFLOW_TRAPS (type))
870aa1eb 9617 {
a49c5793
SP
9618 tree tem = TREE_OPERAND (arg1, 0);
9619
9620 STRIP_NOPS (tem);
9621 if (operand_equal_p (arg0, tem, 0))
9622 {
9623 t1 = build_int_cst_type (type, -1);
db3927fb 9624 return omit_one_operand_loc (loc, type, t1, arg0);
a49c5793
SP
9625 }
9626 }
65648dd4
RG
9627
9628 /* X + (X / CST) * -CST is X % CST. */
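          /* For instance, with X = 17 and CST = 5:
             17 + (17/5) * -5 = 17 - 15 = 2, which equals 17 % 5.  */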
9629 if (TREE_CODE (arg1) == MULT_EXPR
9630 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9631 && operand_equal_p (arg0,
9632 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9633 {
9634 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9635 tree cst1 = TREE_OPERAND (arg1, 1);
db3927fb
AH
9636 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9637 cst1, cst0);
65648dd4 9638 if (sum && integer_zerop (sum))
db3927fb
AH
9639 return fold_convert_loc (loc, type,
9640 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9641 TREE_TYPE (arg0), arg0,
9642 cst0));
65648dd4 9643 }
c22f6d33
UB
9644 }
9645
9646 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
a1a82611
RE
9647 same or one. Make sure type is not saturating.
9648 fold_plusminus_mult_expr will re-associate. */
c22f6d33
UB
9649 if ((TREE_CODE (arg0) == MULT_EXPR
9650 || TREE_CODE (arg1) == MULT_EXPR)
325217ed 9651 && !TYPE_SATURATING (type)
a1a82611 9652 && (!FLOAT_TYPE_P (type) || flag_associative_math))
c22f6d33 9653 {
db3927fb 9654 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
c22f6d33
UB
9655 if (tem)
9656 return tem;
9657 }
9658
9659 if (! FLOAT_TYPE_P (type))
9660 {
9661 if (integer_zerop (arg1))
db3927fb 9662 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
870aa1eb 9663
0aee4751
KH
9664 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9665 with a constant, and the two constants have no bits in common,
9666 we should treat this as a BIT_IOR_EXPR since this may produce more
9667 simplifications. */
9668 if (TREE_CODE (arg0) == BIT_AND_EXPR
9669 && TREE_CODE (arg1) == BIT_AND_EXPR
9670 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9671 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9672 && integer_zerop (const_binop (BIT_AND_EXPR,
9673 TREE_OPERAND (arg0, 1),
43a5d30b 9674 TREE_OPERAND (arg1, 1))))
0aee4751
KH
9675 {
9676 code = BIT_IOR_EXPR;
9677 goto bit_ior;
9678 }
9679
9680 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9681 (plus (plus (mult) (mult)) (foo)) so that we can
9682 take advantage of the factoring cases below. */
9683 if (((TREE_CODE (arg0) == PLUS_EXPR
9684 || TREE_CODE (arg0) == MINUS_EXPR)
9685 && TREE_CODE (arg1) == MULT_EXPR)
9686 || ((TREE_CODE (arg1) == PLUS_EXPR
9687 || TREE_CODE (arg1) == MINUS_EXPR)
9688 && TREE_CODE (arg0) == MULT_EXPR))
9689 {
9690 tree parg0, parg1, parg, marg;
9691 enum tree_code pcode;
9692
9693 if (TREE_CODE (arg1) == MULT_EXPR)
9694 parg = arg0, marg = arg1;
9695 else
9696 parg = arg1, marg = arg0;
9697 pcode = TREE_CODE (parg);
9698 parg0 = TREE_OPERAND (parg, 0);
9699 parg1 = TREE_OPERAND (parg, 1);
9700 STRIP_NOPS (parg0);
9701 STRIP_NOPS (parg1);
9702
9703 if (TREE_CODE (parg0) == MULT_EXPR
9704 && TREE_CODE (parg1) != MULT_EXPR)
db3927fb
AH
9705 return fold_build2_loc (loc, pcode, type,
9706 fold_build2_loc (loc, PLUS_EXPR, type,
9707 fold_convert_loc (loc, type,
9708 parg0),
9709 fold_convert_loc (loc, type,
9710 marg)),
9711 fold_convert_loc (loc, type, parg1));
0aee4751
KH
9712 if (TREE_CODE (parg0) != MULT_EXPR
9713 && TREE_CODE (parg1) == MULT_EXPR)
db3927fb
AH
9714 return
9715 fold_build2_loc (loc, PLUS_EXPR, type,
9716 fold_convert_loc (loc, type, parg0),
9717 fold_build2_loc (loc, pcode, type,
9718 fold_convert_loc (loc, type, marg),
9719 fold_convert_loc (loc, type,
9720 parg1)));
0aee4751 9721 }
0aee4751
KH
9722 }
9723 else
9724 {
9725 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9726 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
db3927fb 9727 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
9728
9729 /* Likewise if the operands are reversed. */
9730 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
db3927fb 9731 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
0aee4751
KH
9732
9733 /* Convert X + -C into X - C. */
9734 if (TREE_CODE (arg1) == REAL_CST
9735 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9736 {
9737 tem = fold_negate_const (arg1, type);
9738 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
db3927fb
AH
9739 return fold_build2_loc (loc, MINUS_EXPR, type,
9740 fold_convert_loc (loc, type, arg0),
9741 fold_convert_loc (loc, type, tem));
0aee4751
KH
9742 }
9743
9f539671
RG
9744 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9745 to __complex__ ( x, y ). This is not the same for SNaNs or
d1ad84c2 9746 if signed zeros are involved. */
9f539671
RG
9747 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9748 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9749 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9750 {
9751 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
db3927fb
AH
9752 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9753 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9f539671
RG
9754 bool arg0rz = false, arg0iz = false;
9755 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9756 || (arg0i && (arg0iz = real_zerop (arg0i))))
9757 {
db3927fb
AH
9758 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9759 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9f539671
RG
9760 if (arg0rz && arg1i && real_zerop (arg1i))
9761 {
9762 tree rp = arg1r ? arg1r
9763 : build1 (REALPART_EXPR, rtype, arg1);
9764 tree ip = arg0i ? arg0i
9765 : build1 (IMAGPART_EXPR, rtype, arg0);
db3927fb 9766 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9f539671
RG
9767 }
9768 else if (arg0iz && arg1r && real_zerop (arg1r))
9769 {
9770 tree rp = arg0r ? arg0r
9771 : build1 (REALPART_EXPR, rtype, arg0);
9772 tree ip = arg1i ? arg1i
9773 : build1 (IMAGPART_EXPR, rtype, arg1);
db3927fb 9774 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9f539671
RG
9775 }
9776 }
9777 }
9778
e0dd989a 9779 if (flag_unsafe_math_optimizations
f8912a55
PB
9780 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9781 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
db3927fb 9782 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
f8912a55
PB
9783 return tem;
9784
0aee4751
KH
9785 /* Convert x+x into x*2.0. */
9786 if (operand_equal_p (arg0, arg1, 0)
9787 && SCALAR_FLOAT_TYPE_P (type))
db3927fb 9788 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
7f20a5b7 9789 build_real (type, dconst2));
0aee4751 9790
b8698a0f 9791 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
a1a82611
RE
9792 We associate floats only if the user has specified
9793 -fassociative-math. */
9794 if (flag_associative_math
0aee4751
KH
9795 && TREE_CODE (arg1) == PLUS_EXPR
9796 && TREE_CODE (arg0) != MULT_EXPR)
9797 {
9798 tree tree10 = TREE_OPERAND (arg1, 0);
9799 tree tree11 = TREE_OPERAND (arg1, 1);
9800 if (TREE_CODE (tree11) == MULT_EXPR
9801 && TREE_CODE (tree10) == MULT_EXPR)
9802 {
9803 tree tree0;
db3927fb
AH
9804 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9805 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
0aee4751
KH
9806 }
9807 }
b8698a0f 9808 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
a1a82611
RE
9809 We associate floats only if the user has specified
9810 -fassociative-math. */
9811 if (flag_associative_math
0aee4751
KH
9812 && TREE_CODE (arg0) == PLUS_EXPR
9813 && TREE_CODE (arg1) != MULT_EXPR)
9814 {
9815 tree tree00 = TREE_OPERAND (arg0, 0);
9816 tree tree01 = TREE_OPERAND (arg0, 1);
9817 if (TREE_CODE (tree01) == MULT_EXPR
9818 && TREE_CODE (tree00) == MULT_EXPR)
9819 {
9820 tree tree0;
db3927fb
AH
9821 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9822 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
0aee4751
KH
9823 }
9824 }
9825 }
9826
9827 bit_rotate:
9828 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9829 is a rotate of A by C1 bits. */
9830 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9831 is a rotate of A by B bits. */
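  /* E.g. for a 32-bit unsigned A, (A << 8) + (A >> 24) and
     (A << B) + (A >> (32 - B)) are both recognized as left rotates.  */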
9832 {
9833 enum tree_code code0, code1;
70582b3a 9834 tree rtype;
0aee4751
KH
9835 code0 = TREE_CODE (arg0);
9836 code1 = TREE_CODE (arg1);
9837 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9838 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9839 && operand_equal_p (TREE_OPERAND (arg0, 0),
9840 TREE_OPERAND (arg1, 0), 0)
70582b3a
RG
9841 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9842 TYPE_UNSIGNED (rtype))
9843 /* Only create rotates in complete modes. Other cases are not
9844 expanded properly. */
9845 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
0aee4751
KH
9846 {
9847 tree tree01, tree11;
9848 enum tree_code code01, code11;
9849
9850 tree01 = TREE_OPERAND (arg0, 1);
9851 tree11 = TREE_OPERAND (arg1, 1);
9852 STRIP_NOPS (tree01);
9853 STRIP_NOPS (tree11);
9854 code01 = TREE_CODE (tree01);
9855 code11 = TREE_CODE (tree11);
9856 if (code01 == INTEGER_CST
9857 && code11 == INTEGER_CST
9858 && TREE_INT_CST_HIGH (tree01) == 0
9859 && TREE_INT_CST_HIGH (tree11) == 0
9860 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9861 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
db3927fb 9862 {
c9019218
JJ
9863 tem = build2_loc (loc, LROTATE_EXPR,
9864 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9865 TREE_OPERAND (arg0, 0),
9866 code0 == LSHIFT_EXPR ? tree01 : tree11);
db3927fb
AH
9867 return fold_convert_loc (loc, type, tem);
9868 }
0aee4751
KH
9869 else if (code11 == MINUS_EXPR)
9870 {
9871 tree tree110, tree111;
9872 tree110 = TREE_OPERAND (tree11, 0);
9873 tree111 = TREE_OPERAND (tree11, 1);
9874 STRIP_NOPS (tree110);
9875 STRIP_NOPS (tree111);
9876 if (TREE_CODE (tree110) == INTEGER_CST
9877 && 0 == compare_tree_int (tree110,
9878 TYPE_PRECISION
9879 (TREE_TYPE (TREE_OPERAND
9880 (arg0, 0))))
9881 && operand_equal_p (tree01, tree111, 0))
db3927fb
AH
9882 return
9883 fold_convert_loc (loc, type,
9884 build2 ((code0 == LSHIFT_EXPR
9885 ? LROTATE_EXPR
9886 : RROTATE_EXPR),
9887 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9888 TREE_OPERAND (arg0, 0), tree01));
0aee4751
KH
9889 }
9890 else if (code01 == MINUS_EXPR)
9891 {
9892 tree tree010, tree011;
9893 tree010 = TREE_OPERAND (tree01, 0);
9894 tree011 = TREE_OPERAND (tree01, 1);
9895 STRIP_NOPS (tree010);
9896 STRIP_NOPS (tree011);
9897 if (TREE_CODE (tree010) == INTEGER_CST
9898 && 0 == compare_tree_int (tree010,
9899 TYPE_PRECISION
9900 (TREE_TYPE (TREE_OPERAND
9901 (arg0, 0))))
9902 && operand_equal_p (tree11, tree011, 0))
db3927fb
AH
9903 return fold_convert_loc
9904 (loc, type,
9905 build2 ((code0 != LSHIFT_EXPR
9906 ? LROTATE_EXPR
9907 : RROTATE_EXPR),
9908 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9909 TREE_OPERAND (arg0, 0), tree11));
0aee4751
KH
9910 }
9911 }
9912 }
9913
9914 associate:
9915 /* In most languages, can't associate operations on floats through
9916 parentheses. Rather than remember where the parentheses were, we
9917 don't associate floats at all, unless the user has specified
a1a82611 9918 -fassociative-math.
325217ed 9919 And, we need to make sure type is not saturating. */
0aee4751 9920
a1a82611 9921 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
325217ed 9922 && !TYPE_SATURATING (type))
0aee4751
KH
9923 {
9924 tree var0, con0, lit0, minus_lit0;
9925 tree var1, con1, lit1, minus_lit1;
a6d5f37c 9926 bool ok = true;
0aee4751
KH
9927
9928 /* Split both trees into variables, constants, and literals. Then
9929 associate each group together, the constants with literals,
9930 then the result with variables. This increases the chances of
9931 literals being recombined later and of generating relocatable
9932 expressions for the sum of a constant and literal. */
9933 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9934 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9935 code == MINUS_EXPR);
9936
9e9ef331
EB
9937 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9938 if (code == MINUS_EXPR)
9939 code = PLUS_EXPR;
9940
9941 /* With undefined overflow we can only associate constants with one
9942 variable, and constants whose association doesn't overflow. */
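          /* E.g. for signed 32-bit X, (X + 2000000000) + 2000000000 is
             left alone, because folding the two constants to 4000000000
             would itself overflow.  */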
9943 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9944 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
a6d5f37c 9945 {
9e9ef331
EB
9946 if (var0 && var1)
9947 {
9948 tree tmp0 = var0;
9949 tree tmp1 = var1;
9950
9951 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9952 tmp0 = TREE_OPERAND (tmp0, 0);
9953 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9954 tmp1 = TREE_OPERAND (tmp1, 0);
9955 /* The only case we can still associate with two variables
9956 is if they are the same, modulo negation. */
9957 if (!operand_equal_p (tmp0, tmp1, 0))
9958 ok = false;
9959 }
9960
9961 if (ok && lit0 && lit1)
9962 {
9963 tree tmp0 = fold_convert (type, lit0);
9964 tree tmp1 = fold_convert (type, lit1);
9965
9966 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
9967 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
9968 ok = false;
9969 }
a6d5f37c
RG
9970 }
9971
0aee4751
KH
9972 /* Only do something if we found more than two objects. Otherwise,
9973 nothing has changed and we risk infinite recursion. */
a6d5f37c
RG
9974 if (ok
9975 && (2 < ((var0 != 0) + (var1 != 0)
9976 + (con0 != 0) + (con1 != 0)
9977 + (lit0 != 0) + (lit1 != 0)
9978 + (minus_lit0 != 0) + (minus_lit1 != 0))))
0aee4751 9979 {
db3927fb
AH
9980 var0 = associate_trees (loc, var0, var1, code, type);
9981 con0 = associate_trees (loc, con0, con1, code, type);
9982 lit0 = associate_trees (loc, lit0, lit1, code, type);
9983 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
0aee4751
KH
9984
9985 /* Preserve the MINUS_EXPR if the negative part of the literal is
9986 greater than the positive part. Otherwise, the multiplicative
9987 folding code (i.e extract_muldiv) may be fooled in case
9988 unsigned constants are subtracted, like in the following
9989 example: ((X*2 + 4) - 8U)/2. */
9990 if (minus_lit0 && lit0)
9991 {
9992 if (TREE_CODE (lit0) == INTEGER_CST
9993 && TREE_CODE (minus_lit0) == INTEGER_CST
9994 && tree_int_cst_lt (lit0, minus_lit0))
9995 {
db3927fb 9996 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
0aee4751
KH
9997 MINUS_EXPR, type);
9998 lit0 = 0;
9999 }
10000 else
10001 {
db3927fb 10002 lit0 = associate_trees (loc, lit0, minus_lit0,
0aee4751
KH
10003 MINUS_EXPR, type);
10004 minus_lit0 = 0;
10005 }
10006 }
10007 if (minus_lit0)
10008 {
10009 if (con0 == 0)
db3927fb
AH
10010 return
10011 fold_convert_loc (loc, type,
10012 associate_trees (loc, var0, minus_lit0,
10013 MINUS_EXPR, type));
0aee4751
KH
10014 else
10015 {
db3927fb 10016 con0 = associate_trees (loc, con0, minus_lit0,
0aee4751 10017 MINUS_EXPR, type);
db3927fb
AH
10018 return
10019 fold_convert_loc (loc, type,
10020 associate_trees (loc, var0, con0,
10021 PLUS_EXPR, type));
0aee4751
KH
10022 }
10023 }
10024
db3927fb
AH
10025 con0 = associate_trees (loc, con0, lit0, code, type);
10026 return
10027 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10028 code, type));
0aee4751
KH
10029 }
10030 }
10031
62ab45cc 10032 return NULL_TREE;
0aee4751
KH
10033
10034 case MINUS_EXPR:
5be014d5
AP
10035 /* Pointer simplifications for subtraction, simple reassociations. */
10036 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10037 {
10038 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10039 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10040 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10041 {
db3927fb
AH
10042 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10043 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10044 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10045 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10046 return fold_build2_loc (loc, PLUS_EXPR, type,
10047 fold_build2_loc (loc, MINUS_EXPR, type,
10048 arg00, arg10),
10049 fold_build2_loc (loc, MINUS_EXPR, type,
10050 arg01, arg11));
5be014d5
AP
10051 }
10052 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10053 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10054 {
db3927fb
AH
10055 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10056 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10057 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10058 fold_convert_loc (loc, type, arg1));
5be014d5 10059 if (tmp)
db3927fb 10060 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
5be014d5
AP
10061 }
10062 }
0aee4751
KH
10063 /* A - (-B) -> A + B */
10064 if (TREE_CODE (arg1) == NEGATE_EXPR)
db3927fb
AH
10065 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10066 fold_convert_loc (loc, type,
10067 TREE_OPERAND (arg1, 0)));
0aee4751
KH
10068 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10069 if (TREE_CODE (arg0) == NEGATE_EXPR
10070 && (FLOAT_TYPE_P (type)
b0cd88d2 10071 || INTEGRAL_TYPE_P (type))
0aee4751
KH
10072 && negate_expr_p (arg1)
10073 && reorder_operands_p (arg0, arg1))
db3927fb
AH
10074 return fold_build2_loc (loc, MINUS_EXPR, type,
10075 fold_convert_loc (loc, type,
10076 negate_expr (arg1)),
10077 fold_convert_loc (loc, type,
10078 TREE_OPERAND (arg0, 0)));
cbefb99c
JL
10079 /* Convert -A - 1 to ~A. */
10080 if (INTEGRAL_TYPE_P (type)
10081 && TREE_CODE (arg0) == NEGATE_EXPR
870aa1eb 10082 && integer_onep (arg1)
eeef0e45 10083 && !TYPE_OVERFLOW_TRAPS (type))
db3927fb
AH
10084 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10085 fold_convert_loc (loc, type,
10086 TREE_OPERAND (arg0, 0)));
cbefb99c
JL
10087
10088 /* Convert -1 - A to ~A. */
10089 if (INTEGRAL_TYPE_P (type)
10090 && integer_all_onesp (arg0))
db3927fb 10091 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
0aee4751 10092
65648dd4
RG
10093
10094 /* X - (X / CST) * CST is X % CST. */
10095 if (INTEGRAL_TYPE_P (type)
10096 && TREE_CODE (arg1) == MULT_EXPR
10097 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10098 && operand_equal_p (arg0,
10099 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10100 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10101 TREE_OPERAND (arg1, 1), 0))
db3927fb
AH
10102 return
10103 fold_convert_loc (loc, type,
10104 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10105 arg0, TREE_OPERAND (arg1, 1)));
65648dd4 10106
0aee4751
KH
10107 if (! FLOAT_TYPE_P (type))
10108 {
fd6c76f4 10109 if (integer_zerop (arg0))
db3927fb 10110 return negate_expr (fold_convert_loc (loc, type, arg1));
0aee4751 10111 if (integer_zerop (arg1))
db3927fb 10112 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
10113
10114 /* Fold A - (A & B) into ~B & A. */
10115 if (!TREE_SIDE_EFFECTS (arg0)
10116 && TREE_CODE (arg1) == BIT_AND_EXPR)
10117 {
10118 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
48075623 10119 {
db3927fb
AH
10120 tree arg10 = fold_convert_loc (loc, type,
10121 TREE_OPERAND (arg1, 0));
10122 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10123 fold_build1_loc (loc, BIT_NOT_EXPR,
10124 type, arg10),
10125 fold_convert_loc (loc, type, arg0));
48075623 10126 }
0aee4751 10127 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
48075623 10128 {
db3927fb
AH
10129 tree arg11 = fold_convert_loc (loc,
10130 type, TREE_OPERAND (arg1, 1));
10131 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10132 fold_build1_loc (loc, BIT_NOT_EXPR,
10133 type, arg11),
10134 fold_convert_loc (loc, type, arg0));
48075623 10135 }
0aee4751
KH
10136 }
10137
10138 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10139 any power of 2 minus 1. */
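          /* For instance, with A = 13 and B = 7:
             (13 & ~7) - (13 & 7) = 8 - 5 = 3, and (13 ^ 7) - 7 = 10 - 7 = 3.  */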
10140 if (TREE_CODE (arg0) == BIT_AND_EXPR
10141 && TREE_CODE (arg1) == BIT_AND_EXPR
10142 && operand_equal_p (TREE_OPERAND (arg0, 0),
10143 TREE_OPERAND (arg1, 0), 0))
10144 {
10145 tree mask0 = TREE_OPERAND (arg0, 1);
10146 tree mask1 = TREE_OPERAND (arg1, 1);
db3927fb 10147 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
0aee4751
KH
10148
10149 if (operand_equal_p (tem, mask1, 0))
10150 {
db3927fb 10151 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
7f20a5b7 10152 TREE_OPERAND (arg0, 0), mask1);
db3927fb 10153 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
0aee4751
KH
10154 }
10155 }
10156 }
10157
10158 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10159 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
db3927fb 10160 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
10161
10162 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10163 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10164 (-ARG1 + ARG0) reduces to -ARG1. */
fd6c76f4 10165 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
db3927fb 10166 return negate_expr (fold_convert_loc (loc, type, arg1));
0aee4751 10167
d1ad84c2
KG
10168 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10169 __complex__ ( x, -y ). This is not the same for SNaNs or if
10170 signed zeros are involved. */
10171 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10172 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10173 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10174 {
10175 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
db3927fb
AH
10176 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10177 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
d1ad84c2
KG
10178 bool arg0rz = false, arg0iz = false;
10179 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10180 || (arg0i && (arg0iz = real_zerop (arg0i))))
10181 {
db3927fb
AH
10182 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10183 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
d1ad84c2
KG
10184 if (arg0rz && arg1i && real_zerop (arg1i))
10185 {
db3927fb 10186 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
d1ad84c2
KG
10187 arg1r ? arg1r
10188 : build1 (REALPART_EXPR, rtype, arg1));
10189 tree ip = arg0i ? arg0i
10190 : build1 (IMAGPART_EXPR, rtype, arg0);
db3927fb 10191 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
d1ad84c2
KG
10192 }
10193 else if (arg0iz && arg1r && real_zerop (arg1r))
10194 {
10195 tree rp = arg0r ? arg0r
10196 : build1 (REALPART_EXPR, rtype, arg0);
db3927fb 10197 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
d1ad84c2
KG
10198 arg1i ? arg1i
10199 : build1 (IMAGPART_EXPR, rtype, arg1));
db3927fb 10200 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
d1ad84c2
KG
10201 }
10202 }
10203 }
10204
0aee4751
KH
10205 /* Fold &x - &x. This can happen from &x.foo - &x.
10206 This is unsafe for certain floats even in non-IEEE formats.
10207 In IEEE, it is unsafe because it does wrong for NaNs.
10208 Also note that operand_equal_p is always false if an operand
10209 is volatile. */
10210
81d2fb02 10211 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
0aee4751 10212 && operand_equal_p (arg0, arg1, 0))
e8160c9a 10213 return build_zero_cst (type);
0aee4751
KH
10214
10215 /* A - B -> A + (-B) if B is easily negatable. */
fd6c76f4 10216 if (negate_expr_p (arg1)
0aee4751
KH
10217 && ((FLOAT_TYPE_P (type)
10218 /* Avoid this transformation if B is a positive REAL_CST. */
10219 && (TREE_CODE (arg1) != REAL_CST
10220 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
b0cd88d2 10221 || INTEGRAL_TYPE_P (type)))
db3927fb
AH
10222 return fold_build2_loc (loc, PLUS_EXPR, type,
10223 fold_convert_loc (loc, type, arg0),
10224 fold_convert_loc (loc, type,
10225 negate_expr (arg1)));
0aee4751
KH
10226
10227 /* Try folding difference of addresses. */
10228 {
10229 HOST_WIDE_INT diff;
10230
10231 if ((TREE_CODE (arg0) == ADDR_EXPR
10232 || TREE_CODE (arg1) == ADDR_EXPR)
10233 && ptr_difference_const (arg0, arg1, &diff))
10234 return build_int_cst_type (type, diff);
10235 }
75cf42cc
RG
10236
10237 /* Fold &a[i] - &a[j] to i-j. */
10238 if (TREE_CODE (arg0) == ADDR_EXPR
10239 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10240 && TREE_CODE (arg1) == ADDR_EXPR
10241 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10242 {
10243 tree aref0 = TREE_OPERAND (arg0, 0);
10244 tree aref1 = TREE_OPERAND (arg1, 0);
10245 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10246 TREE_OPERAND (aref1, 0), 0))
10247 {
db3927fb
AH
10248 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10249 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
75cf42cc
RG
10250 tree esz = array_ref_element_size (aref0);
10251 tree diff = build2 (MINUS_EXPR, type, op0, op1);
db3927fb
AH
10252 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10253 fold_convert_loc (loc, type, esz));
b8698a0f 10254
75cf42cc
RG
10255 }
10256 }
10257
e0dd989a
RG
10258 if (FLOAT_TYPE_P (type)
10259 && flag_unsafe_math_optimizations
f8912a55
PB
10260 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10261 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
db3927fb 10262 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
f8912a55
PB
10263 return tem;
10264
0ed9a3e3 10265 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
a1a82611
RE
10266 same or one. Make sure type is not saturating.
10267 fold_plusminus_mult_expr will re-associate. */
0ed9a3e3
RG
10268 if ((TREE_CODE (arg0) == MULT_EXPR
10269 || TREE_CODE (arg1) == MULT_EXPR)
325217ed 10270 && !TYPE_SATURATING (type)
a1a82611 10271 && (!FLOAT_TYPE_P (type) || flag_associative_math))
0ed9a3e3 10272 {
db3927fb 10273 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
0ed9a3e3
RG
10274 if (tem)
10275 return tem;
0aee4751
KH
10276 }
10277
10278 goto associate;
10279
10280 case MULT_EXPR:
10281 /* (-A) * (-B) -> A * B */
10282 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
db3927fb
AH
10283 return fold_build2_loc (loc, MULT_EXPR, type,
10284 fold_convert_loc (loc, type,
10285 TREE_OPERAND (arg0, 0)),
10286 fold_convert_loc (loc, type,
10287 negate_expr (arg1)));
0aee4751 10288 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
db3927fb
AH
10289 return fold_build2_loc (loc, MULT_EXPR, type,
10290 fold_convert_loc (loc, type,
10291 negate_expr (arg0)),
10292 fold_convert_loc (loc, type,
10293 TREE_OPERAND (arg1, 0)));
0aee4751 10294
0aee4751
KH
10295 if (! FLOAT_TYPE_P (type))
10296 {
10297 if (integer_zerop (arg1))
db3927fb 10298 return omit_one_operand_loc (loc, type, arg1, arg0);
0aee4751 10299 if (integer_onep (arg1))
db3927fb 10300 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
b9e67f8b
RG
10301 /* Transform x * -1 into -x. Make sure to do the negation
10302 on the original operand with conversions not stripped
10303 because we can only strip non-sign-changing conversions. */
694d73e1 10304 if (integer_all_onesp (arg1))
db3927fb 10305 return fold_convert_loc (loc, type, negate_expr (op0));
b0cd88d2
RG
10306 /* Transform x * -C into -x * C if x is easily negatable. */
10307 if (TREE_CODE (arg1) == INTEGER_CST
10308 && tree_int_cst_sgn (arg1) == -1
10309 && negate_expr_p (arg0)
10310 && (tem = negate_expr (arg1)) != arg1
10311 && !TREE_OVERFLOW (tem))
db3927fb
AH
10312 return fold_build2_loc (loc, MULT_EXPR, type,
10313 fold_convert_loc (loc, type,
10314 negate_expr (arg0)),
10315 tem);
0aee4751
KH
10316
10317 /* (a * (1 << b)) is (a << b) */
10318 if (TREE_CODE (arg1) == LSHIFT_EXPR
10319 && integer_onep (TREE_OPERAND (arg1, 0)))
db3927fb 10320 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
7f20a5b7 10321 TREE_OPERAND (arg1, 1));
0aee4751
KH
10322 if (TREE_CODE (arg0) == LSHIFT_EXPR
10323 && integer_onep (TREE_OPERAND (arg0, 0)))
db3927fb 10324 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
7f20a5b7 10325 TREE_OPERAND (arg0, 1));
0aee4751 10326
1447bf05
RG
10327 /* (A + A) * C -> A * 2 * C */
10328 if (TREE_CODE (arg0) == PLUS_EXPR
10329 && TREE_CODE (arg1) == INTEGER_CST
10330 && operand_equal_p (TREE_OPERAND (arg0, 0),
10331 TREE_OPERAND (arg0, 1), 0))
db3927fb
AH
10332 return fold_build2_loc (loc, MULT_EXPR, type,
10333 omit_one_operand_loc (loc, type,
10334 TREE_OPERAND (arg0, 0),
1447bf05 10335 TREE_OPERAND (arg0, 1)),
db3927fb 10336 fold_build2_loc (loc, MULT_EXPR, type,
1447bf05
RG
10337 build_int_cst (type, 2) , arg1));
10338
6ac01510 10339 strict_overflow_p = false;
0aee4751 10340 if (TREE_CODE (arg1) == INTEGER_CST
ac029795 10341 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
6ac01510
ILT
10342 &strict_overflow_p)))
10343 {
10344 if (strict_overflow_p)
10345 fold_overflow_warning (("assuming signed overflow does not "
10346 "occur when simplifying "
10347 "multiplication"),
10348 WARN_STRICT_OVERFLOW_MISC);
db3927fb 10349 return fold_convert_loc (loc, type, tem);
6ac01510 10350 }
0aee4751 10351
99b25753
RS
10352 /* Optimize z * conj(z) for integer complex numbers. */
10353 if (TREE_CODE (arg0) == CONJ_EXPR
10354 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
db3927fb 10355 return fold_mult_zconjz (loc, type, arg1);
99b25753
RS
10356 if (TREE_CODE (arg1) == CONJ_EXPR
10357 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
db3927fb 10358 return fold_mult_zconjz (loc, type, arg0);
0aee4751
KH
10359 }
10360 else
10361 {
10362 /* Maybe fold x * 0 to 0. The expressions aren't the same
10363 when x is NaN, since x * 0 is also NaN. Nor are they the
10364 same in modes with signed zeros, since multiplying a
10365 negative value by 0 gives -0, not +0. */
10366 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10367 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10368 && real_zerop (arg1))
db3927fb 10369 return omit_one_operand_loc (loc, type, arg1, arg0);
c94f9067
JM
10370 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10371 Likewise for complex arithmetic with signed zeros. */
0aee4751 10372 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
c94f9067
JM
10373 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10374 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
0aee4751 10375 && real_onep (arg1))
db3927fb 10376 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
10377
10378 /* Transform x * -1.0 into -x. */
10379 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
c94f9067
JM
10380 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10381 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
0aee4751 10382 && real_minus_onep (arg1))
db3927fb 10383 return fold_convert_loc (loc, type, negate_expr (arg0));
0aee4751 10384
a1a82611
RE
10385 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10386 the result for floating point types due to rounding so it is applied
10387 only if -fassociative-math was specified. */
10388 if (flag_associative_math
0aee4751
KH
10389 && TREE_CODE (arg0) == RDIV_EXPR
10390 && TREE_CODE (arg1) == REAL_CST
10391 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10392 {
10393 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
43a5d30b 10394 arg1);
0aee4751 10395 if (tem)
db3927fb 10396 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
7f20a5b7 10397 TREE_OPERAND (arg0, 1));
0aee4751
KH
10398 }
10399
10400 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10401 if (operand_equal_p (arg0, arg1, 0))
10402 {
10403 tree tem = fold_strip_sign_ops (arg0);
10404 if (tem != NULL_TREE)
10405 {
db3927fb
AH
10406 tem = fold_convert_loc (loc, type, tem);
10407 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
0aee4751
KH
10408 }
10409 }
10410
9f539671 10411 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
d1ad84c2 10412 This is not the same for NaNs or if signed zeros are
9f539671
RG
10413 involved. */
10414 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10415 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10416 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10417 && TREE_CODE (arg1) == COMPLEX_CST
10418 && real_zerop (TREE_REALPART (arg1)))
10419 {
10420 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10421 if (real_onep (TREE_IMAGPART (arg1)))
db3927fb
AH
10422 return
10423 fold_build2_loc (loc, COMPLEX_EXPR, type,
10424 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10425 rtype, arg0)),
10426 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9f539671 10427 else if (real_minus_onep (TREE_IMAGPART (arg1)))
db3927fb
AH
10428 return
10429 fold_build2_loc (loc, COMPLEX_EXPR, type,
10430 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10431 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10432 rtype, arg0)));
9f539671
RG
10433 }
10434
99b25753
RS
10435 /* Optimize z * conj(z) for floating point complex numbers.
10436 Guarded by flag_unsafe_math_optimizations as non-finite
10437 imaginary components don't produce scalar results. */
10438 if (flag_unsafe_math_optimizations
10439 && TREE_CODE (arg0) == CONJ_EXPR
10440 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
db3927fb 10441 return fold_mult_zconjz (loc, type, arg1);
99b25753
RS
10442 if (flag_unsafe_math_optimizations
10443 && TREE_CODE (arg1) == CONJ_EXPR
10444 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
db3927fb 10445 return fold_mult_zconjz (loc, type, arg0);
99b25753 10446
0aee4751
KH
10447 if (flag_unsafe_math_optimizations)
10448 {
10449 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10450 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10451
10452 /* Optimizations of root(...)*root(...). */
10453 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10454 {
5039610b
SL
10455 tree rootfn, arg;
10456 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10457 tree arg10 = CALL_EXPR_ARG (arg1, 0);
0aee4751
KH
10458
10459 /* Optimize sqrt(x)*sqrt(x) as x. */
10460 if (BUILTIN_SQRT_P (fcode0)
10461 && operand_equal_p (arg00, arg10, 0)
10462 && ! HONOR_SNANS (TYPE_MODE (type)))
10463 return arg00;
10464
10465 /* Optimize root(x)*root(y) as root(x*y). */
5039610b 10466 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
db3927fb
AH
10467 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10468 return build_call_expr_loc (loc, rootfn, 1, arg);
0aee4751
KH
10469 }
10470
10471 /* Optimize expN(x)*expN(y) as expN(x+y). */
10472 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10473 {
5039610b 10474 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
db3927fb 10475 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
5039610b
SL
10476 CALL_EXPR_ARG (arg0, 0),
10477 CALL_EXPR_ARG (arg1, 0));
db3927fb 10478 return build_call_expr_loc (loc, expfn, 1, arg);
0aee4751
KH
10479 }
10480
10481 /* Optimizations of pow(...)*pow(...). */
10482 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10483 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10484 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10485 {
5039610b
SL
10486 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10487 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10488 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10489 tree arg11 = CALL_EXPR_ARG (arg1, 1);
0aee4751
KH
10490
10491 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10492 if (operand_equal_p (arg01, arg11, 0))
10493 {
5039610b 10494 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
db3927fb
AH
10495 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10496 arg00, arg10);
10497 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
0aee4751
KH
10498 }
10499
10500 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10501 if (operand_equal_p (arg00, arg10, 0))
10502 {
5039610b 10503 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
db3927fb
AH
10504 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10505 arg01, arg11);
10506 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
0aee4751
KH
10507 }
10508 }
10509
10510 /* Optimize tan(x)*cos(x) as sin(x). */
10511 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10512 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10513 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10514 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10515 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10516 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
5039610b
SL
10517 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10518 CALL_EXPR_ARG (arg1, 0), 0))
0aee4751
KH
10519 {
10520 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10521
10522 if (sinfn != NULL_TREE)
db3927fb
AH
10523 return build_call_expr_loc (loc, sinfn, 1,
10524 CALL_EXPR_ARG (arg0, 0));
0aee4751
KH
10525 }
10526
10527 /* Optimize x*pow(x,c) as pow(x,c+1). */
10528 if (fcode1 == BUILT_IN_POW
10529 || fcode1 == BUILT_IN_POWF
10530 || fcode1 == BUILT_IN_POWL)
10531 {
5039610b
SL
10532 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10533 tree arg11 = CALL_EXPR_ARG (arg1, 1);
0aee4751 10534 if (TREE_CODE (arg11) == REAL_CST
455f14dd 10535 && !TREE_OVERFLOW (arg11)
0aee4751
KH
10536 && operand_equal_p (arg0, arg10, 0))
10537 {
5039610b 10538 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
0aee4751 10539 REAL_VALUE_TYPE c;
5039610b 10540 tree arg;
0aee4751
KH
10541
10542 c = TREE_REAL_CST (arg11);
10543 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10544 arg = build_real (type, c);
db3927fb 10545 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
0aee4751
KH
10546 }
10547 }
10548
10549 /* Optimize pow(x,c)*x as pow(x,c+1). */
10550 if (fcode0 == BUILT_IN_POW
10551 || fcode0 == BUILT_IN_POWF
10552 || fcode0 == BUILT_IN_POWL)
10553 {
5039610b
SL
10554 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10555 tree arg01 = CALL_EXPR_ARG (arg0, 1);
0aee4751 10556 if (TREE_CODE (arg01) == REAL_CST
455f14dd 10557 && !TREE_OVERFLOW (arg01)
0aee4751
KH
10558 && operand_equal_p (arg1, arg00, 0))
10559 {
5039610b 10560 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
0aee4751 10561 REAL_VALUE_TYPE c;
5039610b 10562 tree arg;
0aee4751
KH
10563
10564 c = TREE_REAL_CST (arg01);
10565 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10566 arg = build_real (type, c);
db3927fb 10567 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
0aee4751
KH
10568 }
10569 }
10570
10571 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
efd8f750 10572 if (optimize_function_for_speed_p (cfun)
0aee4751
KH
10573 && operand_equal_p (arg0, arg1, 0))
10574 {
10575 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10576
10577 if (powfn)
10578 {
10579 tree arg = build_real (type, dconst2);
db3927fb 10580 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
0aee4751
KH
10581 }
10582 }
10583 }
10584 }
10585 goto associate;
10586
10587 case BIT_IOR_EXPR:
10588 bit_ior:
10589 if (integer_all_onesp (arg1))
db3927fb 10590 return omit_one_operand_loc (loc, type, arg1, arg0);
0aee4751 10591 if (integer_zerop (arg1))
db3927fb 10592 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751 10593 if (operand_equal_p (arg0, arg1, 0))
db3927fb 10594 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
10595
10596 /* ~X | X is -1. */
10597 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10598 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10599 {
e8160c9a 10600 t1 = build_zero_cst (type);
db3927fb
AH
10601 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10602 return omit_one_operand_loc (loc, type, t1, arg1);
0aee4751
KH
10603 }
10604
10605 /* X | ~X is -1. */
10606 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10607 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10608 {
e8160c9a 10609 t1 = build_zero_cst (type);
db3927fb
AH
10610 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10611 return omit_one_operand_loc (loc, type, t1, arg0);
0aee4751
KH
10612 }
10613
840992bd
RS
10614 /* Canonicalize (X & C1) | C2. */
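      /* E.g. in an 8-bit type, (X & 0x0f) | 0xff becomes 0xff because
         (C1 & C2) == C1, (X & 0xf0) | 0x0f becomes X | 0x0f because
         (C1 | C2) == ~0, and otherwise C1 is shrunk to C1 & ~C2.  */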
10615 if (TREE_CODE (arg0) == BIT_AND_EXPR
10616 && TREE_CODE (arg1) == INTEGER_CST
10617 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10618 {
517ddae9
JJ
10619 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10620 int width = TYPE_PRECISION (type), w;
840992bd
RS
10621 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10622 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10623 hi2 = TREE_INT_CST_HIGH (arg1);
10624 lo2 = TREE_INT_CST_LOW (arg1);
10625
10626 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10627 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
db3927fb
AH
10628 return omit_one_operand_loc (loc, type, arg1,
10629 TREE_OPERAND (arg0, 0));
840992bd
RS
10630
10631 if (width > HOST_BITS_PER_WIDE_INT)
10632 {
b8698a0f 10633 mhi = (unsigned HOST_WIDE_INT) -1
840992bd
RS
10634 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10635 mlo = -1;
10636 }
10637 else
10638 {
10639 mhi = 0;
10640 mlo = (unsigned HOST_WIDE_INT) -1
10641 >> (HOST_BITS_PER_WIDE_INT - width);
10642 }
10643
10644 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10645 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
db3927fb 10646 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
840992bd
RS
10647 TREE_OPERAND (arg0, 0), arg1);
10648
517ddae9
JJ
10649 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10650 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10651 mode which allows further optimizations. */
840992bd
RS
10652 hi1 &= mhi;
10653 lo1 &= mlo;
517ddae9
JJ
10654 hi2 &= mhi;
10655 lo2 &= mlo;
10656 hi3 = hi1 & ~hi2;
10657 lo3 = lo1 & ~lo2;
10658 for (w = BITS_PER_UNIT;
10659 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10660 w <<= 1)
10661 {
10662 unsigned HOST_WIDE_INT mask
10663 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10664 if (((lo1 | lo2) & mask) == mask
10665 && (lo1 & ~mask) == 0 && hi1 == 0)
10666 {
10667 hi3 = 0;
10668 lo3 = mask;
10669 break;
10670 }
10671 }
10672 if (hi3 != hi1 || lo3 != lo1)
db3927fb
AH
10673 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10674 fold_build2_loc (loc, BIT_AND_EXPR, type,
840992bd
RS
10675 TREE_OPERAND (arg0, 0),
10676 build_int_cst_wide (type,
517ddae9 10677 lo3, hi3)),
840992bd
RS
10678 arg1);
10679 }
10680
03bebcac
RS
10681 /* (X & Y) | Y is (X, Y). */
10682 if (TREE_CODE (arg0) == BIT_AND_EXPR
10683 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
db3927fb 10684 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
03bebcac
RS
10685 /* (X & Y) | X is (Y, X). */
10686 if (TREE_CODE (arg0) == BIT_AND_EXPR
10687 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10688 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
db3927fb 10689 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
03bebcac
RS
10690 /* X | (X & Y) is (Y, X). */
10691 if (TREE_CODE (arg1) == BIT_AND_EXPR
10692 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10693 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
db3927fb 10694 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
03bebcac
RS
10695 /* X | (Y & X) is (Y, X). */
10696 if (TREE_CODE (arg1) == BIT_AND_EXPR
10697 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10698 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
db3927fb 10699 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
03bebcac 10700
db3927fb 10701 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
0aee4751
KH
10702 if (t1 != NULL_TREE)
10703 return t1;
10704
10705 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10706
10707 This results in more efficient code for machines without a NAND
10708 instruction. Combine will canonicalize to the first form
10709 which will allow use of NAND instructions provided by the
10710 backend if they exist. */
10711 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10712 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10713 {
db3927fb
AH
10714 return
10715 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10716 build2 (BIT_AND_EXPR, type,
10717 fold_convert_loc (loc, type,
10718 TREE_OPERAND (arg0, 0)),
10719 fold_convert_loc (loc, type,
10720 TREE_OPERAND (arg1, 0))));
0aee4751
KH
10721 }
10722
10723 /* See if this can be simplified into a rotate first. If that
10724 is unsuccessful continue in the association code. */
10725 goto bit_rotate;
10726
10727 case BIT_XOR_EXPR:
10728 if (integer_zerop (arg1))
db3927fb 10729 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751 10730 if (integer_all_onesp (arg1))
db3927fb 10731 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
0aee4751 10732 if (operand_equal_p (arg0, arg1, 0))
db3927fb 10733 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
0aee4751
KH
10734
10735 /* ~X ^ X is -1. */
10736 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10737 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10738 {
e8160c9a 10739 t1 = build_zero_cst (type);
db3927fb
AH
10740 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10741 return omit_one_operand_loc (loc, type, t1, arg1);
0aee4751
KH
10742 }
10743
10744 /* X ^ ~X is -1. */
10745 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10746 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10747 {
e8160c9a 10748 t1 = build_zero_cst (type);
db3927fb
AH
10749 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10750 return omit_one_operand_loc (loc, type, t1, arg0);
0aee4751
KH
10751 }
10752
10753 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10754 with a constant, and the two constants have no bits in common,
10755 we should treat this as a BIT_IOR_EXPR since this may produce more
10756 simplifications. */
10757 if (TREE_CODE (arg0) == BIT_AND_EXPR
10758 && TREE_CODE (arg1) == BIT_AND_EXPR
10759 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10760 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10761 && integer_zerop (const_binop (BIT_AND_EXPR,
10762 TREE_OPERAND (arg0, 1),
43a5d30b 10763 TREE_OPERAND (arg1, 1))))
0aee4751
KH
10764 {
10765 code = BIT_IOR_EXPR;
10766 goto bit_ior;
10767 }
10768
9d24eb54
AP
10769 /* (X | Y) ^ X -> Y & ~X.  */
10770 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10771 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10772 {
10773 tree t2 = TREE_OPERAND (arg0, 1);
db3927fb 10774 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
9d24eb54 10775 arg1);
db3927fb
AH
10776 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10777 fold_convert_loc (loc, type, t2),
10778 fold_convert_loc (loc, type, t1));
9d24eb54
AP
10779 return t1;
10780 }
10781
10782 /* (Y | X) ^ X -> Y & ~X.  */
10783 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10784 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10785 {
10786 tree t2 = TREE_OPERAND (arg0, 0);
db3927fb 10787 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
9d24eb54 10788 arg1);
db3927fb
AH
10789 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10790 fold_convert_loc (loc, type, t2),
10791 fold_convert_loc (loc, type, t1));
9d24eb54
AP
10792 return t1;
10793 }
10794
10795 /* X ^ (X | Y) -> Y & ~X.  */
10796 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10797 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10798 {
10799 tree t2 = TREE_OPERAND (arg1, 1);
db3927fb 10800 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
9d24eb54 10801 arg0);
db3927fb
AH
10802 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10803 fold_convert_loc (loc, type, t2),
10804 fold_convert_loc (loc, type, t1));
9d24eb54
AP
10805 return t1;
10806 }
10807
10808 /* X ^ (Y | X) -> Y & ~X.  */
10809 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10810 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10811 {
10812 tree t2 = TREE_OPERAND (arg1, 0);
db3927fb 10813 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
9d24eb54 10814 arg0);
db3927fb
AH
10815 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10816 fold_convert_loc (loc, type, t2),
10817 fold_convert_loc (loc, type, t1));
9d24eb54
AP
10818 return t1;
10819 }
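 /* For illustration, with X = 0b1100 and Y = 0b1010:
      (X | Y) ^ X = 0b1110 ^ 0b1100 = 0b0010
      Y & ~X      = 0b1010 & 0b0011 = 0b0010
    so the four variants above all rewrite the XOR as a single AND
    with a complemented operand.  (Illustrative bit patterns.)  */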
b8698a0f 10820
33ab6245
JM
10821 /* Convert ~X ^ ~Y to X ^ Y. */
10822 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10823 && TREE_CODE (arg1) == BIT_NOT_EXPR)
db3927fb
AH
10824 return fold_build2_loc (loc, code, type,
10825 fold_convert_loc (loc, type,
10826 TREE_OPERAND (arg0, 0)),
10827 fold_convert_loc (loc, type,
10828 TREE_OPERAND (arg1, 0)));
33ab6245 10829
f8ed9a1c
RS
10830 /* Convert ~X ^ C to X ^ ~C. */
10831 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10832 && TREE_CODE (arg1) == INTEGER_CST)
db3927fb
AH
10833 return fold_build2_loc (loc, code, type,
10834 fold_convert_loc (loc, type,
10835 TREE_OPERAND (arg0, 0)),
10836 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
f8ed9a1c 10837
cef65eaa
RS
10838 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10839 if (TREE_CODE (arg0) == BIT_AND_EXPR
10840 && integer_onep (TREE_OPERAND (arg0, 1))
10841 && integer_onep (arg1))
db3927fb 10842 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
cef65eaa
RS
10843 build_int_cst (TREE_TYPE (arg0), 0));
10844
dd2c62dc
RS
10845 /* Fold (X & Y) ^ Y as ~X & Y. */
10846 if (TREE_CODE (arg0) == BIT_AND_EXPR
10847 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10848 {
db3927fb 10849 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
b8698a0f 10850 return fold_build2_loc (loc, BIT_AND_EXPR, type,
db3927fb
AH
10851 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10852 fold_convert_loc (loc, type, arg1));
dd2c62dc
RS
10853 }
10854 /* Fold (X & Y) ^ X as ~Y & X. */
10855 if (TREE_CODE (arg0) == BIT_AND_EXPR
10856 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10857 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10858 {
db3927fb
AH
10859 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10860 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10861 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10862 fold_convert_loc (loc, type, arg1));
dd2c62dc
RS
10863 }
10864 /* Fold X ^ (X & Y) as X & ~Y. */
10865 if (TREE_CODE (arg1) == BIT_AND_EXPR
10866 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10867 {
db3927fb
AH
10868 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10869 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10870 fold_convert_loc (loc, type, arg0),
10871 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
dd2c62dc
RS
10872 }
10873 /* Fold X ^ (Y & X) as ~Y & X. */
10874 if (TREE_CODE (arg1) == BIT_AND_EXPR
10875 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10876 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10877 {
db3927fb
AH
10878 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10879 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10880 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10881 fold_convert_loc (loc, type, arg0));
dd2c62dc
RS
10882 }
10883
0aee4751
KH
10884 /* See if this can be simplified into a rotate first. If that
10885 is unsuccessful continue in the association code. */
10886 goto bit_rotate;
10887
10888 case BIT_AND_EXPR:
10889 if (integer_all_onesp (arg1))
db3927fb 10890 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751 10891 if (integer_zerop (arg1))
db3927fb 10892 return omit_one_operand_loc (loc, type, arg1, arg0);
0aee4751 10893 if (operand_equal_p (arg0, arg1, 0))
db3927fb 10894 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
10895
10896 /* ~X & X is always zero. */
10897 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10898 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
db3927fb 10899 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
0aee4751
KH
10900
10901 /* X & ~X is always zero. */
10902 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10903 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
db3927fb 10904 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
0aee4751 10905
840992bd
RS
10906 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10907 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10908 && TREE_CODE (arg1) == INTEGER_CST
10909 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8174836f 10910 {
db3927fb
AH
10911 tree tmp1 = fold_convert_loc (loc, type, arg1);
10912 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10913 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10914 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10915 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10916 return
10917 fold_convert_loc (loc, type,
10918 fold_build2_loc (loc, BIT_IOR_EXPR,
10919 type, tmp2, tmp3));
8174836f 10920 }
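 /* Illustrative instance of the canonicalization above, e.g.
      (X | 0x0C) & 0x0A  ->  (X & 0x0A) | (0x0C & 0x0A)
                         ==  (X & 0x0A) | 0x08
    which exposes the constant-only subexpression to further folding.  */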
840992bd 10921
03bebcac
RS
10922 /* (X | Y) & Y is (X, Y). */
10923 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10924 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
db3927fb 10925 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
03bebcac
RS
10926 /* (X | Y) & X is (Y, X). */
10927 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10928 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10929 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
db3927fb 10930 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
03bebcac
RS
10931 /* X & (X | Y) is (Y, X). */
10932 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10933 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10934 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
db3927fb 10935 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
03bebcac
RS
10936 /* X & (Y | X) is (Y, X). */
10937 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10938 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10939 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
db3927fb 10940 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
03bebcac 10941
cef65eaa
RS
10942 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10943 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10944 && integer_onep (TREE_OPERAND (arg0, 1))
10945 && integer_onep (arg1))
10946 {
10947 tem = TREE_OPERAND (arg0, 0);
db3927fb
AH
10948 return fold_build2_loc (loc, EQ_EXPR, type,
10949 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
cef65eaa
RS
10950 build_int_cst (TREE_TYPE (tem), 1)),
10951 build_int_cst (TREE_TYPE (tem), 0));
10952 }
10953 /* Fold ~X & 1 as (X & 1) == 0. */
10954 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10955 && integer_onep (arg1))
10956 {
10957 tem = TREE_OPERAND (arg0, 0);
db3927fb
AH
10958 return fold_build2_loc (loc, EQ_EXPR, type,
10959 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
cef65eaa
RS
10960 build_int_cst (TREE_TYPE (tem), 1)),
10961 build_int_cst (TREE_TYPE (tem), 0));
10962 }
10963
dd2c62dc
RS
10964 /* Fold (X ^ Y) & Y as ~X & Y. */
10965 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10966 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10967 {
db3927fb 10968 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
b8698a0f 10969 return fold_build2_loc (loc, BIT_AND_EXPR, type,
db3927fb
AH
10970 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10971 fold_convert_loc (loc, type, arg1));
dd2c62dc
RS
10972 }
10973 /* Fold (X ^ Y) & X as ~Y & X. */
10974 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10975 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10976 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10977 {
db3927fb
AH
10978 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10979 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10980 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10981 fold_convert_loc (loc, type, arg1));
dd2c62dc
RS
10982 }
10983 /* Fold X & (X ^ Y) as X & ~Y. */
10984 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10985 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10986 {
db3927fb
AH
10987 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10988 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10989 fold_convert_loc (loc, type, arg0),
10990 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
dd2c62dc
RS
10991 }
10992 /* Fold X & (Y ^ X) as ~Y & X. */
10993 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10994 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10995 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10996 {
db3927fb
AH
10997 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10998 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10999 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11000 fold_convert_loc (loc, type, arg0));
dd2c62dc
RS
11001 }
11002
140d4eff
JJ
11003 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11004 ((A & N) + B) & M -> (A + B) & M
11005 Similarly if (N & M) == 0,
11006 ((A | N) + B) & M -> (A + B) & M
11007 and for - instead of + (or unary - instead of +)
11008 and/or ^ instead of |.
11009 If B is constant and (B & M) == 0, fold into A & M. */
11010 if (host_integerp (arg1, 1))
11011 {
11012 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11013 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11014 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11015 && (TREE_CODE (arg0) == PLUS_EXPR
11016 || TREE_CODE (arg0) == MINUS_EXPR
11017 || TREE_CODE (arg0) == NEGATE_EXPR)
11018 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11019 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11020 {
11021 tree pmop[2];
11022 int which = 0;
11023 unsigned HOST_WIDE_INT cst0;
11024
11025 /* Now we know that arg0 is (C + D) or (C - D) or
11026 -C and arg1 (M) is == (1LL << cst) - 1.
11027 Store C into PMOP[0] and D into PMOP[1]. */
11028 pmop[0] = TREE_OPERAND (arg0, 0);
11029 pmop[1] = NULL;
11030 if (TREE_CODE (arg0) != NEGATE_EXPR)
11031 {
11032 pmop[1] = TREE_OPERAND (arg0, 1);
11033 which = 1;
11034 }
11035
11036 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11037 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11038 & cst1) != cst1)
11039 which = -1;
11040
11041 for (; which >= 0; which--)
11042 switch (TREE_CODE (pmop[which]))
11043 {
11044 case BIT_AND_EXPR:
11045 case BIT_IOR_EXPR:
11046 case BIT_XOR_EXPR:
11047 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11048 != INTEGER_CST)
11049 break;
11050 /* tree_low_cst not used, because we don't care about
11051 the upper bits. */
11052 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11053 cst0 &= cst1;
11054 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11055 {
11056 if (cst0 != cst1)
11057 break;
11058 }
11059 else if (cst0 != 0)
11060 break;
11061 /* If C or D is of the form (A & N) where
11062 (N & M) == M, or of the form (A | N) or
11063 (A ^ N) where (N & M) == 0, replace it with A. */
11064 pmop[which] = TREE_OPERAND (pmop[which], 0);
11065 break;
11066 case INTEGER_CST:
11067 /* If C or D is a N where (N & M) == 0, it can be
11068 omitted (assumed 0). */
11069 if ((TREE_CODE (arg0) == PLUS_EXPR
11070 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11071 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11072 pmop[which] = NULL;
11073 break;
11074 default:
11075 break;
11076 }
11077
11078 /* Only build anything new if we optimized one or both arguments
11079 above. */
11080 if (pmop[0] != TREE_OPERAND (arg0, 0)
11081 || (TREE_CODE (arg0) != NEGATE_EXPR
11082 && pmop[1] != TREE_OPERAND (arg0, 1)))
11083 {
828fde80 11084 tree utype = TREE_TYPE (arg0);
140d4eff
JJ
11085 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11086 {
11087 /* Perform the operations in a type that has defined
11088 overflow behavior. */
828fde80 11089 utype = unsigned_type_for (TREE_TYPE (arg0));
140d4eff
JJ
11090 if (pmop[0] != NULL)
11091 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11092 if (pmop[1] != NULL)
11093 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11094 }
11095
11096 if (TREE_CODE (arg0) == NEGATE_EXPR)
11097 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11098 else if (TREE_CODE (arg0) == PLUS_EXPR)
11099 {
11100 if (pmop[0] != NULL && pmop[1] != NULL)
11101 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11102 pmop[0], pmop[1]);
11103 else if (pmop[0] != NULL)
11104 tem = pmop[0];
11105 else if (pmop[1] != NULL)
11106 tem = pmop[1];
11107 else
11108 return build_int_cst (type, 0);
11109 }
11110 else if (pmop[0] == NULL)
11111 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11112 else
11113 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11114 pmop[0], pmop[1]);
11115 /* TEM is now the new binary +, - or unary - replacement. */
828fde80
JJ
11116 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11117 fold_convert_loc (loc, utype, arg1));
11118 return fold_convert_loc (loc, type, tem);
140d4eff
JJ
11119 }
11120 }
11121 }
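 /* Examples of the transformation above, with M == 0xFF,
    i.e. M == (1 << 8) - 1 (illustrative constants only):
      ((A & 0x1FF) + B) & 0xFF  ->  (A + B) & 0xFF   since (N & M) == M
      ((A | 0x100) + B) & 0xFF  ->  (A + B) & 0xFF   since (N & M) == 0
      (A + 0x300) & 0xFF        ->  A & 0xFF         since (B & M) == 0.  */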
11122
db3927fb 11123 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
0aee4751
KH
11124 if (t1 != NULL_TREE)
11125 return t1;
11126 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11127 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11128 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11129 {
11130 unsigned int prec
11131 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11132
11133 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11134 && (~TREE_INT_CST_LOW (arg1)
11135 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
db3927fb
AH
11136 return
11137 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
0aee4751
KH
11138 }
11139
11140 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11141
11142 This results in more efficient code for machines without a NOR
11143 instruction. Combine will canonicalize to the first form
11144 which will allow use of NOR instructions provided by the
11145 backend if they exist. */
11146 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11147 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11148 {
db3927fb 11149 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7f20a5b7 11150 build2 (BIT_IOR_EXPR, type,
db3927fb
AH
11151 fold_convert_loc (loc, type,
11152 TREE_OPERAND (arg0, 0)),
11153 fold_convert_loc (loc, type,
11154 TREE_OPERAND (arg1, 0))));
0aee4751
KH
11155 }
11156
e5901cad
OW
11157 /* If arg0 is derived from the address of an object or function, we may
11158 be able to fold this expression using the object or function's
11159 alignment. */
11160 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11161 {
11162 unsigned HOST_WIDE_INT modulus, residue;
11163 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11164
617f3897
MJ
11165 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11166 integer_onep (arg1));
e5901cad
OW
11167
11168 /* This works because modulus is a power of 2. If this weren't the
11169 case, we'd have to replace it by its greatest power-of-2
11170 divisor: modulus & -modulus. */
11171 if (low < modulus)
11172 return build_int_cst (type, residue & low);
11173 }
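 /* For example, if ARG0 is the address of an object known to be
    16-byte aligned, get_pointer_modulus_and_residue gives
    modulus == 16 and residue == 0, so ADDR & 7 folds to the
    constant 0 because 7 < 16.  (Illustrative alignment only.)  */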
11174
22164c3d
JJ
11175 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11176 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11177 if the new mask might be further optimized. */
11178 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11179 || TREE_CODE (arg0) == RSHIFT_EXPR)
11180 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11181 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11182 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11183 < TYPE_PRECISION (TREE_TYPE (arg0))
11184 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11185 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11186 {
11187 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11188 unsigned HOST_WIDE_INT mask
11189 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11190 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11191 tree shift_type = TREE_TYPE (arg0);
11192
11193 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11194 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11195 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11196 && TYPE_PRECISION (TREE_TYPE (arg0))
11197 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11198 {
11199 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11200 tree arg00 = TREE_OPERAND (arg0, 0);
11201 /* See if more bits can be proven as zero because of
11202 zero extension. */
11203 if (TREE_CODE (arg00) == NOP_EXPR
11204 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11205 {
11206 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11207 if (TYPE_PRECISION (inner_type)
11208 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11209 && TYPE_PRECISION (inner_type) < prec)
11210 {
11211 prec = TYPE_PRECISION (inner_type);
11212 /* See if we can shorten the right shift. */
11213 if (shiftc < prec)
11214 shift_type = inner_type;
11215 }
11216 }
11217 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11218 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11219 zerobits <<= prec - shiftc;
11220 /* For arithmetic shift if sign bit could be set, zerobits
11221 can contain actually sign bits, so no transformation is
11222 possible, unless MASK masks them all away. In that
11223 case the shift needs to be converted into logical shift. */
11224 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11225 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11226 {
11227 if ((mask & zerobits) == 0)
11228 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11229 else
11230 zerobits = 0;
11231 }
11232 }
11233
11234 /* ((X << 16) & 0xff00) is (X, 0). */
11235 if ((mask & zerobits) == mask)
db3927fb
AH
11236 return omit_one_operand_loc (loc, type,
11237 build_int_cst (type, 0), arg0);
22164c3d
JJ
11238
11239 newmask = mask | zerobits;
11240 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11241 {
11242 unsigned int prec;
11243
11244 /* Only do the transformation if NEWMASK is some integer
11245 mode's mask. */
11246 for (prec = BITS_PER_UNIT;
11247 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11248 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11249 break;
11250 if (prec < HOST_BITS_PER_WIDE_INT
11251 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11252 {
776248b8
JJ
11253 tree newmaskt;
11254
22164c3d
JJ
11255 if (shift_type != TREE_TYPE (arg0))
11256 {
db3927fb
AH
11257 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11258 fold_convert_loc (loc, shift_type,
11259 TREE_OPERAND (arg0, 0)),
22164c3d 11260 TREE_OPERAND (arg0, 1));
db3927fb 11261 tem = fold_convert_loc (loc, type, tem);
22164c3d
JJ
11262 }
11263 else
11264 tem = op0;
776248b8
JJ
11265 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11266 if (!tree_int_cst_equal (newmaskt, arg1))
db3927fb 11267 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
22164c3d
JJ
11268 }
11269 }
11270 }
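 /* Concrete instances of the two cases above, assuming 32-bit
    operands (illustrative constants):
      (X << 16) & 0xff00  ->  0, since the mask only covers bits the
      shift already zeroed;
      ((unsigned) X >> 8) & 0x00ffffff  ->  ((unsigned) X >> 8) & 0xffffffff,
      since widening the mask with the known zero bits yields the
      all-ones mask, which then folds away entirely.  */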
11271
0aee4751
KH
11272 goto associate;
11273
11274 case RDIV_EXPR:
11275 /* Don't touch a floating-point divide by zero unless the mode
11276 of the constant can represent infinity. */
11277 if (TREE_CODE (arg1) == REAL_CST
11278 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11279 && real_zerop (arg1))
62ab45cc 11280 return NULL_TREE;
0aee4751 11281
ffbc33cc 11282 /* Optimize A / A to 1.0 if we don't care about
1d8b38a0
UB
11283 NaNs or Infinities. Skip the transformation
11284 for non-real operands. */
11285 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11286 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
ffbc33cc
UB
11287 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11288 && operand_equal_p (arg0, arg1, 0))
11289 {
11290 tree r = build_real (TREE_TYPE (arg0), dconst1);
11291
db3927fb 11292 return omit_two_operands_loc (loc, type, r, arg0, arg1);
ffbc33cc
UB
11293 }
11294
1d8b38a0
UB
11295 /* The complex version of the above A / A optimization. */
11296 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11297 && operand_equal_p (arg0, arg1, 0))
11298 {
11299 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11300 if (! HONOR_NANS (TYPE_MODE (elem_type))
11301 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11302 {
11303 tree r = build_real (elem_type, dconst1);
11304 /* omit_two_operands will call fold_convert for us. */
db3927fb 11305 return omit_two_operands_loc (loc, type, r, arg0, arg1);
1d8b38a0
UB
11306 }
11307 }
11308
0aee4751
KH
11309 /* (-A) / (-B) -> A / B */
11310 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
db3927fb 11311 return fold_build2_loc (loc, RDIV_EXPR, type,
7f20a5b7
KH
11312 TREE_OPERAND (arg0, 0),
11313 negate_expr (arg1));
0aee4751 11314 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
db3927fb 11315 return fold_build2_loc (loc, RDIV_EXPR, type,
7f20a5b7
KH
11316 negate_expr (arg0),
11317 TREE_OPERAND (arg1, 0));
0aee4751
KH
11318
11319 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11320 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11321 && real_onep (arg1))
db3927fb 11322 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
11323
11324 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11325 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11326 && real_minus_onep (arg1))
db3927fb
AH
11327 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11328 negate_expr (arg0)));
0aee4751
KH
11329
11330 /* If ARG1 is a constant, we can convert this to a multiply by the
11331 reciprocal. This does not have the same rounding properties,
a1a82611 11332 so only do this if -freciprocal-math. We can actually
0aee4751
KH
11333 always safely do it if ARG1 is a power of two, but it's hard to
11334 tell if it is or not in a portable manner. */
11335 if (TREE_CODE (arg1) == REAL_CST)
11336 {
a1a82611 11337 if (flag_reciprocal_math
0aee4751 11338 && 0 != (tem = const_binop (code, build_real (type, dconst1),
43a5d30b 11339 arg1)))
db3927fb 11340 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
0aee4751
KH
11341 /* Find the reciprocal if optimizing and the result is exact. */
11342 if (optimize)
11343 {
11344 REAL_VALUE_TYPE r;
11345 r = TREE_REAL_CST (arg1);
11346 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11347 {
11348 tem = build_real (type, r);
db3927fb
AH
11349 return fold_build2_loc (loc, MULT_EXPR, type,
11350 fold_convert_loc (loc, type, arg0), tem);
0aee4751
KH
11351 }
11352 }
11353 }
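 /* For instance, with -freciprocal-math, X / 5.0 becomes
    X * (1.0 / 5.0); and X / 4.0 becomes X * 0.25 whenever we are
    optimizing, because 0.25 is an exact reciprocal in binary
    floating point.  (Illustrative constants.)  */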
b8698a0f 11354 /* Convert A/B/C to A/(B*C). */
a1a82611 11355 if (flag_reciprocal_math
0aee4751 11356 && TREE_CODE (arg0) == RDIV_EXPR)
db3927fb
AH
11357 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11358 fold_build2_loc (loc, MULT_EXPR, type,
7f20a5b7 11359 TREE_OPERAND (arg0, 1), arg1));
0aee4751
KH
11360
11361 /* Convert A/(B/C) to (A/B)*C. */
a1a82611 11362 if (flag_reciprocal_math
0aee4751 11363 && TREE_CODE (arg1) == RDIV_EXPR)
db3927fb
AH
11364 return fold_build2_loc (loc, MULT_EXPR, type,
11365 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
7f20a5b7
KH
11366 TREE_OPERAND (arg1, 0)),
11367 TREE_OPERAND (arg1, 1));
0aee4751
KH
11368
11369 /* Convert C1/(X*C2) into (C1/C2)/X. */
a1a82611 11370 if (flag_reciprocal_math
0aee4751
KH
11371 && TREE_CODE (arg1) == MULT_EXPR
11372 && TREE_CODE (arg0) == REAL_CST
11373 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11374 {
11375 tree tem = const_binop (RDIV_EXPR, arg0,
43a5d30b 11376 TREE_OPERAND (arg1, 1));
0aee4751 11377 if (tem)
db3927fb 11378 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
7f20a5b7 11379 TREE_OPERAND (arg1, 0));
0aee4751
KH
11380 }
11381
0aee4751
KH
11382 if (flag_unsafe_math_optimizations)
11383 {
11384 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11385 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11386
11387 /* Optimize sin(x)/cos(x) as tan(x). */
11388 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11389 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11390 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
5039610b
SL
11391 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11392 CALL_EXPR_ARG (arg1, 0), 0))
0aee4751
KH
11393 {
11394 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11395
11396 if (tanfn != NULL_TREE)
db3927fb 11397 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
0aee4751
KH
11398 }
11399
11400 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11401 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11402 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11403 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
5039610b
SL
11404 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11405 CALL_EXPR_ARG (arg1, 0), 0))
0aee4751
KH
11406 {
11407 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11408
11409 if (tanfn != NULL_TREE)
11410 {
db3927fb
AH
11411 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11412 CALL_EXPR_ARG (arg0, 0));
11413 return fold_build2_loc (loc, RDIV_EXPR, type,
7f20a5b7 11414 build_real (type, dconst1), tmp);
0aee4751
KH
11415 }
11416 }
11417
d531830f
RS
11418 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11419 NaNs or Infinities. */
11420 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11421 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11422 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11423 {
5039610b
SL
11424 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11425 tree arg01 = CALL_EXPR_ARG (arg1, 0);
d531830f
RS
11426
11427 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11428 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11429 && operand_equal_p (arg00, arg01, 0))
11430 {
11431 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11432
11433 if (cosfn != NULL_TREE)
db3927fb 11434 return build_call_expr_loc (loc, cosfn, 1, arg00);
d531830f
RS
11435 }
11436 }
11437
11438 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
6416ae7f 11439 NaNs or Infinities. */
d531830f
RS
11440 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11441 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11442 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11443 {
5039610b
SL
11444 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11445 tree arg01 = CALL_EXPR_ARG (arg1, 0);
d531830f
RS
11446
11447 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11448 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11449 && operand_equal_p (arg00, arg01, 0))
11450 {
11451 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11452
11453 if (cosfn != NULL_TREE)
11454 {
db3927fb
AH
11455 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11456 return fold_build2_loc (loc, RDIV_EXPR, type,
d531830f 11457 build_real (type, dconst1),
b71b8086 11458 tmp);
d531830f
RS
11459 }
11460 }
11461 }
11462
0aee4751
KH
11463 /* Optimize pow(x,c)/x as pow(x,c-1). */
11464 if (fcode0 == BUILT_IN_POW
11465 || fcode0 == BUILT_IN_POWF
11466 || fcode0 == BUILT_IN_POWL)
11467 {
5039610b
SL
11468 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11469 tree arg01 = CALL_EXPR_ARG (arg0, 1);
0aee4751 11470 if (TREE_CODE (arg01) == REAL_CST
455f14dd 11471 && !TREE_OVERFLOW (arg01)
0aee4751
KH
11472 && operand_equal_p (arg1, arg00, 0))
11473 {
5039610b 11474 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
0aee4751 11475 REAL_VALUE_TYPE c;
5039610b 11476 tree arg;
0aee4751
KH
11477
11478 c = TREE_REAL_CST (arg01);
11479 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11480 arg = build_real (type, c);
db3927fb 11481 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
0aee4751
KH
11482 }
11483 }
d531830f 11484
9883e373
UB
11485 /* Optimize a/root(b/c) into a*root(c/b). */
11486 if (BUILTIN_ROOT_P (fcode1))
f1da2df1
UB
11487 {
11488 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11489
11490 if (TREE_CODE (rootarg) == RDIV_EXPR)
11491 {
11492 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11493 tree b = TREE_OPERAND (rootarg, 0);
11494 tree c = TREE_OPERAND (rootarg, 1);
11495
db3927fb 11496 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
f1da2df1 11497
db3927fb
AH
11498 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11499 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
f1da2df1
UB
11500 }
11501 }
11502
d531830f
RS
11503 /* Optimize x/expN(y) into x*expN(-y). */
11504 if (BUILTIN_EXPONENT_P (fcode1))
11505 {
5039610b
SL
11506 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11507 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
db3927fb
AH
11508 arg1 = build_call_expr_loc (loc,
11509 expfn, 1,
11510 fold_convert_loc (loc, type, arg));
11511 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
d531830f
RS
11512 }
11513
11514 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11515 if (fcode1 == BUILT_IN_POW
11516 || fcode1 == BUILT_IN_POWF
11517 || fcode1 == BUILT_IN_POWL)
11518 {
5039610b
SL
11519 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11520 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11521 tree arg11 = CALL_EXPR_ARG (arg1, 1);
db3927fb
AH
11522 tree neg11 = fold_convert_loc (loc, type,
11523 negate_expr (arg11));
11524 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11525 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
d531830f 11526 }
0aee4751 11527 }
fd6c76f4 11528 return NULL_TREE;
0aee4751
KH
11529
11530 case TRUNC_DIV_EXPR:
2298ade7
DM
11531 /* Optimize (X & (-A)) / A where A is a power of 2,
11532 to X >> log2(A) */
11533 if (TREE_CODE (arg0) == BIT_AND_EXPR
11534 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11535 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11536 {
11537 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11538 arg1, TREE_OPERAND (arg0, 1));
11539 if (sum && integer_zerop (sum)) {
11540 unsigned long pow2;
11541
11542 if (TREE_INT_CST_LOW (arg1))
11543 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11544 else
11545 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11546 + HOST_BITS_PER_WIDE_INT;
11547
11548 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11549 TREE_OPERAND (arg0, 0),
11550 build_int_cst (NULL_TREE, pow2));
11551 }
11552 }
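 /* E.g. (X & -8) / 8 becomes X >> 3: the AND clears the low three
    bits, so the signed division is exact and equivalent to the
    arithmetic right shift.  (Illustrative constants.)  */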
11553
11554 /* Fall thru */
11555
0aee4751 11556 case FLOOR_DIV_EXPR:
0f35201e
AM
11557 /* Simplify A / (B << N) where A and B are positive and B is
11558 a power of 2, to A >> (N + log2(B)). */
6ac01510 11559 strict_overflow_p = false;
0f35201e 11560 if (TREE_CODE (arg1) == LSHIFT_EXPR
6ac01510 11561 && (TYPE_UNSIGNED (type)
916c75b4 11562 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
0f35201e
AM
11563 {
11564 tree sval = TREE_OPERAND (arg1, 0);
11565 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11566 {
11567 tree sh_cnt = TREE_OPERAND (arg1, 1);
8ddf04c2
JJ
11568 unsigned long pow2;
11569
11570 if (TREE_INT_CST_LOW (sval))
11571 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11572 else
11573 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11574 + HOST_BITS_PER_WIDE_INT;
0f35201e 11575
6ac01510
ILT
11576 if (strict_overflow_p)
11577 fold_overflow_warning (("assuming signed overflow does not "
11578 "occur when simplifying A / (B << N)"),
11579 WARN_STRICT_OVERFLOW_MISC);
11580
db3927fb 11581 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
0f35201e 11582 sh_cnt, build_int_cst (NULL_TREE, pow2));
db3927fb
AH
11583 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11584 fold_convert_loc (loc, type, arg0), sh_cnt);
0f35201e
AM
11585 }
11586 }
65648dd4
RG
11587
11588 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11589 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11590 if (INTEGRAL_TYPE_P (type)
11591 && TYPE_UNSIGNED (type)
11592 && code == FLOOR_DIV_EXPR)
db3927fb 11593 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
65648dd4 11594
0f35201e
AM
11595 /* Fall thru */
11596
11597 case ROUND_DIV_EXPR:
0aee4751
KH
11598 case CEIL_DIV_EXPR:
11599 case EXACT_DIV_EXPR:
11600 if (integer_onep (arg1))
db3927fb 11601 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751 11602 if (integer_zerop (arg1))
62ab45cc 11603 return NULL_TREE;
0aee4751
KH
11604 /* X / -1 is -X. */
11605 if (!TYPE_UNSIGNED (type)
11606 && TREE_CODE (arg1) == INTEGER_CST
11607 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11608 && TREE_INT_CST_HIGH (arg1) == -1)
db3927fb 11609 return fold_convert_loc (loc, type, negate_expr (arg0));
0aee4751 11610
37d3243d
AP
11611 /* Convert -A / -B to A / B when the type is signed and overflow is
11612 undefined. */
eeef0e45 11613 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
37d3243d
AP
11614 && TREE_CODE (arg0) == NEGATE_EXPR
11615 && negate_expr_p (arg1))
6ac01510
ILT
11616 {
11617 if (INTEGRAL_TYPE_P (type))
11618 fold_overflow_warning (("assuming signed overflow does not occur "
11619 "when distributing negation across "
11620 "division"),
11621 WARN_STRICT_OVERFLOW_MISC);
db3927fb
AH
11622 return fold_build2_loc (loc, code, type,
11623 fold_convert_loc (loc, type,
11624 TREE_OPERAND (arg0, 0)),
11625 fold_convert_loc (loc, type,
11626 negate_expr (arg1)));
6ac01510 11627 }
eeef0e45 11628 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
37d3243d
AP
11629 && TREE_CODE (arg1) == NEGATE_EXPR
11630 && negate_expr_p (arg0))
6ac01510
ILT
11631 {
11632 if (INTEGRAL_TYPE_P (type))
11633 fold_overflow_warning (("assuming signed overflow does not occur "
11634 "when distributing negation across "
11635 "division"),
11636 WARN_STRICT_OVERFLOW_MISC);
db3927fb
AH
11637 return fold_build2_loc (loc, code, type,
11638 fold_convert_loc (loc, type,
11639 negate_expr (arg0)),
11640 fold_convert_loc (loc, type,
11641 TREE_OPERAND (arg1, 0)));
6ac01510 11642 }
37d3243d 11643
0aee4751
KH
11644 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11645 operation, EXACT_DIV_EXPR.
11646
11647 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11648 At one time others generated faster code, it's not clear if they do
11649 after the last round to changes to the DIV code in expmed.c. */
11650 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11651 && multiple_of_p (type, arg0, arg1))
db3927fb 11652 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
0aee4751 11653
6ac01510 11654 strict_overflow_p = false;
0aee4751 11655 if (TREE_CODE (arg1) == INTEGER_CST
6ac01510
ILT
11656 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11657 &strict_overflow_p)))
11658 {
11659 if (strict_overflow_p)
11660 fold_overflow_warning (("assuming signed overflow does not occur "
11661 "when simplifying division"),
11662 WARN_STRICT_OVERFLOW_MISC);
db3927fb 11663 return fold_convert_loc (loc, type, tem);
6ac01510 11664 }
0aee4751 11665
fd6c76f4 11666 return NULL_TREE;
0aee4751
KH
11667
11668 case CEIL_MOD_EXPR:
11669 case FLOOR_MOD_EXPR:
11670 case ROUND_MOD_EXPR:
11671 case TRUNC_MOD_EXPR:
11672 /* X % 1 is always zero, but be sure to preserve any side
11673 effects in X. */
11674 if (integer_onep (arg1))
db3927fb 11675 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
0aee4751
KH
11676
11677 /* X % 0, return X % 0 unchanged so that we can get the
11678 proper warnings and errors. */
11679 if (integer_zerop (arg1))
62ab45cc 11680 return NULL_TREE;
0aee4751
KH
11681
11682 /* 0 % X is always zero, but be sure to preserve any side
11683 effects in X. Place this after checking for X == 0. */
11684 if (integer_zerop (arg0))
db3927fb 11685 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
0aee4751
KH
11686
11687 /* X % -1 is zero. */
11688 if (!TYPE_UNSIGNED (type)
11689 && TREE_CODE (arg1) == INTEGER_CST
11690 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11691 && TREE_INT_CST_HIGH (arg1) == -1)
db3927fb 11692 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
0aee4751 11693
0aee4751
KH
11694 /* X % -C is the same as X % C. */
11695 if (code == TRUNC_MOD_EXPR
11696 && !TYPE_UNSIGNED (type)
11697 && TREE_CODE (arg1) == INTEGER_CST
455f14dd 11698 && !TREE_OVERFLOW (arg1)
0aee4751 11699 && TREE_INT_CST_HIGH (arg1) < 0
eeef0e45 11700 && !TYPE_OVERFLOW_TRAPS (type)
0aee4751
KH
11701 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11702 && !sign_bit_p (arg1, arg1))
db3927fb
AH
11703 return fold_build2_loc (loc, code, type,
11704 fold_convert_loc (loc, type, arg0),
11705 fold_convert_loc (loc, type,
11706 negate_expr (arg1)));
0aee4751
KH
11707
11708 /* X % -Y is the same as X % Y. */
11709 if (code == TRUNC_MOD_EXPR
11710 && !TYPE_UNSIGNED (type)
11711 && TREE_CODE (arg1) == NEGATE_EXPR
eeef0e45 11712 && !TYPE_OVERFLOW_TRAPS (type))
db3927fb
AH
11713 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11714 fold_convert_loc (loc, type,
11715 TREE_OPERAND (arg1, 0)));
0aee4751 11716
9e9ef331 11717 strict_overflow_p = false;
0aee4751 11718 if (TREE_CODE (arg1) == INTEGER_CST
6ac01510
ILT
11719 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11720 &strict_overflow_p)))
11721 {
11722 if (strict_overflow_p)
11723 fold_overflow_warning (("assuming signed overflow does not occur "
fa10beec 11724 "when simplifying modulus"),
6ac01510 11725 WARN_STRICT_OVERFLOW_MISC);
db3927fb 11726 return fold_convert_loc (loc, type, tem);
6ac01510 11727 }
0aee4751 11728
9e9ef331
EB
11729 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11730 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11731 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11732 && (TYPE_UNSIGNED (type)
11733 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11734 {
11735 tree c = arg1;
11736 /* Also optimize A % (C << N) where C is a power of 2,
11737 to A & ((C << N) - 1). */
11738 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11739 c = TREE_OPERAND (arg1, 0);
11740
11741 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11742 {
11743 tree mask
11744 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11745 build_int_cst (TREE_TYPE (arg1), 1));
11746 if (strict_overflow_p)
11747 fold_overflow_warning (("assuming signed overflow does not "
11748 "occur when simplifying "
11749 "X % (power of two)"),
11750 WARN_STRICT_OVERFLOW_MISC);
11751 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11752 fold_convert_loc (loc, type, arg0),
11753 fold_convert_loc (loc, type, mask));
11754 }
11755 }
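 /* For example, for unsigned X, X % 16 becomes X & 15, and
    A % (4 << N) becomes A & ((4 << N) - 1).  (Illustrative
    constants; the left operand must be known non-negative.)  */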
11756
fd6c76f4 11757 return NULL_TREE;
0aee4751
KH
11758
11759 case LROTATE_EXPR:
11760 case RROTATE_EXPR:
11761 if (integer_all_onesp (arg0))
db3927fb 11762 return omit_one_operand_loc (loc, type, arg0, arg1);
0aee4751
KH
11763 goto shift;
11764
11765 case RSHIFT_EXPR:
11766 /* Optimize -1 >> x for arithmetic right shifts. */
bd170bbc
RG
11767 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11768 && tree_expr_nonnegative_p (arg1))
db3927fb 11769 return omit_one_operand_loc (loc, type, arg0, arg1);
0aee4751
KH
11770 /* ... fall through ... */
11771
11772 case LSHIFT_EXPR:
11773 shift:
11774 if (integer_zerop (arg1))
db3927fb 11775 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751 11776 if (integer_zerop (arg0))
db3927fb 11777 return omit_one_operand_loc (loc, type, arg0, arg1);
0aee4751
KH
11778
11779 /* Since negative shift count is not well-defined,
11780 don't try to compute it in the compiler. */
11781 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
62ab45cc 11782 return NULL_TREE;
e3d025cb
JM
11783
11784 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
2d60e929 11785 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
e3d025cb
JM
11786 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11787 && host_integerp (TREE_OPERAND (arg0, 1), false)
11788 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11789 {
11790 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11791 + TREE_INT_CST_LOW (arg1));
11792
11793 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11794 being well defined. */
11795 if (low >= TYPE_PRECISION (type))
11796 {
11797 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11798 low = low % TYPE_PRECISION (type);
11799 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
db3927fb 11800 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
2c0eba5a 11801 TREE_OPERAND (arg0, 0));
e3d025cb
JM
11802 else
11803 low = TYPE_PRECISION (type) - 1;
11804 }
11805
db3927fb 11806 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
e3d025cb
JM
11807 build_int_cst (type, low));
11808 }
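 /* E.g. (X >> 3) >> 5 becomes X >> 8; and for an unsigned 32-bit X,
    (X << 20) << 20 becomes 0, because the combined count reaches the
    type precision.  (Illustrative counts.)  */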
11809
a165e746
JM
11810 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11811 into x & ((unsigned)-1 >> c) for unsigned types. */
11812 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11813 || (TYPE_UNSIGNED (type)
11814 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
e3d025cb
JM
11815 && host_integerp (arg1, false)
11816 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11817 && host_integerp (TREE_OPERAND (arg0, 1), false)
11818 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11819 {
11820 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11821 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
e3d025cb
JM
11822 tree lshift;
11823 tree arg00;
11824
11825 if (low0 == low1)
11826 {
db3927fb 11827 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
e3d025cb 11828
a165e746
JM
11829 lshift = build_int_cst (type, -1);
11830 lshift = int_const_binop (code, lshift, arg1, 0);
e3d025cb 11831
db3927fb 11832 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
e3d025cb
JM
11833 }
11834 }
11835
0aee4751
KH
11836 /* Rewrite an LROTATE_EXPR by a constant into an
11837 RROTATE_EXPR by a new constant. */
11838 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11839 {
000d8d44 11840 tree tem = build_int_cst (TREE_TYPE (arg1),
70582b3a 11841 TYPE_PRECISION (type));
43a5d30b 11842 tem = const_binop (MINUS_EXPR, tem, arg1);
db3927fb 11843 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
0aee4751
KH
11844 }
11845
11846 /* If we have a rotate of a bit operation with the rotate count and
11847 the second operand of the bit operation both constant,
11848 permute the two operations. */
11849 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11850 && (TREE_CODE (arg0) == BIT_AND_EXPR
11851 || TREE_CODE (arg0) == BIT_IOR_EXPR
11852 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11853 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
db3927fb
AH
11854 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11855 fold_build2_loc (loc, code, type,
7f20a5b7 11856 TREE_OPERAND (arg0, 0), arg1),
db3927fb 11857 fold_build2_loc (loc, code, type,
7f20a5b7 11858 TREE_OPERAND (arg0, 1), arg1));
0aee4751 11859
70582b3a
RG
11860 /* Two consecutive rotates adding up to the precision of the
11861 type can be ignored. */
0aee4751
KH
11862 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11863 && TREE_CODE (arg0) == RROTATE_EXPR
11864 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11865 && TREE_INT_CST_HIGH (arg1) == 0
11866 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11867 && ((TREE_INT_CST_LOW (arg1)
11868 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
70582b3a 11869 == (unsigned int) TYPE_PRECISION (type)))
0aee4751
KH
11870 return TREE_OPERAND (arg0, 0);
11871
22164c3d
JJ
11872 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11873 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11874 if the latter can be further optimized. */
11875 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11876 && TREE_CODE (arg0) == BIT_AND_EXPR
11877 && TREE_CODE (arg1) == INTEGER_CST
11878 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11879 {
db3927fb
AH
11880 tree mask = fold_build2_loc (loc, code, type,
11881 fold_convert_loc (loc, type,
11882 TREE_OPERAND (arg0, 1)),
22164c3d 11883 arg1);
db3927fb
AH
11884 tree shift = fold_build2_loc (loc, code, type,
11885 fold_convert_loc (loc, type,
11886 TREE_OPERAND (arg0, 0)),
22164c3d 11887 arg1);
db3927fb 11888 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
22164c3d
JJ
11889 if (tem)
11890 return tem;
11891 }
11892
fd6c76f4 11893 return NULL_TREE;
0aee4751
KH
11894
11895 case MIN_EXPR:
11896 if (operand_equal_p (arg0, arg1, 0))
db3927fb 11897 return omit_one_operand_loc (loc, type, arg0, arg1);
0aee4751
KH
11898 if (INTEGRAL_TYPE_P (type)
11899 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
db3927fb
AH
11900 return omit_one_operand_loc (loc, type, arg1, arg0);
11901 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
292f30c5
EB
11902 if (tem)
11903 return tem;
0aee4751
KH
11904 goto associate;
11905
11906 case MAX_EXPR:
11907 if (operand_equal_p (arg0, arg1, 0))
db3927fb 11908 return omit_one_operand_loc (loc, type, arg0, arg1);
0aee4751
KH
11909 if (INTEGRAL_TYPE_P (type)
11910 && TYPE_MAX_VALUE (type)
11911 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
db3927fb
AH
11912 return omit_one_operand_loc (loc, type, arg1, arg0);
11913 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
292f30c5
EB
11914 if (tem)
11915 return tem;
0aee4751
KH
11916 goto associate;
11917
11918 case TRUTH_ANDIF_EXPR:
11919 /* Note that the operands of this must be ints
11920 and their values must be 0 or 1.
11921 ("true" is a fixed value perhaps depending on the language.) */
11922 /* If first arg is constant zero, return it. */
11923 if (integer_zerop (arg0))
db3927fb 11924 return fold_convert_loc (loc, type, arg0);
0aee4751
KH
11925 case TRUTH_AND_EXPR:
11926 /* If either arg is constant true, drop it. */
11927 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
db3927fb 11928 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
0aee4751
KH
11929 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11930 /* Preserve sequence points. */
11931 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
db3927fb 11932 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
11933 /* If second arg is constant zero, result is zero, but first arg
11934 must be evaluated. */
11935 if (integer_zerop (arg1))
db3927fb 11936 return omit_one_operand_loc (loc, type, arg1, arg0);
0aee4751
KH
11937 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11938 case will be handled here. */
11939 if (integer_zerop (arg0))
db3927fb 11940 return omit_one_operand_loc (loc, type, arg0, arg1);
0aee4751
KH
11941
11942 /* !X && X is always false. */
11943 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11944 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
db3927fb 11945 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
0aee4751
KH
11946 /* X && !X is always false. */
11947 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11948 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
db3927fb 11949 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
0aee4751
KH
11950
11951 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11952 means A >= Y && A != MAX, but in this case we know that
11953 A < X <= MAX. */
11954
11955 if (!TREE_SIDE_EFFECTS (arg0)
11956 && !TREE_SIDE_EFFECTS (arg1))
11957 {
db3927fb 11958 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
70a9e64b 11959 if (tem && !operand_equal_p (tem, arg0, 0))
db3927fb 11960 return fold_build2_loc (loc, code, type, tem, arg1);
0aee4751 11961
db3927fb 11962 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
70a9e64b 11963 if (tem && !operand_equal_p (tem, arg1, 0))
db3927fb 11964 return fold_build2_loc (loc, code, type, arg0, tem);
0aee4751
KH
11965 }
11966
11967 truth_andor:
11968 /* We only do these simplifications if we are optimizing. */
11969 if (!optimize)
62ab45cc 11970 return NULL_TREE;
0aee4751
KH
11971
11972 /* Check for things like (A || B) && (A || C). We can convert this
11973 to A || (B && C). Note that either operator can be any of the four
11974 truth and/or operations and the transformation will still be
11975 valid. Also note that we only care about order for the
11976 ANDIF and ORIF operators. If B contains side effects, this
11977 might change the truth-value of A. */
11978 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11979 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11980 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11981 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11982 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11983 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11984 {
11985 tree a00 = TREE_OPERAND (arg0, 0);
11986 tree a01 = TREE_OPERAND (arg0, 1);
11987 tree a10 = TREE_OPERAND (arg1, 0);
11988 tree a11 = TREE_OPERAND (arg1, 1);
11989 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11990 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11991 && (code == TRUTH_AND_EXPR
11992 || code == TRUTH_OR_EXPR));
11993
11994 if (operand_equal_p (a00, a10, 0))
db3927fb
AH
11995 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11996 fold_build2_loc (loc, code, type, a01, a11));
0aee4751 11997 else if (commutative && operand_equal_p (a00, a11, 0))
db3927fb
AH
11998 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11999 fold_build2_loc (loc, code, type, a01, a10));
0aee4751 12000 else if (commutative && operand_equal_p (a01, a10, 0))
db3927fb
AH
12001 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
12002 fold_build2_loc (loc, code, type, a00, a11));
0aee4751
KH
12003
12004 /* This case is tricky because we must either have commutative
12005 operators or else A10 must not have side-effects. */
12006
12007 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12008 && operand_equal_p (a01, a11, 0))
db3927fb
AH
12009 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12010 fold_build2_loc (loc, code, type, a00, a10),
7f20a5b7 12011 a01);
0aee4751
KH
12012 }
12013
12014 /* See if we can build a range comparison. */
db3927fb 12015 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
0aee4751
KH
12016 return tem;
12017
27d0d96a
BS
12018 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
12019 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
12020 {
12021 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
12022 if (tem)
12023 return fold_build2_loc (loc, code, type, tem, arg1);
12024 }
12025
12026 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
12027 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
12028 {
12029 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
12030 if (tem)
12031 return fold_build2_loc (loc, code, type, arg0, tem);
12032 }
12033
0aee4751
KH
12034 /* Check for the possibility of merging component references. If our
12035 lhs is another similar operation, try to merge its rhs with our
12036 rhs. Then try to merge our lhs and rhs. */
12037 if (TREE_CODE (arg0) == code
db3927fb 12038 && 0 != (tem = fold_truthop (loc, code, type,
0aee4751 12039 TREE_OPERAND (arg0, 1), arg1)))
db3927fb 12040 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
0aee4751 12041
db3927fb 12042 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
0aee4751
KH
12043 return tem;
12044
62ab45cc 12045 return NULL_TREE;
0aee4751
KH
12046
12047 case TRUTH_ORIF_EXPR:
12048 /* Note that the operands of this must be ints
12049 and their values must be 0 or true.
12050 ("true" is a fixed value perhaps depending on the language.) */
12051 /* If first arg is constant true, return it. */
12052 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
db3927fb 12053 return fold_convert_loc (loc, type, arg0);
0aee4751
KH
12054 case TRUTH_OR_EXPR:
12055 /* If either arg is constant zero, drop it. */
12056 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
db3927fb 12057 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
0aee4751
KH
12058 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12059 /* Preserve sequence points. */
12060 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
db3927fb 12061 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
12062 /* If second arg is constant true, result is true, but we must
12063 evaluate first arg. */
12064 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
db3927fb 12065 return omit_one_operand_loc (loc, type, arg1, arg0);
0aee4751
KH
12066 /* Likewise for first arg, but note this only occurs here for
12067 TRUTH_OR_EXPR. */
12068 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
db3927fb 12069 return omit_one_operand_loc (loc, type, arg0, arg1);
0aee4751
KH
12070
12071 /* !X || X is always true. */
12072 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12073 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
db3927fb 12074 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
0aee4751
KH
12075 /* X || !X is always true. */
12076 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12077 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
db3927fb 12078 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
0aee4751
KH
12079
12080 goto truth_andor;
12081
12082 case TRUTH_XOR_EXPR:
12083 /* If the second arg is constant zero, drop it. */
12084 if (integer_zerop (arg1))
db3927fb 12085 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751
KH
12086 /* If the second arg is constant true, this is a logical inversion. */
12087 if (integer_onep (arg1))
90ec750d
RS
12088 {
12089 /* Only call invert_truthvalue if operand is a truth value. */
12090 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
db3927fb 12091 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
90ec750d 12092 else
db3927fb
AH
12093 tem = invert_truthvalue_loc (loc, arg0);
12094 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
90ec750d 12095 }
0aee4751
KH
12096 /* Identical arguments cancel to zero. */
12097 if (operand_equal_p (arg0, arg1, 0))
db3927fb 12098 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
0aee4751
KH
12099
12100 /* !X ^ X is always true. */
12101 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12102 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
db3927fb 12103 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
0aee4751
KH
12104
12105 /* X ^ !X is always true. */
12106 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12107 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
db3927fb 12108 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
0aee4751 12109
62ab45cc 12110 return NULL_TREE;
0aee4751
KH
12111
12112 case EQ_EXPR:
12113 case NE_EXPR:
db3927fb 12114 tem = fold_comparison (loc, code, type, op0, op1);
e26ec0bb
RS
12115 if (tem != NULL_TREE)
12116 return tem;
210dfe6e 12117
a7e1c928
AP
12118 /* bool_var != 0 becomes bool_var. */
12119 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12120 && code == NE_EXPR)
db3927fb 12121 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
e26ec0bb 12122
a7e1c928
AP
12123 /* bool_var == 1 becomes bool_var. */
12124 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12125 && code == EQ_EXPR)
db3927fb 12126 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
0aee4751 12127
7934558d
AP
12128 /* bool_var != 1 becomes !bool_var. */
12129 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12130 && code == NE_EXPR)
db3927fb
AH
12131 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12132 fold_convert_loc (loc, type, arg0));
7934558d
AP
12133
12134 /* bool_var == 0 becomes !bool_var. */
12135 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12136 && code == EQ_EXPR)
db3927fb
AH
12137 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12138 fold_convert_loc (loc, type, arg0));
7934558d 12139
44e10129
MM
12140 /* !exp != 0 becomes !exp */
12141 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12142 && code == NE_EXPR)
12143 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12144
0aee4751
KH
12145 /* If this is an equality comparison of the address of two non-weak,
12146 unaliased symbols neither of which are extern (since we do not
12147 have access to attributes for externs), then we know the result. */
e26ec0bb 12148 if (TREE_CODE (arg0) == ADDR_EXPR
820cc88f 12149 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
0aee4751
KH
12150 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12151 && ! lookup_attribute ("alias",
12152 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12153 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12154 && TREE_CODE (arg1) == ADDR_EXPR
820cc88f 12155 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
0aee4751
KH
12156 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12157 && ! lookup_attribute ("alias",
12158 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12159 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
59f7a202
JL
12160 {
12161 /* We know that we're looking at the address of two
12162 non-weak, unaliased, static _DECL nodes.
12163
12164 It is both wasteful and incorrect to call operand_equal_p
12165 to compare the two ADDR_EXPR nodes. It is wasteful in that
12166 all we need to do is test pointer equality for the arguments
12167 to the two ADDR_EXPR nodes. It is incorrect to use
12168 operand_equal_p as that function is NOT equivalent to a
12169 C equality test. It can in fact return false for two
12170 objects which would test as equal using the C equality
12171 operator. */
12172 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12173 return constant_boolean_node (equal
12174 ? code == EQ_EXPR : code != EQ_EXPR,
12175 type);
12176 }
0aee4751 12177
e26ec0bb
RS
12178 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12179 a MINUS_EXPR of a constant, we can convert it into a comparison with
12180 a revised constant as long as no overflow occurs. */
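	 /* For instance, X + 5 == 7 folds to X == 2, and X - 3 != 10 folds
	    to X != 13, provided the adjusted constant does not overflow.  */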
12181 if (TREE_CODE (arg1) == INTEGER_CST
12182 && (TREE_CODE (arg0) == PLUS_EXPR
12183 || TREE_CODE (arg0) == MINUS_EXPR)
12184 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12185 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12186 ? MINUS_EXPR : PLUS_EXPR,
db3927fb
AH
12187 fold_convert_loc (loc, TREE_TYPE (arg0),
12188 arg1),
43a5d30b 12189 TREE_OPERAND (arg0, 1)))
455f14dd 12190 && !TREE_OVERFLOW (tem))
db3927fb 12191 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
0eeb03e6 12192
e26ec0bb
RS
12193 /* Similarly for a NEGATE_EXPR. */
12194 if (TREE_CODE (arg0) == NEGATE_EXPR
12195 && TREE_CODE (arg1) == INTEGER_CST
12196 && 0 != (tem = negate_expr (arg1))
12197 && TREE_CODE (tem) == INTEGER_CST
455f14dd 12198 && !TREE_OVERFLOW (tem))
db3927fb 12199 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
0eeb03e6 12200
cf06e5c1
RS
12201 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12202 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12203 && TREE_CODE (arg1) == INTEGER_CST
12204 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
db3927fb
AH
12205 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12206 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12207 fold_convert_loc (loc,
12208 TREE_TYPE (arg0),
12209 arg1),
cf06e5c1
RS
12210 TREE_OPERAND (arg0, 1)));
12211
6b12efe9
RG
12212 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
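	 /* For instance, X + Y == X folds to Y == 0 and X - Y != X folds to
	    Y != 0; the remaining operands are kept only for their side
	    effects via omit_two_operands.  */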
12213 if ((TREE_CODE (arg0) == PLUS_EXPR
12214 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12215 || TREE_CODE (arg0) == MINUS_EXPR)
a31498d2 12216 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
a31498d2
RG
12217 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12218 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12219 {
6b12efe9 12220 tree val = TREE_OPERAND (arg0, 1);
db3927fb
AH
12221 return omit_two_operands_loc (loc, type,
12222 fold_build2_loc (loc, code, type,
6b12efe9
RG
12223 val,
12224 build_int_cst (TREE_TYPE (val),
12225 0)),
12226 TREE_OPERAND (arg0, 0), arg1);
12227 }
12228
12229 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
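	 /* For instance, 3 - X == X would require 2*X == 3, which is
	    impossible for an odd constant, so the EQ folds to false and the
	    corresponding NE folds to true.  */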
12230 if (TREE_CODE (arg0) == MINUS_EXPR
12231 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12232 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12233 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12234 {
db3927fb 12235 return omit_two_operands_loc (loc, type,
6b12efe9
RG
12236 code == NE_EXPR
12237 ? boolean_true_node : boolean_false_node,
12238 TREE_OPERAND (arg0, 1), arg1);
a31498d2
RG
12239 }
12240
e26ec0bb
RS
12241 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12242 for !=. Don't do this for ordered comparisons due to overflow. */
12243 if (TREE_CODE (arg0) == MINUS_EXPR
12244 && integer_zerop (arg1))
db3927fb 12245 return fold_build2_loc (loc, code, type,
e26ec0bb 12246 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
0eeb03e6 12247
e26ec0bb
RS
12248 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12249 if (TREE_CODE (arg0) == ABS_EXPR
12250 && (integer_zerop (arg1) || real_zerop (arg1)))
db3927fb 12251 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
0eeb03e6 12252
e26ec0bb
RS
12253 /* If this is an EQ or NE comparison with zero and ARG0 is
12254 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12255 two operations, but the latter can be done in one less insn
12256 on machines that have only two-operand insns or on which a
12257 constant cannot be the first operand. */
12258 if (TREE_CODE (arg0) == BIT_AND_EXPR
12259 && integer_zerop (arg1))
12260 {
12261 tree arg00 = TREE_OPERAND (arg0, 0);
12262 tree arg01 = TREE_OPERAND (arg0, 1);
12263 if (TREE_CODE (arg00) == LSHIFT_EXPR
12264 && integer_onep (TREE_OPERAND (arg00, 0)))
5abe9685 12265 {
db3927fb 12266 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
5abe9685 12267 arg01, TREE_OPERAND (arg00, 1));
db3927fb 12268 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
5abe9685 12269 build_int_cst (TREE_TYPE (arg0), 1));
db3927fb
AH
12270 return fold_build2_loc (loc, code, type,
12271 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12272 arg1);
5abe9685
RG
12273 }
12274 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12275 && integer_onep (TREE_OPERAND (arg01, 0)))
12276 {
db3927fb 12277 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
5abe9685 12278 arg00, TREE_OPERAND (arg01, 1));
db3927fb 12279 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
5abe9685 12280 build_int_cst (TREE_TYPE (arg0), 1));
db3927fb
AH
12281 return fold_build2_loc (loc, code, type,
12282 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12283 arg1);
5abe9685 12284 }
e26ec0bb
RS
12285 }
12286
12287 /* If this is an NE or EQ comparison of zero against the result of a
12288 signed MOD operation whose second operand is a power of 2, make
12289 the MOD operation unsigned since it is simpler and equivalent. */
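	 /* For instance, for signed int X, X % 4 == 0 becomes
	    (unsigned int) X % 4 == 0, which a back end can typically lower
	    to a simple mask test such as (X & 3) == 0.  */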
12290 if (integer_zerop (arg1)
12291 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12292 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12293 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12294 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12295 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12296 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12297 {
ca5ba2a3 12298 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
db3927fb
AH
12299 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12300 fold_convert_loc (loc, newtype,
12301 TREE_OPERAND (arg0, 0)),
12302 fold_convert_loc (loc, newtype,
12303 TREE_OPERAND (arg0, 1)));
e26ec0bb 12304
db3927fb
AH
12305 return fold_build2_loc (loc, code, type, newmod,
12306 fold_convert_loc (loc, newtype, arg1));
e26ec0bb
RS
12307 }
12308
a861485c
RS
12309 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12310 C1 is a valid shift constant, and C2 is a power of two, i.e.
12311 a single bit. */
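	 /* For instance, with C1 == 3 and C2 == 4, ((X >> 3) & 4) != 0
	    becomes (X & 32) != 0 since 4 << 3 still fits in the precision;
	    if the shifted mask would overflow the precision, the test
	    reduces to a sign check for signed X, or to a constant with X
	    kept for its side effects when X is unsigned.  */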
12312 if (TREE_CODE (arg0) == BIT_AND_EXPR
12313 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12314 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12315 == INTEGER_CST
12316 && integer_pow2p (TREE_OPERAND (arg0, 1))
12317 && integer_zerop (arg1))
12318 {
12319 tree itype = TREE_TYPE (arg0);
12320 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12321 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12322
12323 /* Check for a valid shift count. */
12324 if (TREE_INT_CST_HIGH (arg001) == 0
12325 && TREE_INT_CST_LOW (arg001) < prec)
12326 {
12327 tree arg01 = TREE_OPERAND (arg0, 1);
12328 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12329 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12330 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12331 can be rewritten as (X & (C2 << C1)) != 0. */
0ad12cd3 12332 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
a861485c 12333 {
db3927fb
AH
12334 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12335 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12336 return fold_build2_loc (loc, code, type, tem, arg1);
a861485c
RS
12337 }
12338 /* Otherwise, for signed (arithmetic) shifts,
12339 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12340 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12341 else if (!TYPE_UNSIGNED (itype))
db3927fb 12342 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
a861485c
RS
12343 arg000, build_int_cst (itype, 0));
 12344 /* Otherwise, for unsigned (logical) shifts,
12345 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12346 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12347 else
db3927fb 12348 return omit_one_operand_loc (loc, type,
a861485c
RS
12349 code == EQ_EXPR ? integer_one_node
12350 : integer_zero_node,
12351 arg000);
12352 }
12353 }
12354
e26ec0bb
RS
12355 /* If this is an NE comparison of zero with an AND of one, remove the
12356 comparison since the AND will give the correct value. */
12357 if (code == NE_EXPR
12358 && integer_zerop (arg1)
12359 && TREE_CODE (arg0) == BIT_AND_EXPR
12360 && integer_onep (TREE_OPERAND (arg0, 1)))
db3927fb 12361 return fold_convert_loc (loc, type, arg0);
e26ec0bb
RS
12362
12363 /* If we have (A & C) == C where C is a power of 2, convert this into
12364 (A & C) != 0. Similarly for NE_EXPR. */
12365 if (TREE_CODE (arg0) == BIT_AND_EXPR
12366 && integer_pow2p (TREE_OPERAND (arg0, 1))
12367 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
db3927fb
AH
12368 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12369 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12370 integer_zero_node));
e26ec0bb
RS
12371
12372 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12373 bit, then fold the expression into A < 0 or A >= 0. */
db3927fb 12374 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
e26ec0bb
RS
12375 if (tem)
12376 return tem;
12377
12378 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12379 Similarly for NE_EXPR. */
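	 /* For instance, (X & 3) == 4 can never hold because 4 has a bit
	    set outside the mask 3, so it folds to false (and the matching
	    NE folds to true).  */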
12380 if (TREE_CODE (arg0) == BIT_AND_EXPR
12381 && TREE_CODE (arg1) == INTEGER_CST
12382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12383 {
db3927fb 12384 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
e26ec0bb
RS
12385 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12386 TREE_OPERAND (arg0, 1));
db3927fb 12387 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
e26ec0bb
RS
12388 arg1, notc);
12389 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12390 if (integer_nonzerop (dandnotc))
db3927fb 12391 return omit_one_operand_loc (loc, type, rslt, arg0);
e26ec0bb
RS
12392 }
12393
12394 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12395 Similarly for NE_EXPR. */
12396 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12397 && TREE_CODE (arg1) == INTEGER_CST
12398 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12399 {
db3927fb
AH
12400 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12401 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
e26ec0bb
RS
12402 TREE_OPERAND (arg0, 1), notd);
12403 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12404 if (integer_nonzerop (candnotd))
db3927fb 12405 return omit_one_operand_loc (loc, type, rslt, arg0);
e26ec0bb
RS
12406 }
12407
45dc13b9
JJ
12408 /* If this is a comparison of a field, we may be able to simplify it. */
12409 if ((TREE_CODE (arg0) == COMPONENT_REF
12410 || TREE_CODE (arg0) == BIT_FIELD_REF)
12411 /* Handle the constant case even without -O
12412 to make sure the warnings are given. */
12413 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12414 {
db3927fb 12415 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
45dc13b9
JJ
12416 if (t1)
12417 return t1;
12418 }
12419
e26ec0bb
RS
12420 /* Optimize comparisons of strlen vs zero to a compare of the
12421 first character of the string vs zero. To wit,
12422 strlen(ptr) == 0 => *ptr == 0
12423 strlen(ptr) != 0 => *ptr != 0
12424 Other cases should reduce to one of these two (or a constant)
12425 due to the return value of strlen being unsigned. */
12426 if (TREE_CODE (arg0) == CALL_EXPR
12427 && integer_zerop (arg1))
12428 {
12429 tree fndecl = get_callee_fndecl (arg0);
e26ec0bb
RS
12430
12431 if (fndecl
12432 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12433 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
5039610b
SL
12434 && call_expr_nargs (arg0) == 1
12435 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
e26ec0bb 12436 {
db3927fb
AH
12437 tree iref = build_fold_indirect_ref_loc (loc,
12438 CALL_EXPR_ARG (arg0, 0));
12439 return fold_build2_loc (loc, code, type, iref,
e26ec0bb
RS
12440 build_int_cst (TREE_TYPE (iref), 0));
12441 }
12442 }
12443
12444 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12445 of X. Similarly fold (X >> C) == 0 into X >= 0. */
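	 /* For instance, for a 32-bit int X, (X >> 31) != 0 becomes X < 0
	    and (X >> 31) == 0 becomes X >= 0; an unsigned X is first
	    converted to the corresponding signed type.  */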
12446 if (TREE_CODE (arg0) == RSHIFT_EXPR
12447 && integer_zerop (arg1)
12448 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12449 {
12450 tree arg00 = TREE_OPERAND (arg0, 0);
12451 tree arg01 = TREE_OPERAND (arg0, 1);
12452 tree itype = TREE_TYPE (arg00);
12453 if (TREE_INT_CST_HIGH (arg01) == 0
12454 && TREE_INT_CST_LOW (arg01)
12455 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12456 {
12457 if (TYPE_UNSIGNED (itype))
12458 {
12753674 12459 itype = signed_type_for (itype);
db3927fb 12460 arg00 = fold_convert_loc (loc, itype, arg00);
e26ec0bb 12461 }
db3927fb 12462 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
e26ec0bb
RS
12463 type, arg00, build_int_cst (itype, 0));
12464 }
12465 }
12466
eb8dffe0
RS
12467 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12468 if (integer_zerop (arg1)
12469 && TREE_CODE (arg0) == BIT_XOR_EXPR)
db3927fb 12470 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
eb8dffe0
RS
12471 TREE_OPERAND (arg0, 1));
12472
12473 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12474 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12475 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
db3927fb 12476 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
eb8dffe0
RS
12477 build_int_cst (TREE_TYPE (arg1), 0));
12478 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12479 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12480 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12481 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
db3927fb 12482 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
eb8dffe0
RS
12483 build_int_cst (TREE_TYPE (arg1), 0));
12484
12485 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12486 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12487 && TREE_CODE (arg1) == INTEGER_CST
12488 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
db3927fb
AH
12489 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12490 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
eb8dffe0
RS
12491 TREE_OPERAND (arg0, 1), arg1));
12492
5881ad5d
RS
12493 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12494 (X & C) == 0 when C is a single bit. */
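	 /* For instance, (~X & 8) == 0 becomes (X & 8) != 0: bit 3 is clear
	    in ~X exactly when it is set in X.  */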
12495 if (TREE_CODE (arg0) == BIT_AND_EXPR
12496 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12497 && integer_zerop (arg1)
12498 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12499 {
db3927fb 12500 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
5881ad5d
RS
12501 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12502 TREE_OPERAND (arg0, 1));
db3927fb 12503 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
5881ad5d
RS
12504 type, tem, arg1);
12505 }
12506
12507 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12508 constant C is a power of two, i.e. a single bit. */
12509 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12510 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12511 && integer_zerop (arg1)
12512 && integer_pow2p (TREE_OPERAND (arg0, 1))
12513 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12514 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12515 {
12516 tree arg00 = TREE_OPERAND (arg0, 0);
db3927fb 12517 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
5881ad5d
RS
12518 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12519 }
12520
12521 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
 12522 when C is a power of two, i.e. a single bit. */
12523 if (TREE_CODE (arg0) == BIT_AND_EXPR
12524 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12525 && integer_zerop (arg1)
12526 && integer_pow2p (TREE_OPERAND (arg0, 1))
12527 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12528 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12529 {
12530 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
db3927fb 12531 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
5881ad5d 12532 arg000, TREE_OPERAND (arg0, 1));
db3927fb 12533 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
5881ad5d
RS
12534 tem, build_int_cst (TREE_TYPE (tem), 0));
12535 }
12536
e26ec0bb
RS
12537 if (integer_zerop (arg1)
12538 && tree_expr_nonzero_p (arg0))
12539 {
12540 tree res = constant_boolean_node (code==NE_EXPR, type);
db3927fb 12541 return omit_one_operand_loc (loc, type, res, arg0);
e26ec0bb 12542 }
c159ffe7
RS
12543
12544 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12545 if (TREE_CODE (arg0) == NEGATE_EXPR
12546 && TREE_CODE (arg1) == NEGATE_EXPR)
db3927fb 12547 return fold_build2_loc (loc, code, type,
c159ffe7
RS
12548 TREE_OPERAND (arg0, 0),
12549 TREE_OPERAND (arg1, 0));
12550
015e23f4
RS
 12551 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries. */
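	 /* For instance, (X & 0xff) == (Y & 0xff) becomes
	    ((X ^ Y) & 0xff) == 0, since the masked bits agree exactly when
	    their XOR has no bits set under the mask.  */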
12552 if (TREE_CODE (arg0) == BIT_AND_EXPR
12553 && TREE_CODE (arg1) == BIT_AND_EXPR)
12554 {
12555 tree arg00 = TREE_OPERAND (arg0, 0);
12556 tree arg01 = TREE_OPERAND (arg0, 1);
12557 tree arg10 = TREE_OPERAND (arg1, 0);
12558 tree arg11 = TREE_OPERAND (arg1, 1);
12559 tree itype = TREE_TYPE (arg0);
12560
12561 if (operand_equal_p (arg01, arg11, 0))
db3927fb
AH
12562 return fold_build2_loc (loc, code, type,
12563 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12564 fold_build2_loc (loc,
12565 BIT_XOR_EXPR, itype,
015e23f4
RS
12566 arg00, arg10),
12567 arg01),
12568 build_int_cst (itype, 0));
12569
12570 if (operand_equal_p (arg01, arg10, 0))
db3927fb
AH
12571 return fold_build2_loc (loc, code, type,
12572 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12573 fold_build2_loc (loc,
12574 BIT_XOR_EXPR, itype,
015e23f4
RS
12575 arg00, arg11),
12576 arg01),
12577 build_int_cst (itype, 0));
12578
12579 if (operand_equal_p (arg00, arg11, 0))
db3927fb
AH
12580 return fold_build2_loc (loc, code, type,
12581 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12582 fold_build2_loc (loc,
12583 BIT_XOR_EXPR, itype,
015e23f4
RS
12584 arg01, arg10),
12585 arg00),
12586 build_int_cst (itype, 0));
12587
12588 if (operand_equal_p (arg00, arg10, 0))
db3927fb
AH
12589 return fold_build2_loc (loc, code, type,
12590 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12591 fold_build2_loc (loc,
12592 BIT_XOR_EXPR, itype,
015e23f4
RS
12593 arg01, arg11),
12594 arg00),
12595 build_int_cst (itype, 0));
12596 }
12597
cf06e5c1
RS
12598 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12599 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12600 {
12601 tree arg00 = TREE_OPERAND (arg0, 0);
12602 tree arg01 = TREE_OPERAND (arg0, 1);
12603 tree arg10 = TREE_OPERAND (arg1, 0);
12604 tree arg11 = TREE_OPERAND (arg1, 1);
12605 tree itype = TREE_TYPE (arg0);
12606
12607 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12608 operand_equal_p guarantees no side-effects so we don't need
12609 to use omit_one_operand on Z. */
12610 if (operand_equal_p (arg01, arg11, 0))
db3927fb 12611 return fold_build2_loc (loc, code, type, arg00, arg10);
cf06e5c1 12612 if (operand_equal_p (arg01, arg10, 0))
db3927fb 12613 return fold_build2_loc (loc, code, type, arg00, arg11);
cf06e5c1 12614 if (operand_equal_p (arg00, arg11, 0))
db3927fb 12615 return fold_build2_loc (loc, code, type, arg01, arg10);
cf06e5c1 12616 if (operand_equal_p (arg00, arg10, 0))
db3927fb 12617 return fold_build2_loc (loc, code, type, arg01, arg11);
cf06e5c1
RS
12618
12619 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
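	 /* For instance, (X ^ 5) == (Y ^ 3) becomes (X ^ (5 ^ 3)) == Y,
	    i.e. (X ^ 6) == Y.  */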
12620 if (TREE_CODE (arg01) == INTEGER_CST
12621 && TREE_CODE (arg11) == INTEGER_CST)
db3927fb
AH
12622 return fold_build2_loc (loc, code, type,
12623 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
12624 fold_build2_loc (loc,
12625 BIT_XOR_EXPR, itype,
cf06e5c1
RS
12626 arg01, arg11)),
12627 arg10);
12628 }
23b9463b
RS
12629
12630 /* Attempt to simplify equality/inequality comparisons of complex
12631 values. Only lower the comparison if the result is known or
12632 can be simplified to a single scalar comparison. */
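	 /* For instance, if the real parts are known to compare unequal,
	    an EQ_EXPR of two complex values folds to false with the
	    imaginary parts kept only for their side effects; if they are
	    known equal, the comparison reduces to comparing the imaginary
	    parts alone.  */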
12633 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12634 || TREE_CODE (arg0) == COMPLEX_CST)
12635 && (TREE_CODE (arg1) == COMPLEX_EXPR
12636 || TREE_CODE (arg1) == COMPLEX_CST))
12637 {
12638 tree real0, imag0, real1, imag1;
12639 tree rcond, icond;
12640
12641 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12642 {
12643 real0 = TREE_OPERAND (arg0, 0);
12644 imag0 = TREE_OPERAND (arg0, 1);
12645 }
12646 else
12647 {
12648 real0 = TREE_REALPART (arg0);
12649 imag0 = TREE_IMAGPART (arg0);
12650 }
12651
12652 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12653 {
12654 real1 = TREE_OPERAND (arg1, 0);
12655 imag1 = TREE_OPERAND (arg1, 1);
12656 }
12657 else
12658 {
12659 real1 = TREE_REALPART (arg1);
12660 imag1 = TREE_IMAGPART (arg1);
12661 }
12662
db3927fb 12663 rcond = fold_binary_loc (loc, code, type, real0, real1);
23b9463b
RS
12664 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12665 {
12666 if (integer_zerop (rcond))
12667 {
12668 if (code == EQ_EXPR)
db3927fb 12669 return omit_two_operands_loc (loc, type, boolean_false_node,
23b9463b 12670 imag0, imag1);
db3927fb 12671 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
23b9463b
RS
12672 }
12673 else
12674 {
12675 if (code == NE_EXPR)
db3927fb 12676 return omit_two_operands_loc (loc, type, boolean_true_node,
23b9463b 12677 imag0, imag1);
db3927fb 12678 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
23b9463b
RS
12679 }
12680 }
12681
db3927fb 12682 icond = fold_binary_loc (loc, code, type, imag0, imag1);
23b9463b
RS
12683 if (icond && TREE_CODE (icond) == INTEGER_CST)
12684 {
12685 if (integer_zerop (icond))
12686 {
12687 if (code == EQ_EXPR)
db3927fb 12688 return omit_two_operands_loc (loc, type, boolean_false_node,
23b9463b 12689 real0, real1);
db3927fb 12690 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
23b9463b
RS
12691 }
12692 else
12693 {
12694 if (code == NE_EXPR)
db3927fb 12695 return omit_two_operands_loc (loc, type, boolean_true_node,
23b9463b 12696 real0, real1);
db3927fb 12697 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
23b9463b
RS
12698 }
12699 }
12700 }
12701
e26ec0bb
RS
12702 return NULL_TREE;
12703
12704 case LT_EXPR:
12705 case GT_EXPR:
12706 case LE_EXPR:
12707 case GE_EXPR:
db3927fb 12708 tem = fold_comparison (loc, code, type, op0, op1);
e26ec0bb
RS
12709 if (tem != NULL_TREE)
12710 return tem;
12711
12712 /* Transform comparisons of the form X +- C CMP X. */
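	 /* For instance, for signed X, X + 1 > X folds to true and
	    X - 1 >= X folds to false; since this relies on signed overflow
	    being undefined, fold_overflow_warning records the assumption
	    used for -Wstrict-overflow diagnostics.  */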
12713 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12714 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12715 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12716 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12717 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
eeef0e45 12718 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
e26ec0bb
RS
12719 {
12720 tree arg01 = TREE_OPERAND (arg0, 1);
12721 enum tree_code code0 = TREE_CODE (arg0);
12722 int is_positive;
12723
12724 if (TREE_CODE (arg01) == REAL_CST)
12725 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12726 else
12727 is_positive = tree_int_cst_sgn (arg01);
12728
12729 /* (X - c) > X becomes false. */
12730 if (code == GT_EXPR
12731 && ((code0 == MINUS_EXPR && is_positive >= 0)
12732 || (code0 == PLUS_EXPR && is_positive <= 0)))
6ac01510
ILT
12733 {
12734 if (TREE_CODE (arg01) == INTEGER_CST
12735 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12736 fold_overflow_warning (("assuming signed overflow does not "
12737 "occur when assuming that (X - c) > X "
12738 "is always false"),
12739 WARN_STRICT_OVERFLOW_ALL);
12740 return constant_boolean_node (0, type);
12741 }
e26ec0bb
RS
12742
12743 /* Likewise (X + c) < X becomes false. */
12744 if (code == LT_EXPR
12745 && ((code0 == PLUS_EXPR && is_positive >= 0)
12746 || (code0 == MINUS_EXPR && is_positive <= 0)))
6ac01510
ILT
12747 {
12748 if (TREE_CODE (arg01) == INTEGER_CST
12749 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12750 fold_overflow_warning (("assuming signed overflow does not "
12751 "occur when assuming that "
12752 "(X + c) < X is always false"),
12753 WARN_STRICT_OVERFLOW_ALL);
12754 return constant_boolean_node (0, type);
12755 }
e26ec0bb
RS
12756
12757 /* Convert (X - c) <= X to true. */
12758 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12759 && code == LE_EXPR
0eeb03e6
JM
12760 && ((code0 == MINUS_EXPR && is_positive >= 0)
12761 || (code0 == PLUS_EXPR && is_positive <= 0)))
6ac01510
ILT
12762 {
12763 if (TREE_CODE (arg01) == INTEGER_CST
12764 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12765 fold_overflow_warning (("assuming signed overflow does not "
12766 "occur when assuming that "
12767 "(X - c) <= X is always true"),
12768 WARN_STRICT_OVERFLOW_ALL);
12769 return constant_boolean_node (1, type);
12770 }
0eeb03e6
JM
12771
12772 /* Convert (X + c) >= X to true. */
12773 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12774 && code == GE_EXPR
12775 && ((code0 == PLUS_EXPR && is_positive >= 0)
12776 || (code0 == MINUS_EXPR && is_positive <= 0)))
6ac01510
ILT
12777 {
12778 if (TREE_CODE (arg01) == INTEGER_CST
12779 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12780 fold_overflow_warning (("assuming signed overflow does not "
12781 "occur when assuming that "
12782 "(X + c) >= X is always true"),
12783 WARN_STRICT_OVERFLOW_ALL);
12784 return constant_boolean_node (1, type);
12785 }
0eeb03e6
JM
12786
12787 if (TREE_CODE (arg01) == INTEGER_CST)
12788 {
12789 /* Convert X + c > X and X - c < X to true for integers. */
12790 if (code == GT_EXPR
12791 && ((code0 == PLUS_EXPR && is_positive > 0)
12792 || (code0 == MINUS_EXPR && is_positive < 0)))
6ac01510
ILT
12793 {
12794 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12795 fold_overflow_warning (("assuming signed overflow does "
12796 "not occur when assuming that "
12797 "(X + c) > X is always true"),
12798 WARN_STRICT_OVERFLOW_ALL);
12799 return constant_boolean_node (1, type);
12800 }
0eeb03e6
JM
12801
12802 if (code == LT_EXPR
12803 && ((code0 == MINUS_EXPR && is_positive > 0)
12804 || (code0 == PLUS_EXPR && is_positive < 0)))
6ac01510
ILT
12805 {
12806 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12807 fold_overflow_warning (("assuming signed overflow does "
12808 "not occur when assuming that "
12809 "(X - c) < X is always true"),
12810 WARN_STRICT_OVERFLOW_ALL);
12811 return constant_boolean_node (1, type);
12812 }
0eeb03e6
JM
12813
12814 /* Convert X + c <= X and X - c >= X to false for integers. */
12815 if (code == LE_EXPR
12816 && ((code0 == PLUS_EXPR && is_positive > 0)
12817 || (code0 == MINUS_EXPR && is_positive < 0)))
6ac01510
ILT
12818 {
12819 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12820 fold_overflow_warning (("assuming signed overflow does "
12821 "not occur when assuming that "
12822 "(X + c) <= X is always false"),
12823 WARN_STRICT_OVERFLOW_ALL);
12824 return constant_boolean_node (0, type);
12825 }
0eeb03e6
JM
12826
12827 if (code == GE_EXPR
12828 && ((code0 == MINUS_EXPR && is_positive > 0)
12829 || (code0 == PLUS_EXPR && is_positive < 0)))
6ac01510
ILT
12830 {
12831 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12832 fold_overflow_warning (("assuming signed overflow does "
12833 "not occur when assuming that "
f870ab63 12834 "(X - c) >= X is always false"),
6ac01510
ILT
12835 WARN_STRICT_OVERFLOW_ALL);
12836 return constant_boolean_node (0, type);
12837 }
0eeb03e6
JM
12838 }
12839 }
12840
0aee4751 12841 /* Comparisons with the highest or lowest possible integer of
f0dbdfbb 12842 the specified precision will have known values. */
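      /* For instance, for unsigned char X, X > 255 folds to false,
	 X <= 255 folds to true, and X > 254 becomes X == 255.  */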
0aee4751 12843 {
f0dbdfbb
EB
12844 tree arg1_type = TREE_TYPE (arg1);
12845 unsigned int width = TYPE_PRECISION (arg1_type);
0aee4751
KH
12846
12847 if (TREE_CODE (arg1) == INTEGER_CST
0aee4751 12848 && width <= 2 * HOST_BITS_PER_WIDE_INT
f0dbdfbb 12849 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
0aee4751
KH
12850 {
12851 HOST_WIDE_INT signed_max_hi;
12852 unsigned HOST_WIDE_INT signed_max_lo;
12853 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12854
12855 if (width <= HOST_BITS_PER_WIDE_INT)
12856 {
12857 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12858 - 1;
12859 signed_max_hi = 0;
12860 max_hi = 0;
12861
f0dbdfbb 12862 if (TYPE_UNSIGNED (arg1_type))
0aee4751
KH
12863 {
12864 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12865 min_lo = 0;
12866 min_hi = 0;
12867 }
12868 else
12869 {
12870 max_lo = signed_max_lo;
12871 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12872 min_hi = -1;
12873 }
12874 }
12875 else
12876 {
12877 width -= HOST_BITS_PER_WIDE_INT;
12878 signed_max_lo = -1;
12879 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12880 - 1;
12881 max_lo = -1;
12882 min_lo = 0;
12883
f0dbdfbb 12884 if (TYPE_UNSIGNED (arg1_type))
0aee4751
KH
12885 {
12886 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12887 min_hi = 0;
12888 }
12889 else
12890 {
12891 max_hi = signed_max_hi;
12892 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12893 }
12894 }
12895
12896 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12897 && TREE_INT_CST_LOW (arg1) == max_lo)
12898 switch (code)
12899 {
12900 case GT_EXPR:
db3927fb 12901 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
0aee4751
KH
12902
12903 case GE_EXPR:
db3927fb 12904 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
0aee4751
KH
12905
12906 case LE_EXPR:
db3927fb 12907 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
0aee4751
KH
12908
12909 case LT_EXPR:
db3927fb 12910 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
0aee4751
KH
12911
12912 /* The GE_EXPR and LT_EXPR cases above are not normally
12913 reached because of previous transformations. */
12914
12915 default:
12916 break;
12917 }
12918 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12919 == max_hi
12920 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12921 switch (code)
12922 {
12923 case GT_EXPR:
000d8d44 12924 arg1 = const_binop (PLUS_EXPR, arg1,
43a5d30b 12925 build_int_cst (TREE_TYPE (arg1), 1));
db3927fb
AH
12926 return fold_build2_loc (loc, EQ_EXPR, type,
12927 fold_convert_loc (loc,
12928 TREE_TYPE (arg1), arg0),
86122f72 12929 arg1);
0aee4751 12930 case LE_EXPR:
000d8d44 12931 arg1 = const_binop (PLUS_EXPR, arg1,
43a5d30b 12932 build_int_cst (TREE_TYPE (arg1), 1));
db3927fb
AH
12933 return fold_build2_loc (loc, NE_EXPR, type,
12934 fold_convert_loc (loc, TREE_TYPE (arg1),
12935 arg0),
86122f72 12936 arg1);
0aee4751
KH
12937 default:
12938 break;
12939 }
12940 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12941 == min_hi
12942 && TREE_INT_CST_LOW (arg1) == min_lo)
12943 switch (code)
12944 {
12945 case LT_EXPR:
db3927fb 12946 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
0aee4751
KH
12947
12948 case LE_EXPR:
db3927fb 12949 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
0aee4751
KH
12950
12951 case GE_EXPR:
db3927fb 12952 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
0aee4751
KH
12953
12954 case GT_EXPR:
db3927fb 12955 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
0aee4751
KH
12956
12957 default:
12958 break;
12959 }
12960 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12961 == min_hi
12962 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12963 switch (code)
12964 {
12965 case GE_EXPR:
43a5d30b 12966 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
db3927fb
AH
12967 return fold_build2_loc (loc, NE_EXPR, type,
12968 fold_convert_loc (loc,
12969 TREE_TYPE (arg1), arg0),
86122f72 12970 arg1);
0aee4751 12971 case LT_EXPR:
43a5d30b 12972 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
db3927fb
AH
12973 return fold_build2_loc (loc, EQ_EXPR, type,
12974 fold_convert_loc (loc, TREE_TYPE (arg1),
12975 arg0),
86122f72 12976 arg1);
0aee4751
KH
12977 default:
12978 break;
12979 }
12980
5cdc4a26 12981 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
0aee4751 12982 && TREE_INT_CST_LOW (arg1) == signed_max_lo
f0dbdfbb
EB
12983 && TYPE_UNSIGNED (arg1_type)
12984 /* We will flip the signedness of the comparison operator
12985 associated with the mode of arg1, so the sign bit is
12986 specified by this mode. Check that arg1 is the signed
12987 max associated with this sign bit. */
12988 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
0aee4751 12989 /* signed_type does not work on pointer types. */
f0dbdfbb 12990 && INTEGRAL_TYPE_P (arg1_type))
0aee4751
KH
12991 {
12992 /* The following case also applies to X < signed_max+1
 12993 and X >= signed_max+1 because of previous transformations. */
12994 if (code == LE_EXPR || code == GT_EXPR)
12995 {
86122f72 12996 tree st;
12753674 12997 st = signed_type_for (TREE_TYPE (arg1));
db3927fb
AH
12998 return fold_build2_loc (loc,
12999 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13000 type, fold_convert_loc (loc, st, arg0),
86122f72 13001 build_int_cst (st, 0));
0aee4751
KH
13002 }
13003 }
13004 }
13005 }
13006
0aee4751
KH
13007 /* If we are comparing an ABS_EXPR with a constant, we can
13008 convert all the cases into explicit comparisons, but they may
13009 well not be faster than doing the ABS and one comparison.
13010 But ABS (X) <= C is a range comparison, which becomes a subtraction
13011 and a comparison, and is probably faster. */
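      /* For instance, ABS_EXPR <X> <= 5 becomes the range test
	 X >= -5 && X <= 5, built as a TRUTH_ANDIF_EXPR of the two
	 comparisons.  */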
e26ec0bb
RS
13012 if (code == LE_EXPR
13013 && TREE_CODE (arg1) == INTEGER_CST
13014 && TREE_CODE (arg0) == ABS_EXPR
13015 && ! TREE_SIDE_EFFECTS (arg0)
13016 && (0 != (tem = negate_expr (arg1)))
13017 && TREE_CODE (tem) == INTEGER_CST
455f14dd 13018 && !TREE_OVERFLOW (tem))
db3927fb 13019 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
7f20a5b7
KH
13020 build2 (GE_EXPR, type,
13021 TREE_OPERAND (arg0, 0), tem),
13022 build2 (LE_EXPR, type,
13023 TREE_OPERAND (arg0, 0), arg1));
0aee4751
KH
13024
13025 /* Convert ABS_EXPR<x> >= 0 to true. */
6ac01510 13026 strict_overflow_p = false;
e26ec0bb 13027 if (code == GE_EXPR
e26ec0bb
RS
13028 && (integer_zerop (arg1)
13029 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6ac01510
ILT
13030 && real_zerop (arg1)))
13031 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13032 {
13033 if (strict_overflow_p)
13034 fold_overflow_warning (("assuming signed overflow does not occur "
13035 "when simplifying comparison of "
13036 "absolute value and zero"),
13037 WARN_STRICT_OVERFLOW_CONDITIONAL);
db3927fb 13038 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6ac01510 13039 }
0aee4751
KH
13040
13041 /* Convert ABS_EXPR<x> < 0 to false. */
6ac01510 13042 strict_overflow_p = false;
e26ec0bb 13043 if (code == LT_EXPR
6ac01510
ILT
13044 && (integer_zerop (arg1) || real_zerop (arg1))
13045 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13046 {
13047 if (strict_overflow_p)
13048 fold_overflow_warning (("assuming signed overflow does not occur "
13049 "when simplifying comparison of "
13050 "absolute value and zero"),
13051 WARN_STRICT_OVERFLOW_CONDITIONAL);
db3927fb 13052 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6ac01510 13053 }
0aee4751 13054
0aee4751
KH
13055 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13056 and similarly for >= into !=. */
13057 if ((code == LT_EXPR || code == GE_EXPR)
13058 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13059 && TREE_CODE (arg1) == LSHIFT_EXPR
13060 && integer_onep (TREE_OPERAND (arg1, 0)))
c9019218
JJ
13061 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13062 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13063 TREE_OPERAND (arg1, 1)),
13064 build_int_cst (TREE_TYPE (arg0), 0));
0aee4751 13065
e26ec0bb
RS
13066 if ((code == LT_EXPR || code == GE_EXPR)
13067 && TYPE_UNSIGNED (TREE_TYPE (arg0))
1043771b 13068 && CONVERT_EXPR_P (arg1)
e26ec0bb
RS
13069 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13070 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
db3927fb 13071 {
c9019218
JJ
13072 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13073 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13074 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13075 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13076 build_int_cst (TREE_TYPE (arg0), 0));
db3927fb 13077 }
0aee4751 13078
e26ec0bb 13079 return NULL_TREE;
0aee4751
KH
13080
13081 case UNORDERED_EXPR:
13082 case ORDERED_EXPR:
13083 case UNLT_EXPR:
13084 case UNLE_EXPR:
13085 case UNGT_EXPR:
13086 case UNGE_EXPR:
13087 case UNEQ_EXPR:
13088 case LTGT_EXPR:
13089 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13090 {
13091 t1 = fold_relational_const (code, type, arg0, arg1);
13092 if (t1 != NULL_TREE)
13093 return t1;
13094 }
13095
13096 /* If the first operand is NaN, the result is constant. */
13097 if (TREE_CODE (arg0) == REAL_CST
13098 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13099 && (code != LTGT_EXPR || ! flag_trapping_math))
13100 {
13101 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13102 ? integer_zero_node
13103 : integer_one_node;
db3927fb 13104 return omit_one_operand_loc (loc, type, t1, arg1);
0aee4751
KH
13105 }
13106
13107 /* If the second operand is NaN, the result is constant. */
13108 if (TREE_CODE (arg1) == REAL_CST
13109 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13110 && (code != LTGT_EXPR || ! flag_trapping_math))
13111 {
13112 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13113 ? integer_zero_node
13114 : integer_one_node;
db3927fb 13115 return omit_one_operand_loc (loc, type, t1, arg0);
0aee4751
KH
13116 }
13117
13118 /* Simplify unordered comparison of something with itself. */
13119 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13120 && operand_equal_p (arg0, arg1, 0))
13121 return constant_boolean_node (1, type);
13122
13123 if (code == LTGT_EXPR
13124 && !flag_trapping_math
13125 && operand_equal_p (arg0, arg1, 0))
13126 return constant_boolean_node (0, type);
13127
13128 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13129 {
13130 tree targ0 = strip_float_extensions (arg0);
13131 tree targ1 = strip_float_extensions (arg1);
13132 tree newtype = TREE_TYPE (targ0);
13133
13134 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13135 newtype = TREE_TYPE (targ1);
13136
13137 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
db3927fb
AH
13138 return fold_build2_loc (loc, code, type,
13139 fold_convert_loc (loc, newtype, targ0),
13140 fold_convert_loc (loc, newtype, targ1));
0aee4751
KH
13141 }
13142
62ab45cc 13143 return NULL_TREE;
0aee4751
KH
13144
13145 case COMPOUND_EXPR:
13146 /* When pedantic, a compound expression can be neither an lvalue
13147 nor an integer constant expression. */
13148 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
62ab45cc 13149 return NULL_TREE;
0aee4751
KH
13150 /* Don't let (0, 0) be null pointer constant. */
13151 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
db3927fb
AH
13152 : fold_convert_loc (loc, type, arg1);
13153 return pedantic_non_lvalue_loc (loc, tem);
0aee4751
KH
13154
13155 case COMPLEX_EXPR:
fd6c76f4
RS
13156 if ((TREE_CODE (arg0) == REAL_CST
13157 && TREE_CODE (arg1) == REAL_CST)
13158 || (TREE_CODE (arg0) == INTEGER_CST
13159 && TREE_CODE (arg1) == INTEGER_CST))
0aee4751 13160 return build_complex (type, arg0, arg1);
62ab45cc 13161 return NULL_TREE;
0aee4751 13162
cb4819f0
KH
13163 case ASSERT_EXPR:
13164 /* An ASSERT_EXPR should never be passed to fold_binary. */
13165 gcc_unreachable ();
13166
0aee4751 13167 default:
62ab45cc 13168 return NULL_TREE;
0aee4751
KH
13169 } /* switch (code) */
13170}
13171
c703e618
EB
13172/* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13173 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13174 of GOTO_EXPR. */
8c900457
GL
13175
13176static tree
c703e618 13177contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
8c900457
GL
13178{
13179 switch (TREE_CODE (*tp))
13180 {
13181 case LABEL_EXPR:
13182 return *tp;
c703e618 13183
8c900457
GL
13184 case GOTO_EXPR:
13185 *walk_subtrees = 0;
c703e618
EB
13186
13187 /* ... fall through ... */
13188
8c900457
GL
13189 default:
13190 return NULL_TREE;
13191 }
13192}
13193
c703e618
EB
13194/* Return whether the sub-tree ST contains a label which is accessible from
13195 outside the sub-tree. */
8c900457
GL
13196
13197static bool
13198contains_label_p (tree st)
13199{
c703e618
EB
13200 return
13201 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
8c900457
GL
13202}
13203
7cf57259
KH
13204/* Fold a ternary expression of code CODE and type TYPE with operands
13205 OP0, OP1, and OP2. Return the folded expression if folding is
13206 successful. Otherwise, return NULL_TREE. */
9bdae6af 13207
721425b6 13208tree
db3927fb 13209fold_ternary_loc (location_t loc, enum tree_code code, tree type,
16949072 13210 tree op0, tree op1, tree op2)
9bdae6af 13211{
9bdae6af 13212 tree tem;
16949072 13213 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
9bdae6af 13214 enum tree_code_class kind = TREE_CODE_CLASS (code);
9bdae6af
KH
13215
13216 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13217 && TREE_CODE_LENGTH (code) == 3);
13218
3ea2c264
KH
13219 /* Strip any conversions that don't change the mode. This is safe
13220 for every expression, except for a comparison expression because
13221 its signedness is derived from its operands. So, in the latter
13222 case, only strip conversions that don't change the signedness.
9bdae6af 13223
3ea2c264
KH
13224 Note that this is done as an internal manipulation within the
13225 constant folder, in order to find the simplest representation of
 13226 the arguments so that their form can be studied. In any case,
13227 the appropriate type conversions should be put back in the tree
13228 that will get out of the constant folder. */
13229 if (op0)
13230 {
13231 arg0 = op0;
13232 STRIP_NOPS (arg0);
13233 }
9bdae6af 13234
3ea2c264
KH
13235 if (op1)
13236 {
13237 arg1 = op1;
13238 STRIP_NOPS (arg1);
9bdae6af
KH
13239 }
13240
16949072
RG
13241 if (op2)
13242 {
13243 arg2 = op2;
13244 STRIP_NOPS (arg2);
13245 }
13246
9bdae6af
KH
13247 switch (code)
13248 {
13249 case COMPONENT_REF:
13250 if (TREE_CODE (arg0) == CONSTRUCTOR
13251 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13252 {
4038c495
GB
13253 unsigned HOST_WIDE_INT idx;
13254 tree field, value;
13255 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13256 if (field == arg1)
13257 return value;
9bdae6af 13258 }
62ab45cc 13259 return NULL_TREE;
9bdae6af
KH
13260
13261 case COND_EXPR:
13262 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13263 so all simple results must be passed through pedantic_non_lvalue. */
13264 if (TREE_CODE (arg0) == INTEGER_CST)
13265 {
8c900457 13266 tree unused_op = integer_zerop (arg0) ? op1 : op2;
3ea2c264 13267 tem = integer_zerop (arg0) ? op2 : op1;
9bdae6af
KH
13268 /* Only optimize constant conditions when the selected branch
13269 has the same type as the COND_EXPR. This avoids optimizing
8c900457
GL
13270 away "c ? x : throw", where the throw has a void type.
 13271 Avoid throwing away an operand that contains a label. */
13272 if ((!TREE_SIDE_EFFECTS (unused_op)
13273 || !contains_label_p (unused_op))
13274 && (! VOID_TYPE_P (TREE_TYPE (tem))
13275 || VOID_TYPE_P (type)))
db3927fb 13276 return pedantic_non_lvalue_loc (loc, tem);
62ab45cc 13277 return NULL_TREE;
9bdae6af 13278 }
3ea2c264 13279 if (operand_equal_p (arg1, op2, 0))
db3927fb 13280 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
9bdae6af
KH
13281
13282 /* If we have A op B ? A : C, we may be able to convert this to a
13283 simpler expression, depending on the operation and the values
13284 of B and C. Signed zeros prevent all of these transformations,
13285 for reasons given above each one.
13286
13287 Also try swapping the arguments and inverting the conditional. */
13288 if (COMPARISON_CLASS_P (arg0)
13289 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13290 arg1, TREE_OPERAND (arg0, 1))
13291 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13292 {
db3927fb 13293 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
9bdae6af
KH
13294 if (tem)
13295 return tem;
13296 }
13297
13298 if (COMPARISON_CLASS_P (arg0)
13299 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
3ea2c264 13300 op2,
9bdae6af 13301 TREE_OPERAND (arg0, 1))
3ea2c264 13302 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
9bdae6af 13303 {
db3927fb 13304 tem = fold_truth_not_expr (loc, arg0);
d817ed3b 13305 if (tem && COMPARISON_CLASS_P (tem))
9bdae6af 13306 {
db3927fb 13307 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
9bdae6af
KH
13308 if (tem)
13309 return tem;
13310 }
13311 }
13312
13313 /* If the second operand is simpler than the third, swap them
13314 since that produces better jump optimization results. */
3dac16bd
RG
13315 if (truth_value_p (TREE_CODE (arg0))
13316 && tree_swap_operands_p (op1, op2, false))
9bdae6af
KH
13317 {
13318 /* See if this can be inverted. If it can't, possibly because
13319 it was a floating-point inequality comparison, don't do
13320 anything. */
db3927fb 13321 tem = fold_truth_not_expr (loc, arg0);
d817ed3b 13322 if (tem)
db3927fb 13323 return fold_build3_loc (loc, code, type, tem, op2, op1);
9bdae6af
KH
13324 }
13325
13326 /* Convert A ? 1 : 0 to simply A. */
3ea2c264
KH
13327 if (integer_onep (op1)
13328 && integer_zerop (op2)
13329 /* If we try to convert OP0 to our type, the
9bdae6af
KH
13330 call to fold will try to move the conversion inside
13331 a COND, which will recurse. In that case, the COND_EXPR
13332 is probably the best choice, so leave it alone. */
13333 && type == TREE_TYPE (arg0))
db3927fb 13334 return pedantic_non_lvalue_loc (loc, arg0);
9bdae6af
KH
13335
13336 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13337 over COND_EXPR in cases such as floating point comparisons. */
3ea2c264
KH
13338 if (integer_zerop (op1)
13339 && integer_onep (op2)
9bdae6af 13340 && truth_value_p (TREE_CODE (arg0)))
db3927fb
AH
13341 return pedantic_non_lvalue_loc (loc,
13342 fold_convert_loc (loc, type,
13343 invert_truthvalue_loc (loc,
13344 arg0)));
9bdae6af
KH
13345
13346 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13347 if (TREE_CODE (arg0) == LT_EXPR
789e604d
JJ
13348 && integer_zerop (TREE_OPERAND (arg0, 1))
13349 && integer_zerop (op2)
13350 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13351 {
13352 /* sign_bit_p only checks ARG1 bits within A's precision.
13353 If <sign bit of A> has wider type than A, bits outside
13354 of A's precision in <sign bit of A> need to be checked.
 13355 If they are all 0, this optimization needs to be done
 13356 in unsigned A's type; if they are all 1, in signed A's type;
 13357 otherwise this can't be done. */
13358 if (TYPE_PRECISION (TREE_TYPE (tem))
13359 < TYPE_PRECISION (TREE_TYPE (arg1))
13360 && TYPE_PRECISION (TREE_TYPE (tem))
13361 < TYPE_PRECISION (type))
13362 {
13363 unsigned HOST_WIDE_INT mask_lo;
13364 HOST_WIDE_INT mask_hi;
13365 int inner_width, outer_width;
13366 tree tem_type;
13367
13368 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13369 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13370 if (outer_width > TYPE_PRECISION (type))
13371 outer_width = TYPE_PRECISION (type);
13372
13373 if (outer_width > HOST_BITS_PER_WIDE_INT)
13374 {
13375 mask_hi = ((unsigned HOST_WIDE_INT) -1
13376 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13377 mask_lo = -1;
13378 }
13379 else
13380 {
13381 mask_hi = 0;
13382 mask_lo = ((unsigned HOST_WIDE_INT) -1
13383 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13384 }
13385 if (inner_width > HOST_BITS_PER_WIDE_INT)
13386 {
13387 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13388 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13389 mask_lo = 0;
13390 }
13391 else
13392 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13393 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13394
13395 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13396 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13397 {
12753674 13398 tem_type = signed_type_for (TREE_TYPE (tem));
db3927fb 13399 tem = fold_convert_loc (loc, tem_type, tem);
789e604d
JJ
13400 }
13401 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13402 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13403 {
ca5ba2a3 13404 tem_type = unsigned_type_for (TREE_TYPE (tem));
db3927fb 13405 tem = fold_convert_loc (loc, tem_type, tem);
789e604d
JJ
13406 }
13407 else
13408 tem = NULL;
13409 }
13410
13411 if (tem)
db3927fb
AH
13412 return
13413 fold_convert_loc (loc, type,
13414 fold_build2_loc (loc, BIT_AND_EXPR,
13415 TREE_TYPE (tem), tem,
13416 fold_convert_loc (loc,
13417 TREE_TYPE (tem),
13418 arg1)));
789e604d 13419 }
9bdae6af
KH
13420
13421 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13422 already handled above. */
13423 if (TREE_CODE (arg0) == BIT_AND_EXPR
13424 && integer_onep (TREE_OPERAND (arg0, 1))
3ea2c264 13425 && integer_zerop (op2)
9bdae6af
KH
13426 && integer_pow2p (arg1))
13427 {
13428 tree tem = TREE_OPERAND (arg0, 0);
13429 STRIP_NOPS (tem);
13430 if (TREE_CODE (tem) == RSHIFT_EXPR
13431 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13432 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13433 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
db3927fb 13434 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7f20a5b7 13435 TREE_OPERAND (tem, 0), arg1);
9bdae6af
KH
13436 }
13437
13438 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13439 is probably obsolete because the first operand should be a
13440 truth value (that's why we have the two cases above), but let's
13441 leave it in until we can confirm this for all front-ends. */
3ea2c264 13442 if (integer_zerop (op2)
9bdae6af
KH
13443 && TREE_CODE (arg0) == NE_EXPR
13444 && integer_zerop (TREE_OPERAND (arg0, 1))
13445 && integer_pow2p (arg1)
13446 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13447 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13448 arg1, OEP_ONLY_CONST))
db3927fb
AH
13449 return pedantic_non_lvalue_loc (loc,
13450 fold_convert_loc (loc, type,
13451 TREE_OPERAND (arg0, 0)));
9bdae6af
KH
13452
13453 /* Convert A ? B : 0 into A && B if A and B are truth values. */
3ea2c264 13454 if (integer_zerop (op2)
9bdae6af
KH
13455 && truth_value_p (TREE_CODE (arg0))
13456 && truth_value_p (TREE_CODE (arg1)))
db3927fb
AH
13457 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13458 fold_convert_loc (loc, type, arg0),
726ac11e 13459 arg1);
9bdae6af
KH
13460
13461 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
3ea2c264 13462 if (integer_onep (op2)
9bdae6af
KH
13463 && truth_value_p (TREE_CODE (arg0))
13464 && truth_value_p (TREE_CODE (arg1)))
13465 {
13466 /* Only perform transformation if ARG0 is easily inverted. */
db3927fb 13467 tem = fold_truth_not_expr (loc, arg0);
d817ed3b 13468 if (tem)
db3927fb
AH
13469 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13470 fold_convert_loc (loc, type, tem),
726ac11e 13471 arg1);
9bdae6af
KH
13472 }
13473
13474 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13475 if (integer_zerop (arg1)
13476 && truth_value_p (TREE_CODE (arg0))
3ea2c264 13477 && truth_value_p (TREE_CODE (op2)))
9bdae6af
KH
13478 {
13479 /* Only perform transformation if ARG0 is easily inverted. */
db3927fb 13480 tem = fold_truth_not_expr (loc, arg0);
d817ed3b 13481 if (tem)
db3927fb
AH
13482 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13483 fold_convert_loc (loc, type, tem),
726ac11e 13484 op2);
9bdae6af
KH
13485 }
13486
13487 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13488 if (integer_onep (arg1)
13489 && truth_value_p (TREE_CODE (arg0))
3ea2c264 13490 && truth_value_p (TREE_CODE (op2)))
db3927fb
AH
13491 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13492 fold_convert_loc (loc, type, arg0),
726ac11e 13493 op2);
9bdae6af 13494
62ab45cc 13495 return NULL_TREE;
9bdae6af
KH
13496
13497 case CALL_EXPR:
5039610b
SL
13498 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13499 of fold_ternary on them. */
13500 gcc_unreachable ();
9bdae6af 13501
dcd25113 13502 case BIT_FIELD_REF:
5773afc5
DN
13503 if ((TREE_CODE (arg0) == VECTOR_CST
13504 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
e55f42fb 13505 && type == TREE_TYPE (TREE_TYPE (arg0)))
dcd25113
JJ
13506 {
13507 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13508 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13509
13510 if (width != 0
13511 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13512 && (idx % width) == 0
13513 && (idx = idx / width)
13514 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13515 {
5773afc5
DN
13516 tree elements = NULL_TREE;
13517
13518 if (TREE_CODE (arg0) == VECTOR_CST)
13519 elements = TREE_VECTOR_CST_ELTS (arg0);
13520 else
13521 {
13522 unsigned HOST_WIDE_INT idx;
13523 tree value;
13524
13525 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13526 elements = tree_cons (NULL_TREE, value, elements);
13527 }
40182dbf 13528 while (idx-- > 0 && elements)
dcd25113 13529 elements = TREE_CHAIN (elements);
40182dbf
JJ
13530 if (elements)
13531 return TREE_VALUE (elements);
13532 else
e8160c9a 13533 return build_zero_cst (type);
dcd25113
JJ
13534 }
13535 }
ee1f1270
RG
13536
13537 /* A bit-field-ref that referenced the full argument can be stripped. */
13538 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13539 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13540 && integer_zerop (op2))
db3927fb 13541 return fold_convert_loc (loc, type, arg0);
ee1f1270 13542
dcd25113
JJ
13543 return NULL_TREE;
13544
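      /* Example of the constant-vector case above: when ARG0 is a
	 VECTOR_CST (or a constant CONSTRUCTOR), TYPE is the element
	 type, the requested width equals the element size and the bit
	 offset is a multiple of that width, the BIT_FIELD_REF selects a
	 single element and folds to that element's constant value, or
	 to a zero constant once the element list is exhausted.  */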
16949072
RG
13545 case FMA_EXPR:
13546 /* For integers we can decompose the FMA if possible. */
13547 if (TREE_CODE (arg0) == INTEGER_CST
13548 && TREE_CODE (arg1) == INTEGER_CST)
13549 return fold_build2_loc (loc, PLUS_EXPR, type,
13550 const_binop (MULT_EXPR, arg0, arg1), arg2);
13551 if (integer_zerop (arg2))
13552 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13553
13554 return fold_fma (loc, type, arg0, arg1, arg2);
13555
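      /* For instance, FMA_EXPR <7, 3, 5> with integer constant operands
	 is decomposed above into 7*3 + 5 and folds to 26, and an FMA
	 whose addend is zero degenerates into a plain MULT_EXPR.  */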
9bdae6af 13556 default:
62ab45cc 13557 return NULL_TREE;
9bdae6af
KH
13558 } /* switch (code) */
13559}
13560
6d716ca8
RS
13561/* Perform constant folding and related simplification of EXPR.
13562 The related simplifications include x*1 => x, x*0 => 0, etc.,
13563 and application of the associative law.
13564 NOP_EXPR conversions may be removed freely (as long as we
af5bdf6a 13565 are careful not to change the type of the overall expression).
6d716ca8
RS
13566 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13567 but we can constant-fold them if they have constant operands. */
13568
5dfa45d0
JJ
13569#ifdef ENABLE_FOLD_CHECKING
13570# define fold(x) fold_1 (x)
13571static tree fold_1 (tree);
13572static
13573#endif
6d716ca8 13574tree
fa8db1f7 13575fold (tree expr)
6d716ca8 13576{
ea993805 13577 const tree t = expr;
b3694847 13578 enum tree_code code = TREE_CODE (t);
6615c446 13579 enum tree_code_class kind = TREE_CODE_CLASS (code);
62ab45cc 13580 tree tem;
db3927fb 13581 location_t loc = EXPR_LOCATION (expr);
6de9cd9a 13582
1796dff4 13583 /* Return right away if a constant. */
6615c446 13584 if (kind == tcc_constant)
1796dff4 13585 return t;
b6cc0a72 13586
5039610b
SL
13587 /* CALL_EXPR-like objects with variable numbers of operands are
13588 treated specially. */
13589 if (kind == tcc_vl_exp)
13590 {
13591 if (code == CALL_EXPR)
13592 {
db3927fb 13593 tem = fold_call_expr (loc, expr, false);
5039610b
SL
13594 return tem ? tem : expr;
13595 }
13596 return expr;
13597 }
13598
726a989a 13599 if (IS_EXPR_CODE_CLASS (kind))
659d8efa 13600 {
fbaa905c 13601 tree type = TREE_TYPE (t);
7cf57259 13602 tree op0, op1, op2;
fbaa905c 13603
659d8efa
KH
13604 switch (TREE_CODE_LENGTH (code))
13605 {
13606 case 1:
fbaa905c 13607 op0 = TREE_OPERAND (t, 0);
db3927fb 13608 tem = fold_unary_loc (loc, code, type, op0);
62ab45cc 13609 return tem ? tem : expr;
0aee4751 13610 case 2:
fbaa905c
KH
13611 op0 = TREE_OPERAND (t, 0);
13612 op1 = TREE_OPERAND (t, 1);
db3927fb 13613 tem = fold_binary_loc (loc, code, type, op0, op1);
62ab45cc 13614 return tem ? tem : expr;
9bdae6af 13615 case 3:
7cf57259
KH
13616 op0 = TREE_OPERAND (t, 0);
13617 op1 = TREE_OPERAND (t, 1);
13618 op2 = TREE_OPERAND (t, 2);
db3927fb 13619 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
62ab45cc 13620 return tem ? tem : expr;
659d8efa
KH
13621 default:
13622 break;
13623 }
13624 }
13625
6d716ca8
RS
13626 switch (code)
13627 {
39fcde8f
EB
13628 case ARRAY_REF:
13629 {
13630 tree op0 = TREE_OPERAND (t, 0);
13631 tree op1 = TREE_OPERAND (t, 1);
13632
13633 if (TREE_CODE (op1) == INTEGER_CST
13634 && TREE_CODE (op0) == CONSTRUCTOR
13635 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13636 {
13637 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13638 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13639 unsigned HOST_WIDE_INT begin = 0;
13640
13641 /* Find a matching index by means of a binary search. */
13642 while (begin != end)
13643 {
13644 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13645 tree index = VEC_index (constructor_elt, elts, middle)->index;
13646
13647 if (TREE_CODE (index) == INTEGER_CST
13648 && tree_int_cst_lt (index, op1))
13649 begin = middle + 1;
13650 else if (TREE_CODE (index) == INTEGER_CST
13651 && tree_int_cst_lt (op1, index))
13652 end = middle;
13653 else if (TREE_CODE (index) == RANGE_EXPR
13654 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13655 begin = middle + 1;
13656 else if (TREE_CODE (index) == RANGE_EXPR
13657 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13658 end = middle;
13659 else
13660 return VEC_index (constructor_elt, elts, middle)->value;
13661 }
13662 }
13663
13664 return t;
13665 }
13666
6d716ca8
RS
13667 case CONST_DECL:
13668 return fold (DECL_INITIAL (t));
13669
6d716ca8
RS
13670 default:
13671 return t;
13672 } /* switch (code) */
13673}
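/* A minimal usage sketch for the folder: callers normally go through the
   fold_buildN wrappers, e.g.

     tree sum = fold_build2 (PLUS_EXPR, integer_type_node,
			     build_int_cst (integer_type_node, 2),
			     build_int_cst (integer_type_node, 3));

   which lets fold_binary_loc simplify the addition and yields an
   INTEGER_CST of value 5 rather than an unsimplified PLUS_EXPR node.  */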
39dfb55a 13674
5dfa45d0
JJ
13675#ifdef ENABLE_FOLD_CHECKING
13676#undef fold
13677
ac545c64
KG
13678static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13679static void fold_check_failed (const_tree, const_tree);
13680void print_fold_checksum (const_tree);
5dfa45d0
JJ
13681
13682/* When --enable-checking=fold, compute a digest of expr before
 13683 and after the actual fold call to verify that fold did not
 13684 accidentally change the original expr. */
13685
13686tree
13687fold (tree expr)
13688{
13689 tree ret;
13690 struct md5_ctx ctx;
13691 unsigned char checksum_before[16], checksum_after[16];
13692 htab_t ht;
13693
13694 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13695 md5_init_ctx (&ctx);
13696 fold_checksum_tree (expr, &ctx, ht);
13697 md5_finish_ctx (&ctx, checksum_before);
13698 htab_empty (ht);
13699
13700 ret = fold_1 (expr);
13701
13702 md5_init_ctx (&ctx);
13703 fold_checksum_tree (expr, &ctx, ht);
13704 md5_finish_ctx (&ctx, checksum_after);
13705 htab_delete (ht);
13706
13707 if (memcmp (checksum_before, checksum_after, 16))
13708 fold_check_failed (expr, ret);
13709
13710 return ret;
13711}
13712
13713void
ac545c64 13714print_fold_checksum (const_tree expr)
5dfa45d0
JJ
13715{
13716 struct md5_ctx ctx;
13717 unsigned char checksum[16], cnt;
13718 htab_t ht;
13719
13720 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13721 md5_init_ctx (&ctx);
13722 fold_checksum_tree (expr, &ctx, ht);
13723 md5_finish_ctx (&ctx, checksum);
13724 htab_delete (ht);
13725 for (cnt = 0; cnt < 16; ++cnt)
13726 fprintf (stderr, "%02x", checksum[cnt]);
13727 putc ('\n', stderr);
13728}
13729
13730static void
ac545c64 13731fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
5dfa45d0
JJ
13732{
13733 internal_error ("fold check: original tree changed by fold");
13734}
13735
13736static void
ac545c64 13737fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
5dfa45d0 13738{
0c3dbcf0 13739 void **slot;
5dfa45d0 13740 enum tree_code code;
ea6dafb0 13741 union tree_node buf;
5dfa45d0 13742 int i, len;
b8698a0f 13743
d763bb10 13744recursive_label:
5dfa45d0 13745
0bccc606 13746 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
46c5394b
DB
13747 <= sizeof (struct tree_function_decl))
13748 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
5dfa45d0
JJ
13749 if (expr == NULL)
13750 return;
0c3dbcf0 13751 slot = (void **) htab_find_slot (ht, expr, INSERT);
5dfa45d0
JJ
13752 if (*slot != NULL)
13753 return;
0c3dbcf0 13754 *slot = CONST_CAST_TREE (expr);
5dfa45d0 13755 code = TREE_CODE (expr);
6615c446
JO
13756 if (TREE_CODE_CLASS (code) == tcc_declaration
13757 && DECL_ASSEMBLER_NAME_SET_P (expr))
5dfa45d0
JJ
13758 {
13759 /* Allow DECL_ASSEMBLER_NAME to be modified. */
3f7f53c7 13760 memcpy ((char *) &buf, expr, tree_size (expr));
ac545c64 13761 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
3f7f53c7 13762 expr = (tree) &buf;
5dfa45d0 13763 }
6615c446 13764 else if (TREE_CODE_CLASS (code) == tcc_type
5cf96841
JJ
13765 && (TYPE_POINTER_TO (expr)
13766 || TYPE_REFERENCE_TO (expr)
d763bb10 13767 || TYPE_CACHED_VALUES_P (expr)
5cf96841
JJ
13768 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13769 || TYPE_NEXT_VARIANT (expr)))
5dfa45d0 13770 {
b9193259 13771 /* Allow these fields to be modified. */
ac545c64 13772 tree tmp;
3f7f53c7 13773 memcpy ((char *) &buf, expr, tree_size (expr));
ac545c64
KG
13774 expr = tmp = (tree) &buf;
13775 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13776 TYPE_POINTER_TO (tmp) = NULL;
13777 TYPE_REFERENCE_TO (tmp) = NULL;
5cf96841 13778 TYPE_NEXT_VARIANT (tmp) = NULL;
ac545c64 13779 if (TYPE_CACHED_VALUES_P (tmp))
0ebfd2c9 13780 {
ac545c64
KG
13781 TYPE_CACHED_VALUES_P (tmp) = 0;
13782 TYPE_CACHED_VALUES (tmp) = NULL;
0ebfd2c9 13783 }
5dfa45d0
JJ
13784 }
13785 md5_process_bytes (expr, tree_size (expr), ctx);
13786 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
6615c446 13787 if (TREE_CODE_CLASS (code) != tcc_type
d763bb10 13788 && TREE_CODE_CLASS (code) != tcc_declaration
70826cbb
SP
13789 && code != TREE_LIST
13790 && code != SSA_NAME)
5dfa45d0 13791 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
5dfa45d0
JJ
13792 switch (TREE_CODE_CLASS (code))
13793 {
6615c446 13794 case tcc_constant:
5dfa45d0
JJ
13795 switch (code)
13796 {
13797 case STRING_CST:
13798 md5_process_bytes (TREE_STRING_POINTER (expr),
13799 TREE_STRING_LENGTH (expr), ctx);
13800 break;
13801 case COMPLEX_CST:
13802 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13803 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13804 break;
13805 case VECTOR_CST:
13806 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13807 break;
13808 default:
13809 break;
13810 }
13811 break;
6615c446 13812 case tcc_exceptional:
5dfa45d0
JJ
13813 switch (code)
13814 {
13815 case TREE_LIST:
13816 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13817 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
d763bb10
AP
13818 expr = TREE_CHAIN (expr);
13819 goto recursive_label;
5dfa45d0
JJ
13820 break;
13821 case TREE_VEC:
13822 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13823 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13824 break;
13825 default:
13826 break;
13827 }
13828 break;
6615c446
JO
13829 case tcc_expression:
13830 case tcc_reference:
13831 case tcc_comparison:
13832 case tcc_unary:
13833 case tcc_binary:
13834 case tcc_statement:
5039610b
SL
13835 case tcc_vl_exp:
13836 len = TREE_OPERAND_LENGTH (expr);
5dfa45d0
JJ
13837 for (i = 0; i < len; ++i)
13838 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13839 break;
6615c446 13840 case tcc_declaration:
5dfa45d0
JJ
13841 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13842 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
3eb04608
DB
13843 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13844 {
13845 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13846 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13847 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13848 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13849 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13850 }
46c5394b
DB
13851 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13852 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
b8698a0f 13853
46c5394b
DB
13854 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13855 {
13856 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13857 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13858 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13859 }
5dfa45d0 13860 break;
6615c446 13861 case tcc_type:
a40de696
AP
13862 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13863 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
5dfa45d0
JJ
13864 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13865 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13866 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13867 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
a40de696
AP
13868 if (INTEGRAL_TYPE_P (expr)
13869 || SCALAR_FLOAT_TYPE_P (expr))
13870 {
13871 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13872 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13873 }
5dfa45d0 13874 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
b9193259
DJ
13875 if (TREE_CODE (expr) == RECORD_TYPE
13876 || TREE_CODE (expr) == UNION_TYPE
13877 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13878 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
5dfa45d0
JJ
13879 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13880 break;
13881 default:
13882 break;
13883 }
13884}
13885
f1b42630
AN
13886/* Helper function for outputting the checksum of a tree T. When
13887 debugging with gdb, you can "define mynext" to be "next" followed
13888 by "call debug_fold_checksum (op0)", then just trace down till the
13889 outputs differ. */
13890
24e47c76 13891DEBUG_FUNCTION void
ac545c64 13892debug_fold_checksum (const_tree t)
f1b42630
AN
13893{
13894 int i;
13895 unsigned char checksum[16];
13896 struct md5_ctx ctx;
13897 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
b8698a0f 13898
f1b42630
AN
13899 md5_init_ctx (&ctx);
13900 fold_checksum_tree (t, &ctx, ht);
13901 md5_finish_ctx (&ctx, checksum);
13902 htab_empty (ht);
13903
13904 for (i = 0; i < 16; i++)
13905 fprintf (stderr, "%d ", checksum[i]);
13906
13907 fprintf (stderr, "\n");
13908}
13909
5dfa45d0
JJ
13910#endif
13911
ba199a53 13912/* Fold a unary tree expression with code CODE of type TYPE with an
db3927fb
AH
13913 operand OP0. LOC is the location of the resulting expression.
13914 Return a folded expression if successful. Otherwise, return a tree
13915 expression with code CODE of type TYPE with an operand OP0. */
ba199a53
KH
13916
13917tree
db3927fb
AH
13918fold_build1_stat_loc (location_t loc,
13919 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
ba199a53 13920{
e2fe73f6
AP
13921 tree tem;
13922#ifdef ENABLE_FOLD_CHECKING
13923 unsigned char checksum_before[16], checksum_after[16];
13924 struct md5_ctx ctx;
13925 htab_t ht;
13926
13927 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13928 md5_init_ctx (&ctx);
13929 fold_checksum_tree (op0, &ctx, ht);
13930 md5_finish_ctx (&ctx, checksum_before);
13931 htab_empty (ht);
13932#endif
b8698a0f 13933
db3927fb 13934 tem = fold_unary_loc (loc, code, type, op0);
e2fe73f6 13935 if (!tem)
c9019218 13936 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
b8698a0f 13937
e2fe73f6
AP
13938#ifdef ENABLE_FOLD_CHECKING
13939 md5_init_ctx (&ctx);
13940 fold_checksum_tree (op0, &ctx, ht);
13941 md5_finish_ctx (&ctx, checksum_after);
13942 htab_delete (ht);
ba199a53 13943
e2fe73f6
AP
13944 if (memcmp (checksum_before, checksum_after, 16))
13945 fold_check_failed (op0, tem);
13946#endif
13947 return tem;
ba199a53
KH
13948}
13949
13950/* Fold a binary tree expression with code CODE of type TYPE with
db3927fb
AH
13951 operands OP0 and OP1. LOC is the location of the resulting
13952 expression. Return a folded expression if successful. Otherwise,
13953 return a tree expression with code CODE of type TYPE with operands
13954 OP0 and OP1. */
ba199a53
KH
13955
13956tree
db3927fb
AH
13957fold_build2_stat_loc (location_t loc,
13958 enum tree_code code, tree type, tree op0, tree op1
13959 MEM_STAT_DECL)
ba199a53 13960{
e2fe73f6
AP
13961 tree tem;
13962#ifdef ENABLE_FOLD_CHECKING
13963 unsigned char checksum_before_op0[16],
13964 checksum_before_op1[16],
13965 checksum_after_op0[16],
13966 checksum_after_op1[16];
13967 struct md5_ctx ctx;
13968 htab_t ht;
13969
13970 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13971 md5_init_ctx (&ctx);
13972 fold_checksum_tree (op0, &ctx, ht);
13973 md5_finish_ctx (&ctx, checksum_before_op0);
13974 htab_empty (ht);
13975
13976 md5_init_ctx (&ctx);
13977 fold_checksum_tree (op1, &ctx, ht);
13978 md5_finish_ctx (&ctx, checksum_before_op1);
13979 htab_empty (ht);
13980#endif
13981
db3927fb 13982 tem = fold_binary_loc (loc, code, type, op0, op1);
e2fe73f6 13983 if (!tem)
c9019218 13984 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
b8698a0f 13985
e2fe73f6
AP
13986#ifdef ENABLE_FOLD_CHECKING
13987 md5_init_ctx (&ctx);
13988 fold_checksum_tree (op0, &ctx, ht);
13989 md5_finish_ctx (&ctx, checksum_after_op0);
13990 htab_empty (ht);
13991
13992 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13993 fold_check_failed (op0, tem);
b8698a0f 13994
e2fe73f6
AP
13995 md5_init_ctx (&ctx);
13996 fold_checksum_tree (op1, &ctx, ht);
13997 md5_finish_ctx (&ctx, checksum_after_op1);
13998 htab_delete (ht);
ba199a53 13999
e2fe73f6
AP
14000 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14001 fold_check_failed (op1, tem);
14002#endif
14003 return tem;
ba199a53
KH
14004}
14005
14006/* Fold a ternary tree expression with code CODE of type TYPE with
830113fd 14007 operands OP0, OP1, and OP2. Return a folded expression if
ba199a53
KH
14008 successful. Otherwise, return a tree expression with code CODE of
14009 type TYPE with operands OP0, OP1, and OP2. */
14010
14011tree
db3927fb
AH
14012fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14013 tree op0, tree op1, tree op2 MEM_STAT_DECL)
5808968e
AP
14014{
14015 tree tem;
e2fe73f6
AP
14016#ifdef ENABLE_FOLD_CHECKING
14017 unsigned char checksum_before_op0[16],
14018 checksum_before_op1[16],
14019 checksum_before_op2[16],
14020 checksum_after_op0[16],
14021 checksum_after_op1[16],
14022 checksum_after_op2[16];
14023 struct md5_ctx ctx;
14024 htab_t ht;
14025
14026 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14027 md5_init_ctx (&ctx);
14028 fold_checksum_tree (op0, &ctx, ht);
14029 md5_finish_ctx (&ctx, checksum_before_op0);
14030 htab_empty (ht);
ba199a53 14031
e2fe73f6
AP
14032 md5_init_ctx (&ctx);
14033 fold_checksum_tree (op1, &ctx, ht);
14034 md5_finish_ctx (&ctx, checksum_before_op1);
14035 htab_empty (ht);
14036
14037 md5_init_ctx (&ctx);
14038 fold_checksum_tree (op2, &ctx, ht);
14039 md5_finish_ctx (&ctx, checksum_before_op2);
14040 htab_empty (ht);
14041#endif
5039610b
SL
14042
14043 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
db3927fb 14044 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
e2fe73f6 14045 if (!tem)
c9019218 14046 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
b8698a0f 14047
e2fe73f6
AP
14048#ifdef ENABLE_FOLD_CHECKING
14049 md5_init_ctx (&ctx);
14050 fold_checksum_tree (op0, &ctx, ht);
14051 md5_finish_ctx (&ctx, checksum_after_op0);
14052 htab_empty (ht);
14053
14054 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14055 fold_check_failed (op0, tem);
b8698a0f 14056
e2fe73f6
AP
14057 md5_init_ctx (&ctx);
14058 fold_checksum_tree (op1, &ctx, ht);
14059 md5_finish_ctx (&ctx, checksum_after_op1);
14060 htab_empty (ht);
14061
14062 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14063 fold_check_failed (op1, tem);
b8698a0f 14064
e2fe73f6
AP
14065 md5_init_ctx (&ctx);
14066 fold_checksum_tree (op2, &ctx, ht);
14067 md5_finish_ctx (&ctx, checksum_after_op2);
14068 htab_delete (ht);
14069
14070 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14071 fold_check_failed (op2, tem);
14072#endif
14073 return tem;
ba199a53
KH
14074}
14075
94a0dd7b
SL
14076/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14077 arguments in ARGARRAY, and a null static chain.
5039610b 14078 Return a folded expression if successful. Otherwise, return a CALL_EXPR
94a0dd7b 14079 of type TYPE from the given operands as constructed by build_call_array. */
5039610b
SL
14080
14081tree
db3927fb
AH
14082fold_build_call_array_loc (location_t loc, tree type, tree fn,
14083 int nargs, tree *argarray)
5039610b
SL
14084{
14085 tree tem;
14086#ifdef ENABLE_FOLD_CHECKING
14087 unsigned char checksum_before_fn[16],
14088 checksum_before_arglist[16],
14089 checksum_after_fn[16],
14090 checksum_after_arglist[16];
14091 struct md5_ctx ctx;
14092 htab_t ht;
94a0dd7b 14093 int i;
5039610b
SL
14094
14095 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14096 md5_init_ctx (&ctx);
14097 fold_checksum_tree (fn, &ctx, ht);
14098 md5_finish_ctx (&ctx, checksum_before_fn);
14099 htab_empty (ht);
14100
14101 md5_init_ctx (&ctx);
94a0dd7b
SL
14102 for (i = 0; i < nargs; i++)
14103 fold_checksum_tree (argarray[i], &ctx, ht);
5039610b
SL
14104 md5_finish_ctx (&ctx, checksum_before_arglist);
14105 htab_empty (ht);
14106#endif
14107
db3927fb 14108 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
b8698a0f 14109
5039610b
SL
14110#ifdef ENABLE_FOLD_CHECKING
14111 md5_init_ctx (&ctx);
14112 fold_checksum_tree (fn, &ctx, ht);
14113 md5_finish_ctx (&ctx, checksum_after_fn);
14114 htab_empty (ht);
14115
14116 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14117 fold_check_failed (fn, tem);
b8698a0f 14118
5039610b 14119 md5_init_ctx (&ctx);
94a0dd7b
SL
14120 for (i = 0; i < nargs; i++)
14121 fold_checksum_tree (argarray[i], &ctx, ht);
5039610b
SL
14122 md5_finish_ctx (&ctx, checksum_after_arglist);
14123 htab_delete (ht);
14124
14125 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
94a0dd7b 14126 fold_check_failed (NULL_TREE, tem);
5039610b
SL
14127#endif
14128 return tem;
14129}
14130
a98ebe2e 14131/* Perform constant folding and related simplification of initializer
00d1b1d6 14132 expression EXPR. These behave identically to "fold_buildN" but ignore
3e4093b6
RS
14133 potential run-time traps and exceptions that fold must preserve. */
14134
00d1b1d6
JM
14135#define START_FOLD_INIT \
14136 int saved_signaling_nans = flag_signaling_nans;\
14137 int saved_trapping_math = flag_trapping_math;\
14138 int saved_rounding_math = flag_rounding_math;\
14139 int saved_trapv = flag_trapv;\
63b48197 14140 int saved_folding_initializer = folding_initializer;\
00d1b1d6
JM
14141 flag_signaling_nans = 0;\
14142 flag_trapping_math = 0;\
14143 flag_rounding_math = 0;\
63b48197
MS
14144 flag_trapv = 0;\
14145 folding_initializer = 1;
00d1b1d6
JM
14146
14147#define END_FOLD_INIT \
14148 flag_signaling_nans = saved_signaling_nans;\
14149 flag_trapping_math = saved_trapping_math;\
14150 flag_rounding_math = saved_rounding_math;\
63b48197
MS
14151 flag_trapv = saved_trapv;\
14152 folding_initializer = saved_folding_initializer;
00d1b1d6
JM
14153
14154tree
db3927fb
AH
14155fold_build1_initializer_loc (location_t loc, enum tree_code code,
14156 tree type, tree op)
00d1b1d6
JM
14157{
14158 tree result;
14159 START_FOLD_INIT;
14160
db3927fb 14161 result = fold_build1_loc (loc, code, type, op);
00d1b1d6
JM
14162
14163 END_FOLD_INIT;
14164 return result;
14165}
14166
3e4093b6 14167tree
db3927fb
AH
14168fold_build2_initializer_loc (location_t loc, enum tree_code code,
14169 tree type, tree op0, tree op1)
3e4093b6 14170{
3e4093b6 14171 tree result;
00d1b1d6
JM
14172 START_FOLD_INIT;
14173
db3927fb 14174 result = fold_build2_loc (loc, code, type, op0, op1);
3e4093b6 14175
00d1b1d6
JM
14176 END_FOLD_INIT;
14177 return result;
14178}
3e4093b6 14179
00d1b1d6 14180tree
db3927fb
AH
14181fold_build3_initializer_loc (location_t loc, enum tree_code code,
14182 tree type, tree op0, tree op1, tree op2)
00d1b1d6
JM
14183{
14184 tree result;
14185 START_FOLD_INIT;
3e4093b6 14186
db3927fb 14187 result = fold_build3_loc (loc, code, type, op0, op1, op2);
3e4093b6 14188
00d1b1d6 14189 END_FOLD_INIT;
3e4093b6
RS
14190 return result;
14191}
14192
5039610b 14193tree
db3927fb
AH
14194fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14195 int nargs, tree *argarray)
5039610b
SL
14196{
14197 tree result;
14198 START_FOLD_INIT;
14199
db3927fb 14200 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
5039610b
SL
14201
14202 END_FOLD_INIT;
14203 return result;
14204}
14205
00d1b1d6
JM
14206#undef START_FOLD_INIT
14207#undef END_FOLD_INIT
14208
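/* Illustration of the difference made by the *_initializer variants above:
   on an IEEE target, folding the static initializer

     static const double d = 1.0 / 0.0;

   through fold_build2_initializer_loc yields +Inf, whereas plain
   fold_build2_loc normally refuses to fold the division while
   -ftrapping-math (the default) is in effect, since it must preserve the
   possible run-time trap.  */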
c5c76735
JL
14209/* Determine if first argument is a multiple of second argument. Return 0 if
14210 it is not, or we cannot easily determine it to be.
39dfb55a 14211
c5c76735
JL
14212 An example of the sort of thing we care about (at this point; this routine
14213 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14214 fold cases do now) is discovering that
39dfb55a
JL
14215
14216 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14217
14218 is a multiple of
14219
14220 SAVE_EXPR (J * 8)
14221
c5c76735 14222 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
39dfb55a
JL
14223
14224 This code also handles discovering that
14225
14226 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14227
c5c76735 14228 is a multiple of 8 so we don't have to worry about dealing with a
39dfb55a
JL
14229 possible remainder.
14230
c5c76735
JL
14231 Note that we *look* inside a SAVE_EXPR only to determine how it was
14232 calculated; it is not safe for fold to do much of anything else with the
14233 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14234 at run time. For example, the latter example above *cannot* be implemented
14235 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14236 evaluation time of the original SAVE_EXPR is not necessarily the same at
14237 the time the new expression is evaluated. The only optimization of this
39dfb55a
JL
14238 sort that would be valid is changing
14239
14240 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
39dfb55a 14241
c5c76735 14242 divided by 8 to
39dfb55a
JL
14243
14244 SAVE_EXPR (I) * SAVE_EXPR (J)
14245
14246 (where the same SAVE_EXPR (J) is used in the original and the
14247 transformed version). */
14248
d4e70294 14249int
ac545c64 14250multiple_of_p (tree type, const_tree top, const_tree bottom)
39dfb55a
JL
14251{
14252 if (operand_equal_p (top, bottom, 0))
14253 return 1;
14254
14255 if (TREE_CODE (type) != INTEGER_TYPE)
14256 return 0;
14257
14258 switch (TREE_CODE (top))
14259 {
29317008
RH
14260 case BIT_AND_EXPR:
14261 /* Bitwise and provides a power of two multiple. If the mask is
14262 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14263 if (!integer_pow2p (bottom))
14264 return 0;
14265 /* FALLTHRU */
14266
39dfb55a
JL
14267 case MULT_EXPR:
14268 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14269 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14270
14271 case PLUS_EXPR:
14272 case MINUS_EXPR:
14273 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14274 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14275
fba2c0cd
JJ
14276 case LSHIFT_EXPR:
14277 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14278 {
14279 tree op1, t1;
14280
14281 op1 = TREE_OPERAND (top, 1);
14282 /* const_binop may not detect overflow correctly,
14283 so check for it explicitly here. */
14284 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14285 > TREE_INT_CST_LOW (op1)
14286 && TREE_INT_CST_HIGH (op1) == 0
088414c1
RS
14287 && 0 != (t1 = fold_convert (type,
14288 const_binop (LSHIFT_EXPR,
14289 size_one_node,
43a5d30b 14290 op1)))
455f14dd 14291 && !TREE_OVERFLOW (t1))
fba2c0cd
JJ
14292 return multiple_of_p (type, t1, bottom);
14293 }
14294 return 0;
14295
39dfb55a 14296 case NOP_EXPR:
c5c76735 14297 /* Can't handle conversions from non-integral or wider integral type. */
39dfb55a
JL
14298 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14299 || (TYPE_PRECISION (type)
14300 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14301 return 0;
c5c76735 14302
30f7a378 14303 /* ... fall through ... */
c5c76735 14304
39dfb55a
JL
14305 case SAVE_EXPR:
14306 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14307
9e9ef331
EB
14308 case COND_EXPR:
14309 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14310 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14311
39dfb55a 14312 case INTEGER_CST:
fba2c0cd 14313 if (TREE_CODE (bottom) != INTEGER_CST
81737468 14314 || integer_zerop (bottom)
8df83eae 14315 || (TYPE_UNSIGNED (type)
fba2c0cd
JJ
14316 && (tree_int_cst_sgn (top) < 0
14317 || tree_int_cst_sgn (bottom) < 0)))
39dfb55a 14318 return 0;
b73a6056
RS
14319 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14320 top, bottom, 0));
39dfb55a
JL
14321
14322 default:
14323 return 0;
14324 }
14325}
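/* Some concrete cases handled above: multiple_of_p returns 1 for
   TOP = J * 8 and BOTTOM = 8 through the MULT_EXPR case, for
   TOP = I << 3 and BOTTOM = 8 through the LSHIFT_EXPR case (1 << 3 is
   folded and compared against BOTTOM), and for TOP = b ? 16 : 24 and
   BOTTOM = 8 through the COND_EXPR case, since both arms are multiples
   of 8.  */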
a36556a8 14326
e918a58a
RAE
14327/* Return true if CODE or TYPE is known to be non-negative. */
14328
14329static bool
14330tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14331{
14332 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14333 && truth_value_p (code))
14334 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14335 have a signed:1 type (where the value is -1 and 0). */
14336 return true;
14337 return false;
14338}
14339
14340/* Return true if (CODE OP0) is known to be non-negative. If the return
6ac01510
ILT
14341 value is based on the assumption that signed overflow is undefined,
14342 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14343 *STRICT_OVERFLOW_P. */
a36556a8 14344
2d3cd5d5 14345bool
e918a58a
RAE
14346tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14347 bool *strict_overflow_p)
a36556a8 14348{
e918a58a 14349 if (TYPE_UNSIGNED (type))
682d0395 14350 return true;
b49ceb45 14351
e918a58a 14352 switch (code)
a36556a8 14353 {
88e3805d 14354 case ABS_EXPR:
1ade5842
JM
14355 /* We can't return 1 if flag_wrapv is set because
14356 ABS_EXPR<INT_MIN> = INT_MIN. */
e918a58a 14357 if (!INTEGRAL_TYPE_P (type))
eeef0e45 14358 return true;
e918a58a 14359 if (TYPE_OVERFLOW_UNDEFINED (type))
6ac01510
ILT
14360 {
14361 *strict_overflow_p = true;
14362 return true;
14363 }
1ade5842 14364 break;
7dba8395 14365
e918a58a
RAE
14366 case NON_LVALUE_EXPR:
14367 case FLOAT_EXPR:
14368 case FIX_TRUNC_EXPR:
14369 return tree_expr_nonnegative_warnv_p (op0,
14370 strict_overflow_p);
f7df23be 14371
e918a58a
RAE
14372 case NOP_EXPR:
14373 {
14374 tree inner_type = TREE_TYPE (op0);
14375 tree outer_type = type;
f7df23be 14376
e918a58a
RAE
14377 if (TREE_CODE (outer_type) == REAL_TYPE)
14378 {
14379 if (TREE_CODE (inner_type) == REAL_TYPE)
14380 return tree_expr_nonnegative_warnv_p (op0,
14381 strict_overflow_p);
14382 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14383 {
14384 if (TYPE_UNSIGNED (inner_type))
14385 return true;
14386 return tree_expr_nonnegative_warnv_p (op0,
14387 strict_overflow_p);
14388 }
14389 }
14390 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14391 {
14392 if (TREE_CODE (inner_type) == REAL_TYPE)
14393 return tree_expr_nonnegative_warnv_p (op0,
14394 strict_overflow_p);
14395 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14396 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14397 && TYPE_UNSIGNED (inner_type);
14398 }
14399 }
14400 break;
14401
14402 default:
14403 return tree_simple_nonnegative_warnv_p (code, type);
14404 }
14405
14406 /* We don't know sign of `t', so be conservative and return false. */
14407 return false;
14408}
325217ed 14409
e918a58a
RAE
14410/* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14411 value is based on the assumption that signed overflow is undefined,
14412 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14413 *STRICT_OVERFLOW_P. */
14414
2d3cd5d5 14415bool
e918a58a
RAE
14416tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14417 tree op1, bool *strict_overflow_p)
14418{
14419 if (TYPE_UNSIGNED (type))
14420 return true;
14421
14422 switch (code)
14423 {
5be014d5 14424 case POINTER_PLUS_EXPR:
f7df23be 14425 case PLUS_EXPR:
e918a58a
RAE
14426 if (FLOAT_TYPE_P (type))
14427 return (tree_expr_nonnegative_warnv_p (op0,
6ac01510 14428 strict_overflow_p)
e918a58a 14429 && tree_expr_nonnegative_warnv_p (op1,
6ac01510 14430 strict_overflow_p));
96f26e41 14431
e15bb5c6 14432 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
e2cca9be 14433 both unsigned and at least 2 bits shorter than the result. */
e918a58a
RAE
14434 if (TREE_CODE (type) == INTEGER_TYPE
14435 && TREE_CODE (op0) == NOP_EXPR
14436 && TREE_CODE (op1) == NOP_EXPR)
96f26e41 14437 {
e918a58a
RAE
14438 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14439 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
8df83eae
RK
14440 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14441 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
96f26e41
RS
14442 {
14443 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14444 TYPE_PRECISION (inner2)) + 1;
e918a58a 14445 return prec < TYPE_PRECISION (type);
96f26e41
RS
14446 }
14447 }
14448 break;
f7df23be
RS
14449
14450 case MULT_EXPR:
e918a58a 14451 if (FLOAT_TYPE_P (type))
f7df23be
RS
14452 {
14453 /* x * x for floating point x is always non-negative. */
e918a58a 14454 if (operand_equal_p (op0, op1, 0))
682d0395 14455 return true;
e918a58a 14456 return (tree_expr_nonnegative_warnv_p (op0,
6ac01510 14457 strict_overflow_p)
e918a58a 14458 && tree_expr_nonnegative_warnv_p (op1,
6ac01510 14459 strict_overflow_p));
f7df23be 14460 }
96f26e41 14461
e15bb5c6 14462 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
96f26e41 14463 both unsigned and their combined width is narrower than that of the result. */
e918a58a 14464 if (TREE_CODE (type) == INTEGER_TYPE
cdd6a337
MLI
14465 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14466 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
96f26e41 14467 {
b8698a0f 14468 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
cdd6a337
MLI
14469 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14470 : TREE_TYPE (op0);
b8698a0f 14471 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
cdd6a337
MLI
14472 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14473 : TREE_TYPE (op1);
14474
14475 bool unsigned0 = TYPE_UNSIGNED (inner0);
14476 bool unsigned1 = TYPE_UNSIGNED (inner1);
14477
14478 if (TREE_CODE (op0) == INTEGER_CST)
14479 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14480
14481 if (TREE_CODE (op1) == INTEGER_CST)
14482 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14483
14484 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14485 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14486 {
14487 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14488 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14489 : TYPE_PRECISION (inner0);
14490
14491 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14492 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14493 : TYPE_PRECISION (inner1);
14494
14495 return precision0 + precision1 < TYPE_PRECISION (type);
14496 }
96f26e41 14497 }
682d0395 14498 return false;
f7df23be 14499
196f5a8d
VR
14500 case BIT_AND_EXPR:
14501 case MAX_EXPR:
e918a58a 14502 return (tree_expr_nonnegative_warnv_p (op0,
6ac01510 14503 strict_overflow_p)
e918a58a 14504 || tree_expr_nonnegative_warnv_p (op1,
6ac01510 14505 strict_overflow_p));
196f5a8d
VR
14506
14507 case BIT_IOR_EXPR:
14508 case BIT_XOR_EXPR:
14509 case MIN_EXPR:
14510 case RDIV_EXPR:
ada11335
KG
14511 case TRUNC_DIV_EXPR:
14512 case CEIL_DIV_EXPR:
14513 case FLOOR_DIV_EXPR:
14514 case ROUND_DIV_EXPR:
e918a58a 14515 return (tree_expr_nonnegative_warnv_p (op0,
6ac01510 14516 strict_overflow_p)
e918a58a 14517 && tree_expr_nonnegative_warnv_p (op1,
6ac01510 14518 strict_overflow_p));
96f26e41 14519
ada11335
KG
14520 case TRUNC_MOD_EXPR:
14521 case CEIL_MOD_EXPR:
14522 case FLOOR_MOD_EXPR:
14523 case ROUND_MOD_EXPR:
e918a58a 14524 return tree_expr_nonnegative_warnv_p (op0,
6ac01510 14525 strict_overflow_p);
e918a58a
RAE
14526 default:
14527 return tree_simple_nonnegative_warnv_p (code, type);
14528 }
96f26e41 14529
e918a58a
RAE
14530 /* We don't know sign of `t', so be conservative and return false. */
14531 return false;
14532}
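/* Example of the widening-multiply rule above: for unsigned char A and B,
   the product (int) A * (int) B -- each operand a widening NOP_EXPR from
   an 8-bit unsigned type -- needs at most 8 + 8 = 16 bits, which is less
   than the precision of a 32-bit signed result, so the product is known
   to be non-negative.  */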
96f26e41 14533
e918a58a
RAE
14534/* Return true if T is known to be non-negative. If the return
14535 value is based on the assumption that signed overflow is undefined,
14536 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14537 *STRICT_OVERFLOW_P. */
14538
2d3cd5d5 14539bool
e918a58a
RAE
14540tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14541{
14542 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14543 return true;
14544
07c40d0b 14545 switch (TREE_CODE (t))
e918a58a 14546 {
e918a58a
RAE
14547 case INTEGER_CST:
14548 return tree_int_cst_sgn (t) >= 0;
14549
14550 case REAL_CST:
14551 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14552
14553 case FIXED_CST:
14554 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
196f5a8d
VR
14555
14556 case COND_EXPR:
6ac01510
ILT
14557 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14558 strict_overflow_p)
14559 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14560 strict_overflow_p));
e918a58a
RAE
14561 default:
14562 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14563 TREE_TYPE (t));
14564 }
14565 /* We don't know sign of `t', so be conservative and return false. */
14566 return false;
14567}
b1500d00 14568
a1a6e271
RAE
14569/* Return true if T is known to be non-negative. If the return
14570 value is based on the assumption that signed overflow is undefined,
14571 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14572 *STRICT_OVERFLOW_P. */
14573
14574bool
726a989a 14575tree_call_nonnegative_warnv_p (tree type, tree fndecl,
a1a6e271
RAE
14576 tree arg0, tree arg1, bool *strict_overflow_p)
14577{
14578 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14579 switch (DECL_FUNCTION_CODE (fndecl))
14580 {
14581 CASE_FLT_FN (BUILT_IN_ACOS):
14582 CASE_FLT_FN (BUILT_IN_ACOSH):
14583 CASE_FLT_FN (BUILT_IN_CABS):
14584 CASE_FLT_FN (BUILT_IN_COSH):
14585 CASE_FLT_FN (BUILT_IN_ERFC):
14586 CASE_FLT_FN (BUILT_IN_EXP):
14587 CASE_FLT_FN (BUILT_IN_EXP10):
14588 CASE_FLT_FN (BUILT_IN_EXP2):
14589 CASE_FLT_FN (BUILT_IN_FABS):
14590 CASE_FLT_FN (BUILT_IN_FDIM):
14591 CASE_FLT_FN (BUILT_IN_HYPOT):
14592 CASE_FLT_FN (BUILT_IN_POW10):
14593 CASE_INT_FN (BUILT_IN_FFS):
14594 CASE_INT_FN (BUILT_IN_PARITY):
14595 CASE_INT_FN (BUILT_IN_POPCOUNT):
14596 case BUILT_IN_BSWAP32:
14597 case BUILT_IN_BSWAP64:
14598 /* Always true. */
14599 return true;
14600
14601 CASE_FLT_FN (BUILT_IN_SQRT):
14602 /* sqrt(-0.0) is -0.0. */
14603 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14604 return true;
14605 return tree_expr_nonnegative_warnv_p (arg0,
14606 strict_overflow_p);
14607
14608 CASE_FLT_FN (BUILT_IN_ASINH):
14609 CASE_FLT_FN (BUILT_IN_ATAN):
14610 CASE_FLT_FN (BUILT_IN_ATANH):
14611 CASE_FLT_FN (BUILT_IN_CBRT):
14612 CASE_FLT_FN (BUILT_IN_CEIL):
14613 CASE_FLT_FN (BUILT_IN_ERF):
14614 CASE_FLT_FN (BUILT_IN_EXPM1):
14615 CASE_FLT_FN (BUILT_IN_FLOOR):
14616 CASE_FLT_FN (BUILT_IN_FMOD):
14617 CASE_FLT_FN (BUILT_IN_FREXP):
14618 CASE_FLT_FN (BUILT_IN_LCEIL):
14619 CASE_FLT_FN (BUILT_IN_LDEXP):
14620 CASE_FLT_FN (BUILT_IN_LFLOOR):
14621 CASE_FLT_FN (BUILT_IN_LLCEIL):
14622 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14623 CASE_FLT_FN (BUILT_IN_LLRINT):
14624 CASE_FLT_FN (BUILT_IN_LLROUND):
14625 CASE_FLT_FN (BUILT_IN_LRINT):
14626 CASE_FLT_FN (BUILT_IN_LROUND):
14627 CASE_FLT_FN (BUILT_IN_MODF):
14628 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14629 CASE_FLT_FN (BUILT_IN_RINT):
14630 CASE_FLT_FN (BUILT_IN_ROUND):
14631 CASE_FLT_FN (BUILT_IN_SCALB):
14632 CASE_FLT_FN (BUILT_IN_SCALBLN):
14633 CASE_FLT_FN (BUILT_IN_SCALBN):
14634 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14635 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14636 CASE_FLT_FN (BUILT_IN_SINH):
14637 CASE_FLT_FN (BUILT_IN_TANH):
14638 CASE_FLT_FN (BUILT_IN_TRUNC):
14639 /* True if the 1st argument is nonnegative. */
14640 return tree_expr_nonnegative_warnv_p (arg0,
14641 strict_overflow_p);
14642
14643 CASE_FLT_FN (BUILT_IN_FMAX):
14644 /* True if the 1st OR 2nd arguments are nonnegative. */
14645 return (tree_expr_nonnegative_warnv_p (arg0,
14646 strict_overflow_p)
14647 || (tree_expr_nonnegative_warnv_p (arg1,
14648 strict_overflow_p)));
14649
14650 CASE_FLT_FN (BUILT_IN_FMIN):
14651 /* True if the 1st AND 2nd arguments are nonnegative. */
14652 return (tree_expr_nonnegative_warnv_p (arg0,
14653 strict_overflow_p)
14654 && (tree_expr_nonnegative_warnv_p (arg1,
14655 strict_overflow_p)));
14656
14657 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14658 /* True if the 2nd argument is nonnegative. */
14659 return tree_expr_nonnegative_warnv_p (arg1,
14660 strict_overflow_p);
14661
14662 CASE_FLT_FN (BUILT_IN_POWI):
14663 /* True if the 1st argument is nonnegative or the second
14664 argument is an even integer. */
d0599470
RAE
14665 if (TREE_CODE (arg1) == INTEGER_CST
14666 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14667 return true;
a1a6e271
RAE
14668 return tree_expr_nonnegative_warnv_p (arg0,
14669 strict_overflow_p);
14670
14671 CASE_FLT_FN (BUILT_IN_POW):
14672 /* True if the 1st argument is nonnegative or the second
14673 argument is an even integer valued real. */
14674 if (TREE_CODE (arg1) == REAL_CST)
14675 {
14676 REAL_VALUE_TYPE c;
14677 HOST_WIDE_INT n;
14678
14679 c = TREE_REAL_CST (arg1);
14680 n = real_to_integer (&c);
14681 if ((n & 1) == 0)
14682 {
14683 REAL_VALUE_TYPE cint;
14684 real_from_integer (&cint, VOIDmode, n,
14685 n < 0 ? -1 : 0, 0);
14686 if (real_identical (&c, &cint))
14687 return true;
14688 }
14689 }
14690 return tree_expr_nonnegative_warnv_p (arg0,
14691 strict_overflow_p);
14692
14693 default:
14694 break;
14695 }
726a989a 14696 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
a1a6e271
RAE
14697 type);
14698}
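/* For example, under the rules above a call such as pow (x, 2.0) is known
   non-negative for any x because 2.0 is an even integer valued real, and
   calls like fabs (y) or exp (y) are always non-negative regardless of
   their argument.  */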
14699
e918a58a
RAE
14700/* Return true if T is known to be non-negative. If the return
14701 value is based on the assumption that signed overflow is undefined,
14702 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14703 *STRICT_OVERFLOW_P. */
96f26e41 14704
2d3cd5d5 14705bool
e918a58a
RAE
14706tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14707{
07c40d0b 14708 enum tree_code code = TREE_CODE (t);
e918a58a
RAE
14709 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14710 return true;
96f26e41 14711
e918a58a
RAE
14712 switch (code)
14713 {
3a5b9284
RH
14714 case TARGET_EXPR:
14715 {
14716 tree temp = TARGET_EXPR_SLOT (t);
14717 t = TARGET_EXPR_INITIAL (t);
14718
14719 /* If the initializer is non-void, then it's a normal expression
14720 that will be assigned to the slot. */
14721 if (!VOID_TYPE_P (t))
6ac01510 14722 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
3a5b9284
RH
14723
14724 /* Otherwise, the initializer sets the slot in some way. One common
14725 way is an assignment statement at the end of the initializer. */
14726 while (1)
14727 {
14728 if (TREE_CODE (t) == BIND_EXPR)
14729 t = expr_last (BIND_EXPR_BODY (t));
14730 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14731 || TREE_CODE (t) == TRY_CATCH_EXPR)
14732 t = expr_last (TREE_OPERAND (t, 0));
14733 else if (TREE_CODE (t) == STATEMENT_LIST)
14734 t = expr_last (t);
14735 else
14736 break;
14737 }
726a989a
RB
14738 if (TREE_CODE (t) == MODIFY_EXPR
14739 && TREE_OPERAND (t, 0) == temp)
14740 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
6ac01510 14741 strict_overflow_p);
3a5b9284 14742
682d0395 14743 return false;
3a5b9284
RH
14744 }
14745
07bae5ad 14746 case CALL_EXPR:
2f503025 14747 {
a1a6e271
RAE
14748 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14749 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14750
726a989a 14751 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
a1a6e271
RAE
14752 get_callee_fndecl (t),
14753 arg0,
14754 arg1,
14755 strict_overflow_p);
2f503025 14756 }
e918a58a
RAE
14757 case COMPOUND_EXPR:
14758 case MODIFY_EXPR:
726a989a 14759 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
e918a58a
RAE
14760 strict_overflow_p);
14761 case BIND_EXPR:
14762 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14763 strict_overflow_p);
14764 case SAVE_EXPR:
14765 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14766 strict_overflow_p);
07bae5ad 14767
a36556a8 14768 default:
e918a58a
RAE
14769 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14770 TREE_TYPE (t));
a36556a8 14771 }
96f26e41
RS
14772
14773 /* We don't know sign of `t', so be conservative and return false. */
682d0395 14774 return false;
a36556a8
ZW
14775}
14776
e918a58a
RAE
14777/* Return true if T is known to be non-negative. If the return
14778 value is based on the assumption that signed overflow is undefined,
14779 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14780 *STRICT_OVERFLOW_P. */
14781
14782bool
14783tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14784{
14785 enum tree_code code;
14786 if (t == error_mark_node)
14787 return false;
14788
14789 code = TREE_CODE (t);
14790 switch (TREE_CODE_CLASS (code))
14791 {
14792 case tcc_binary:
14793 case tcc_comparison:
14794 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14795 TREE_TYPE (t),
14796 TREE_OPERAND (t, 0),
14797 TREE_OPERAND (t, 1),
14798 strict_overflow_p);
14799
14800 case tcc_unary:
14801 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14802 TREE_TYPE (t),
14803 TREE_OPERAND (t, 0),
14804 strict_overflow_p);
14805
14806 case tcc_constant:
14807 case tcc_declaration:
14808 case tcc_reference:
14809 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14810
14811 default:
14812 break;
14813 }
14814
14815 switch (code)
14816 {
14817 case TRUTH_AND_EXPR:
14818 case TRUTH_OR_EXPR:
14819 case TRUTH_XOR_EXPR:
14820 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14821 TREE_TYPE (t),
14822 TREE_OPERAND (t, 0),
14823 TREE_OPERAND (t, 1),
14824 strict_overflow_p);
14825 case TRUTH_NOT_EXPR:
14826 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14827 TREE_TYPE (t),
14828 TREE_OPERAND (t, 0),
14829 strict_overflow_p);
14830
14831 case COND_EXPR:
14832 case CONSTRUCTOR:
14833 case OBJ_TYPE_REF:
14834 case ASSERT_EXPR:
14835 case ADDR_EXPR:
14836 case WITH_SIZE_EXPR:
e918a58a 14837 case SSA_NAME:
e918a58a
RAE
14838 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14839
14840 default:
14841 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14842 }
14843}
14844
6ac01510
ILT
14845/* Return true if `t' is known to be non-negative. Handle warnings
14846 about undefined signed overflow. */
14847
14848bool
14849tree_expr_nonnegative_p (tree t)
14850{
14851 bool ret, strict_overflow_p;
14852
14853 strict_overflow_p = false;
14854 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14855 if (strict_overflow_p)
14856 fold_overflow_warning (("assuming signed overflow does not occur when "
14857 "determining that expression is always "
14858 "non-negative"),
14859 WARN_STRICT_OVERFLOW_MISC);
14860 return ret;
14861}
14862
74dd418c
RAE
14863
14864/* Return true when (CODE OP0) is an address and is known to be nonzero.
8e7b3a43 14865 For floating point we further ensure that T is not denormal.
6ac01510
ILT
14866 Similar logic is present in nonzero_address in rtlanal.h.
14867
14868 If the return value is based on the assumption that signed overflow
14869 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14870 change *STRICT_OVERFLOW_P. */
8e7b3a43 14871
2d3cd5d5 14872bool
74dd418c
RAE
14873tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14874 bool *strict_overflow_p)
8e7b3a43 14875{
74dd418c
RAE
14876 switch (code)
14877 {
14878 case ABS_EXPR:
14879 return tree_expr_nonzero_warnv_p (op0,
14880 strict_overflow_p);
8e7b3a43 14881
74dd418c
RAE
14882 case NOP_EXPR:
14883 {
14884 tree inner_type = TREE_TYPE (op0);
14885 tree outer_type = type;
8e7b3a43 14886
74dd418c
RAE
14887 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14888 && tree_expr_nonzero_warnv_p (op0,
14889 strict_overflow_p));
14890 }
14891 break;
b16caf72 14892
74dd418c
RAE
14893 case NON_LVALUE_EXPR:
14894 return tree_expr_nonzero_warnv_p (op0,
6ac01510 14895 strict_overflow_p);
8e7b3a43 14896
74dd418c
RAE
14897 default:
14898 break;
14899 }
14900
14901 return false;
14902}
14903
14904/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14905 For floating point we further ensure that T is not denormal.
14906 Similar logic is present in nonzero_address in rtlanal.h.
14907
14908 If the return value is based on the assumption that signed overflow
14909 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14910 change *STRICT_OVERFLOW_P. */
8e7b3a43 14911
2d3cd5d5 14912bool
74dd418c
RAE
14913tree_binary_nonzero_warnv_p (enum tree_code code,
14914 tree type,
14915 tree op0,
14916 tree op1, bool *strict_overflow_p)
14917{
14918 bool sub_strict_overflow_p;
14919 switch (code)
14920 {
5be014d5 14921 case POINTER_PLUS_EXPR:
8e7b3a43 14922 case PLUS_EXPR:
eeef0e45 14923 if (TYPE_OVERFLOW_UNDEFINED (type))
8e7b3a43
KH
14924 {
14925 /* In the presence of negative values it is hard
 14926 to say anything definite. */
6ac01510 14927 sub_strict_overflow_p = false;
74dd418c 14928 if (!tree_expr_nonnegative_warnv_p (op0,
6ac01510 14929 &sub_strict_overflow_p)
74dd418c 14930 || !tree_expr_nonnegative_warnv_p (op1,
6ac01510 14931 &sub_strict_overflow_p))
8e7b3a43
KH
14932 return false;
14933 /* One of the operands must be positive and the other non-negative. */
6ac01510
ILT
14934 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14935 overflows, on a twos-complement machine the sum of two
14936 nonnegative numbers can never be zero. */
74dd418c 14937 return (tree_expr_nonzero_warnv_p (op0,
6ac01510 14938 strict_overflow_p)
74dd418c 14939 || tree_expr_nonzero_warnv_p (op1,
6ac01510 14940 strict_overflow_p));
8e7b3a43
KH
14941 }
14942 break;
14943
14944 case MULT_EXPR:
eeef0e45 14945 if (TYPE_OVERFLOW_UNDEFINED (type))
8e7b3a43 14946 {
74dd418c 14947 if (tree_expr_nonzero_warnv_p (op0,
6ac01510 14948 strict_overflow_p)
74dd418c 14949 && tree_expr_nonzero_warnv_p (op1,
6ac01510
ILT
14950 strict_overflow_p))
14951 {
14952 *strict_overflow_p = true;
14953 return true;
14954 }
8e7b3a43
KH
14955 }
14956 break;
14957
74dd418c
RAE
14958 case MIN_EXPR:
14959 sub_strict_overflow_p = false;
14960 if (tree_expr_nonzero_warnv_p (op0,
14961 &sub_strict_overflow_p)
14962 && tree_expr_nonzero_warnv_p (op1,
14963 &sub_strict_overflow_p))
14964 {
14965 if (sub_strict_overflow_p)
14966 *strict_overflow_p = true;
14967 }
14968 break;
8e7b3a43 14969
74dd418c
RAE
14970 case MAX_EXPR:
14971 sub_strict_overflow_p = false;
14972 if (tree_expr_nonzero_warnv_p (op0,
14973 &sub_strict_overflow_p))
14974 {
14975 if (sub_strict_overflow_p)
14976 *strict_overflow_p = true;
14977
14978 /* When both operands are nonzero, then MAX must be too. */
14979 if (tree_expr_nonzero_warnv_p (op1,
14980 strict_overflow_p))
14981 return true;
14982
14983 /* MAX where operand 0 is positive is positive. */
14984 return tree_expr_nonnegative_warnv_p (op0,
14985 strict_overflow_p);
14986 }
14987 /* MAX where operand 1 is positive is positive. */
14988 else if (tree_expr_nonzero_warnv_p (op1,
14989 &sub_strict_overflow_p)
14990 && tree_expr_nonnegative_warnv_p (op1,
14991 &sub_strict_overflow_p))
14992 {
14993 if (sub_strict_overflow_p)
14994 *strict_overflow_p = true;
14995 return true;
14996 }
14997 break;
14998
14999 case BIT_IOR_EXPR:
15000 return (tree_expr_nonzero_warnv_p (op1,
15001 strict_overflow_p)
15002 || tree_expr_nonzero_warnv_p (op0,
15003 strict_overflow_p));
15004
15005 default:
8e7b3a43 15006 break;
74dd418c 15007 }
8e7b3a43 15008
74dd418c
RAE
15009 return false;
15010}
15011
15012/* Return true when T is an address and is known to be nonzero.
15013 For floating point we further ensure that T is not denormal.
15014 Similar logic is present in nonzero_address in rtlanal.h.
15015
15016 If the return value is based on the assumption that signed overflow
15017 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15018 change *STRICT_OVERFLOW_P. */
15019
2d3cd5d5 15020bool
74dd418c
RAE
15021tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15022{
15023 bool sub_strict_overflow_p;
15024 switch (TREE_CODE (t))
15025 {
74dd418c
RAE
15026 case INTEGER_CST:
15027 return !integer_zerop (t);
15028
15029 case ADDR_EXPR:
88f19756 15030 {
3d7a712a
RG
15031 tree base = TREE_OPERAND (t, 0);
15032 if (!DECL_P (base))
15033 base = get_base_address (base);
88f19756
RH
15034
15035 if (!base)
15036 return false;
15037
4d35e75c
PB
15038 /* Weak declarations may link to NULL. Other declarations may also end up
 15039 at a null address, so only assume a nonzero address when
 15040 -fdelete-null-pointer-checks is in effect; stack variables are nonzero regardless. */
15041 if (DECL_P (base)
15042 && (flag_delete_null_pointer_checks
3d7a712a
RG
15043 || (DECL_CONTEXT (base)
15044 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15045 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
b45f0e58 15046 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
88f19756
RH
15047
15048 /* Constants are never weak. */
6615c446 15049 if (CONSTANT_CLASS_P (base))
88f19756
RH
15050 return true;
15051
15052 return false;
15053 }
8e7b3a43
KH
15054
15055 case COND_EXPR:
6ac01510
ILT
15056 sub_strict_overflow_p = false;
15057 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15058 &sub_strict_overflow_p)
15059 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15060 &sub_strict_overflow_p))
15061 {
15062 if (sub_strict_overflow_p)
15063 *strict_overflow_p = true;
15064 return true;
15065 }
15066 break;
8e7b3a43 15067
74dd418c 15068 default:
6ac01510 15069 break;
74dd418c
RAE
15070 }
15071 return false;
15072}
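/* Example of the ADDR_EXPR case above: &local_var, where local_var is an
   automatic variable of the current function, is known nonzero even
   without -fdelete-null-pointer-checks, whereas the address of a weak
   symbol is never assumed nonzero because it may resolve to NULL.  */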
8e7b3a43 15073
74dd418c
RAE
15074/* Return true when T is an address and is known to be nonzero.
15075 For floating point we further ensure that T is not denormal.
15076 Similar logic is present in nonzero_address in rtlanal.h.
6ac01510 15077
74dd418c
RAE
15078 If the return value is based on the assumption that signed overflow
15079 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15080 change *STRICT_OVERFLOW_P. */
8e7b3a43 15081
74dd418c
RAE
15082bool
15083tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15084{
15085 tree type = TREE_TYPE (t);
15086 enum tree_code code;
15087
15088 /* Doing something useful for floating point would need more work. */
15089 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15090 return false;
15091
15092 code = TREE_CODE (t);
15093 switch (TREE_CODE_CLASS (code))
15094 {
15095 case tcc_unary:
15096 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15097 strict_overflow_p);
15098 case tcc_binary:
15099 case tcc_comparison:
15100 return tree_binary_nonzero_warnv_p (code, type,
15101 TREE_OPERAND (t, 0),
15102 TREE_OPERAND (t, 1),
6ac01510 15103 strict_overflow_p);
74dd418c
RAE
15104 case tcc_constant:
15105 case tcc_declaration:
15106 case tcc_reference:
15107 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15108
15109 default:
8e7b3a43 15110 break;
74dd418c
RAE
15111 }
15112
15113 switch (code)
15114 {
15115 case TRUTH_NOT_EXPR:
15116 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15117 strict_overflow_p);
15118
15119 case TRUTH_AND_EXPR:
15120 case TRUTH_OR_EXPR:
15121 case TRUTH_XOR_EXPR:
15122 return tree_binary_nonzero_warnv_p (code, type,
15123 TREE_OPERAND (t, 0),
15124 TREE_OPERAND (t, 1),
15125 strict_overflow_p);
15126
15127 case COND_EXPR:
15128 case CONSTRUCTOR:
15129 case OBJ_TYPE_REF:
15130 case ASSERT_EXPR:
15131 case ADDR_EXPR:
15132 case WITH_SIZE_EXPR:
74dd418c 15133 case SSA_NAME:
74dd418c 15134 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8e7b3a43
KH
15135
15136 case COMPOUND_EXPR:
15137 case MODIFY_EXPR:
15138 case BIND_EXPR:
726a989a 15139 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
6ac01510 15140 strict_overflow_p);
8e7b3a43
KH
15141
15142 case SAVE_EXPR:
6ac01510
ILT
15143 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15144 strict_overflow_p);
8e7b3a43 15145
4db8040c
JM
15146 case CALL_EXPR:
15147 return alloca_call_p (t);
15148
8e7b3a43
KH
15149 default:
15150 break;
15151 }
15152 return false;
15153}
15154
6ac01510
ILT
15155/* Return true when T is an address and is known to be nonzero.
15156 Handle warnings about undefined signed overflow. */
15157
15158bool
15159tree_expr_nonzero_p (tree t)
15160{
15161 bool ret, strict_overflow_p;
15162
15163 strict_overflow_p = false;
15164 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15165 if (strict_overflow_p)
15166 fold_overflow_warning (("assuming signed overflow does not occur when "
15167 "determining that expression is always "
15168 "non-zero"),
15169 WARN_STRICT_OVERFLOW_MISC);
15170 return ret;
15171}
15172
6de9cd9a
DN
15173/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15174 attempt to fold the expression to a constant without modifying TYPE,
15175 OP0 or OP1.
15176
15177 If the expression could be simplified to a constant, then return
 15178 the constant. If the expression cannot be simplified to a
41704a38 15179 constant, then return NULL_TREE. */
6de9cd9a
DN
15180
15181tree
b52d5eaa 15182fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
6de9cd9a 15183{
054632e8
RS
15184 tree tem = fold_binary (code, type, op0, op1);
15185 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
6de9cd9a
DN
15186}
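/* Usage sketch (illustrative, not from the original source):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                         two, three);

   Here SUM is expected to be the INTEGER_CST 5; had the operands not
   folded to a constant, NULL_TREE would have been returned instead.  */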
15187
15188/* Given the components of a unary expression CODE, TYPE and OP0,
15189 attempt to fold the expression to a constant without modifying
d1822754 15190 TYPE or OP0.
6de9cd9a
DN
15191
15192 If the expression could be simplified to a constant, then return
 15193 the constant. If the expression cannot be simplified to a
41704a38 15194 constant, then return NULL_TREE. */
6de9cd9a
DN
15195
15196tree
b52d5eaa 15197fold_unary_to_constant (enum tree_code code, tree type, tree op0)
6de9cd9a 15198{
054632e8
RS
15199 tree tem = fold_unary (code, type, op0);
15200 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
6de9cd9a
DN
15201}
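/* Likewise (illustrative): fold_unary_to_constant (NEGATE_EXPR,
   integer_type_node, build_int_cst (integer_type_node, 7)) is expected to
   yield the INTEGER_CST -7, and NULL_TREE for a non-constant operand.  */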
15202
15203/* If EXP represents referencing an element in a constant string
15204 (either via pointer arithmetic or array indexing), return the
15205 tree representing the value accessed, otherwise return NULL. */
15206
15207tree
15208fold_read_from_constant_string (tree exp)
15209{
8e3dc7a3
RG
15210 if ((TREE_CODE (exp) == INDIRECT_REF
15211 || TREE_CODE (exp) == ARRAY_REF)
15212 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
6de9cd9a
DN
15213 {
15214 tree exp1 = TREE_OPERAND (exp, 0);
15215 tree index;
15216 tree string;
db3927fb 15217 location_t loc = EXPR_LOCATION (exp);
6de9cd9a
DN
15218
15219 if (TREE_CODE (exp) == INDIRECT_REF)
44de5aeb 15220 string = string_constant (exp1, &index);
6de9cd9a
DN
15221 else
15222 {
44de5aeb 15223 tree low_bound = array_ref_low_bound (exp);
db3927fb 15224 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
d1822754 15225
6de9cd9a
DN
15226 /* Optimize the special-case of a zero lower bound.
15227
15228 We convert the low_bound to sizetype to avoid some problems
15229 with constant folding. (E.g. suppose the lower bound is 1,
 15230 and its mode is QI. Without the conversion, (ARRAY
15231 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fa10beec 15232 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6de9cd9a 15233 if (! integer_zerop (low_bound))
db3927fb
AH
15234 index = size_diffop_loc (loc, index,
15235 fold_convert_loc (loc, sizetype, low_bound));
6de9cd9a
DN
15236
15237 string = exp1;
15238 }
15239
15240 if (string
f9c3744b 15241 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
6de9cd9a
DN
15242 && TREE_CODE (string) == STRING_CST
15243 && TREE_CODE (index) == INTEGER_CST
15244 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15245 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15246 == MODE_INT)
15247 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
0c4d4efb
DJ
15248 return build_int_cst_type (TREE_TYPE (exp),
15249 (TREE_STRING_POINTER (string)
15250 [TREE_INT_CST_LOW (index)]));
6de9cd9a
DN
15251 }
15252 return NULL;
15253}
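/* Illustrative example: for the ARRAY_REF "abc"[1] -- a STRING_CST indexed
   by the INTEGER_CST 1 -- the checks above succeed and the result is the
   character constant 'b' built by build_int_cst_type; a variable index, or
   one at or beyond TREE_STRING_LENGTH, makes the function return NULL.  */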
15254
33d13fac 15255/* Return the tree for neg (ARG0) when ARG0 is known to be either
325217ed 15256 an integer constant, real, or fixed-point constant.
33d13fac
KH
15257
15258 TYPE is the type of the result. */
15259
15260static tree
15261fold_negate_const (tree arg0, tree type)
15262{
15263 tree t = NULL_TREE;
15264
0bccc606 15265 switch (TREE_CODE (arg0))
33d13fac 15266 {
0bccc606
NS
15267 case INTEGER_CST:
15268 {
9589f23e
AS
15269 double_int val = tree_to_double_int (arg0);
15270 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15271
15272 t = force_fit_type_double (type, val, 1,
b8fca551 15273 (overflow | TREE_OVERFLOW (arg0))
d95787e6 15274 && !TYPE_UNSIGNED (type));
0bccc606
NS
15275 break;
15276 }
3e6688a7 15277
0bccc606 15278 case REAL_CST:
d49b6e1e 15279 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
0bccc606 15280 break;
d1822754 15281
325217ed
CF
15282 case FIXED_CST:
15283 {
15284 FIXED_VALUE_TYPE f;
15285 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15286 &(TREE_FIXED_CST (arg0)), NULL,
15287 TYPE_SATURATING (type));
15288 t = build_fixed (type, f);
15289 /* Propagate overflow flags. */
15290 if (overflow_p | TREE_OVERFLOW (arg0))
28ddeea1 15291 TREE_OVERFLOW (t) = 1;
325217ed
CF
15292 break;
15293 }
15294
0bccc606
NS
15295 default:
15296 gcc_unreachable ();
15297 }
3e6688a7 15298
33d13fac
KH
15299 return t;
15300}
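/* Worked example (illustrative): negating the INTEGER_CST 5 in a signed
   32-bit type gives -5 with no overflow, while negating INT_MIN wraps back
   to INT_MIN, so neg_double reports overflow and force_fit_type_double
   marks the result with TREE_OVERFLOW (only for signed types, as the
   condition above shows).  */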
15301
73c4ab99
KH
15302/* Return the tree for abs (ARG0) when ARG0 is known to be either
15303 an integer constant or real constant.
15304
15305 TYPE is the type of the result. */
15306
9655d83b 15307tree
73c4ab99
KH
15308fold_abs_const (tree arg0, tree type)
15309{
15310 tree t = NULL_TREE;
15311
0bccc606 15312 switch (TREE_CODE (arg0))
73c4ab99 15313 {
0bccc606 15314 case INTEGER_CST:
9589f23e
AS
15315 {
15316 double_int val = tree_to_double_int (arg0);
15317
15318 /* If the value is unsigned or non-negative, then the absolute value
15319 is the same as the ordinary value. */
15320 if (TYPE_UNSIGNED (type)
15321 || !double_int_negative_p (val))
15322 t = arg0;
15323
15324 /* If the value is negative, then the absolute value is
15325 its negation. */
15326 else
15327 {
15328 int overflow;
15329
15330 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15331 t = force_fit_type_double (type, val, -1,
15332 overflow | TREE_OVERFLOW (arg0));
15333 }
15334 }
0bccc606 15335 break;
3e6688a7 15336
0bccc606 15337 case REAL_CST:
73c4ab99 15338 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
d49b6e1e 15339 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
73c4ab99 15340 else
0bccc606
NS
15341 t = arg0;
15342 break;
3e6688a7 15343
0bccc606
NS
15344 default:
15345 gcc_unreachable ();
73c4ab99 15346 }
3e6688a7 15347
73c4ab99
KH
15348 return t;
15349}
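/* Worked example (illustrative): fold_abs_const on the INTEGER_CST -7
   returns 7 via the negation path, a non-negative or unsigned constant is
   returned unchanged, and the absolute value of INT_MIN in a signed type
   overflows, which force_fit_type_double records in the result.  */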
15350
a653e758
RS
15351/* Return the tree for not (ARG0) when ARG0 is known to be an integer
15352 constant. TYPE is the type of the result. */
15353
15354static tree
9589f23e 15355fold_not_const (const_tree arg0, tree type)
a653e758 15356{
9589f23e 15357 double_int val;
a653e758 15358
0bccc606 15359 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
3e6688a7 15360
9589f23e
AS
15361 val = double_int_not (tree_to_double_int (arg0));
15362 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
a653e758
RS
15363}
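/* Worked example (illustrative): in a signed 32-bit type, ~5 folds to -6;
   double_int_not simply complements both halves of the double_int and
   force_fit_type_double truncates the result to the type.  */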
15364
8e7b3a43
KH
15365/* Given CODE, a relational operator, the target type, TYPE and two
15366 constant operands OP0 and OP1, return the result of the
15367 relational operation. If the result is not a compile time
15368 constant, then return NULL_TREE. */
15369
15370static tree
15371fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15372{
1382f0f0 15373 int result, invert;
8e7b3a43
KH
15374
15375 /* From here on, the only cases we handle are when the result is
ee8db92b
RS
15376 known to be a constant. */
15377
15378 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15379 {
adb8e07e
RS
15380 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15381 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15382
ee8db92b 15383 /* Handle the cases where either operand is a NaN. */
adb8e07e 15384 if (real_isnan (c0) || real_isnan (c1))
ee8db92b
RS
15385 {
15386 switch (code)
15387 {
15388 case EQ_EXPR:
15389 case ORDERED_EXPR:
15390 result = 0;
15391 break;
15392
15393 case NE_EXPR:
15394 case UNORDERED_EXPR:
15395 case UNLT_EXPR:
15396 case UNLE_EXPR:
15397 case UNGT_EXPR:
15398 case UNGE_EXPR:
15399 case UNEQ_EXPR:
15400 result = 1;
15401 break;
15402
15403 case LT_EXPR:
15404 case LE_EXPR:
15405 case GT_EXPR:
15406 case GE_EXPR:
15407 case LTGT_EXPR:
15408 if (flag_trapping_math)
15409 return NULL_TREE;
15410 result = 0;
15411 break;
15412
15413 default:
0bccc606 15414 gcc_unreachable ();
ee8db92b
RS
15415 }
15416
15417 return constant_boolean_node (result, type);
15418 }
15419
adb8e07e 15420 return constant_boolean_node (real_compare (code, c0, c1), type);
ee8db92b
RS
15421 }
15422
325217ed
CF
15423 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15424 {
15425 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15426 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15427 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15428 }
15429
23b9463b
RS
15430 /* Handle equality/inequality of complex constants. */
15431 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15432 {
15433 tree rcond = fold_relational_const (code, type,
15434 TREE_REALPART (op0),
15435 TREE_REALPART (op1));
15436 tree icond = fold_relational_const (code, type,
15437 TREE_IMAGPART (op0),
15438 TREE_IMAGPART (op1));
15439 if (code == EQ_EXPR)
15440 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15441 else if (code == NE_EXPR)
15442 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15443 else
15444 return NULL_TREE;
15445 }
15446
ee8db92b 15447 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
8e7b3a43
KH
15448
15449 To compute GT, swap the arguments and do LT.
15450 To compute GE, do LT and invert the result.
15451 To compute LE, swap the arguments, do LT and invert the result.
15452 To compute NE, do EQ and invert the result.
15453
15454 Therefore, the code below must handle only EQ and LT. */
15455
15456 if (code == LE_EXPR || code == GT_EXPR)
15457 {
1382f0f0
RS
15458 tree tem = op0;
15459 op0 = op1;
15460 op1 = tem;
8e7b3a43
KH
15461 code = swap_tree_comparison (code);
15462 }
15463
15464 /* Note that it is safe to invert for real values here because we
ee8db92b 15465 have already handled the one case where it matters. */
8e7b3a43 15466
8e7b3a43
KH
15467 invert = 0;
15468 if (code == NE_EXPR || code == GE_EXPR)
15469 {
15470 invert = 1;
d1a7edaf 15471 code = invert_tree_comparison (code, false);
8e7b3a43
KH
15472 }
15473
15474 /* Compute a result for LT or EQ if args permit;
 15475 Otherwise return NULL_TREE. */
15476 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15477 {
15478 if (code == EQ_EXPR)
1382f0f0
RS
15479 result = tree_int_cst_equal (op0, op1);
15480 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15481 result = INT_CST_LT_UNSIGNED (op0, op1);
8e7b3a43 15482 else
1382f0f0 15483 result = INT_CST_LT (op0, op1);
8e7b3a43 15484 }
1382f0f0 15485 else
8e7b3a43
KH
15486 return NULL_TREE;
15487
15488 if (invert)
1382f0f0
RS
15489 result ^= 1;
15490 return constant_boolean_node (result, type);
8e7b3a43
KH
15491}
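/* Worked example (illustrative): folding "7 >= 3" rewrites GE as LT plus
   an inversion, INT_CST_LT (7, 3) yields 0, the invert step flips it to 1,
   and constant_boolean_node returns the true node of TYPE.  A comparison
   such as LT with a NaN operand folds to false only when trapping math is
   disabled; otherwise the NaN path above returns NULL_TREE and no folding
   happens.  */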
15492
3a687f8b
MM
15493/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15494 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15495 itself. */
0ad28dde
AP
15496
15497tree
15498fold_build_cleanup_point_expr (tree type, tree expr)
15499{
15500 /* If the expression does not have side effects then we don't have to wrap
15501 it with a cleanup point expression. */
15502 if (!TREE_SIDE_EFFECTS (expr))
15503 return expr;
0e256a82
AP
15504
 15505 /* If the expression is a return, check whether the expression inside the
 15506 return, or the right-hand side of the modify expression inside the return,
 15507 has no side effects. If either has no side effects, we don't need to
 15508 wrap the expression in a cleanup point expression. Note we don't check the
 15509 left-hand side of the modify because it should always be a return decl. */
15510 if (TREE_CODE (expr) == RETURN_EXPR)
15511 {
15512 tree op = TREE_OPERAND (expr, 0);
15513 if (!op || !TREE_SIDE_EFFECTS (op))
15514 return expr;
15515 op = TREE_OPERAND (op, 1);
15516 if (!TREE_SIDE_EFFECTS (op))
15517 return expr;
15518 }
b8698a0f 15519
0ad28dde
AP
15520 return build1 (CLEANUP_POINT_EXPR, type, expr);
15521}
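/* Illustrative examples: a side-effect-free expression such as "a + b" is
   returned unwrapped, and so is a RETURN_EXPR whose returned value has no
   side effects; an expression that does have side effects (say a call
   whose temporaries may need cleanups) gets a CLEANUP_POINT_EXPR built
   around it.  */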
15522
30d2e943
RG
15523/* Given a pointer value OP0 and a type TYPE, return a simplified version
15524 of an indirection through OP0, or NULL_TREE if no simplification is
15525 possible. */
cd3ce9b4 15526
095ecc24 15527tree
db3927fb 15528fold_indirect_ref_1 (location_t loc, tree type, tree op0)
cd3ce9b4 15529{
30d2e943 15530 tree sub = op0;
cd3ce9b4
JM
15531 tree subtype;
15532
6033ae2a 15533 STRIP_NOPS (sub);
6a720599
JM
15534 subtype = TREE_TYPE (sub);
15535 if (!POINTER_TYPE_P (subtype))
15536 return NULL_TREE;
15537
cd3ce9b4
JM
15538 if (TREE_CODE (sub) == ADDR_EXPR)
15539 {
15540 tree op = TREE_OPERAND (sub, 0);
15541 tree optype = TREE_TYPE (op);
f9f63ff2
AP
15542 /* *&CONST_DECL -> to the value of the const decl. */
15543 if (TREE_CODE (op) == CONST_DECL)
15544 return DECL_INITIAL (op);
41b9109a 15545 /* *&p => p; make sure to handle *&"str"[cst] here. */
30d2e943 15546 if (type == optype)
41b9109a
RG
15547 {
15548 tree fop = fold_read_from_constant_string (op);
15549 if (fop)
15550 return fop;
15551 else
15552 return op;
15553 }
cd3ce9b4
JM
15554 /* *(foo *)&fooarray => fooarray[0] */
15555 else if (TREE_CODE (optype) == ARRAY_TYPE
30d2e943 15556 && type == TREE_TYPE (optype))
0d56ab33
AP
15557 {
15558 tree type_domain = TYPE_DOMAIN (optype);
15559 tree min_val = size_zero_node;
15560 if (type_domain && TYPE_MIN_VALUE (type_domain))
15561 min_val = TYPE_MIN_VALUE (type_domain);
c9019218
JJ
15562 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15563 NULL_TREE, NULL_TREE);
0d56ab33 15564 }
4853940c
AP
15565 /* *(foo *)&complexfoo => __real__ complexfoo */
15566 else if (TREE_CODE (optype) == COMPLEX_TYPE
15567 && type == TREE_TYPE (optype))
db3927fb 15568 return fold_build1_loc (loc, REALPART_EXPR, type, op);
0890b981
AP
15569 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15570 else if (TREE_CODE (optype) == VECTOR_TYPE
15571 && type == TREE_TYPE (optype))
15572 {
15573 tree part_width = TYPE_SIZE (type);
15574 tree index = bitsize_int (0);
db3927fb 15575 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
0890b981 15576 }
cd3ce9b4
JM
15577 }
15578
a12bdb97
AP
15579 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15580 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
b8698a0f 15581 {
a12bdb97
AP
15582 tree op00 = TREE_OPERAND (sub, 0);
15583 tree op01 = TREE_OPERAND (sub, 1);
b8698a0f 15584
a12bdb97 15585 STRIP_NOPS (op00);
7bf8ca76 15586 if (TREE_CODE (op00) == ADDR_EXPR)
b8698a0f 15587 {
7bf8ca76
JM
15588 tree op00type;
15589 op00 = TREE_OPERAND (op00, 0);
15590 op00type = TREE_TYPE (op00);
b8698a0f 15591
7bf8ca76
JM
15592 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15593 if (TREE_CODE (op00type) == VECTOR_TYPE
15594 && type == TREE_TYPE (op00type))
15595 {
15596 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15597 tree part_width = TYPE_SIZE (type);
15598 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15599 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15600 tree index = bitsize_int (indexi);
a12bdb97 15601
7bf8ca76
JM
15602 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
15603 return fold_build3_loc (loc,
15604 BIT_FIELD_REF, type, op00,
15605 part_width, index);
a12bdb97 15606
7bf8ca76
JM
15607 }
15608 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15609 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15610 && type == TREE_TYPE (op00type))
15611 {
15612 tree size = TYPE_SIZE_UNIT (type);
15613 if (tree_int_cst_equal (size, op01))
15614 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15615 }
15616 /* ((foo *)&fooarray)[1] => fooarray[1] */
15617 else if (TREE_CODE (op00type) == ARRAY_TYPE
15618 && type == TREE_TYPE (op00type))
15619 {
15620 tree type_domain = TYPE_DOMAIN (op00type);
15621 tree min_val = size_zero_node;
15622 if (type_domain && TYPE_MIN_VALUE (type_domain))
15623 min_val = TYPE_MIN_VALUE (type_domain);
15624 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15625 TYPE_SIZE_UNIT (type));
15626 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
c9019218
JJ
15627 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15628 NULL_TREE, NULL_TREE);
7bf8ca76 15629 }
4853940c
AP
15630 }
15631 }
b8698a0f 15632
cd3ce9b4 15633 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
cd3ce9b4 15634 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
30d2e943 15635 && type == TREE_TYPE (TREE_TYPE (subtype)))
cd3ce9b4 15636 {
0d56ab33
AP
15637 tree type_domain;
15638 tree min_val = size_zero_node;
db3927fb 15639 sub = build_fold_indirect_ref_loc (loc, sub);
0d56ab33
AP
15640 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15641 if (type_domain && TYPE_MIN_VALUE (type_domain))
15642 min_val = TYPE_MIN_VALUE (type_domain);
c9019218
JJ
15643 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15644 NULL_TREE);
cd3ce9b4
JM
15645 }
15646
6a720599
JM
15647 return NULL_TREE;
15648}
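/* Illustrative example (a sketch of the POINTER_PLUS_EXPR case above):
   if V is a vector of four floats and SUB is "&V p+ 4", i.e. the address
   of V plus a constant byte offset of 4, then an indirection at type float
   is rewritten as BIT_FIELD_REF <V, 32, 32>, selecting element 1 of the
   vector, provided the offset stays within the vector.  */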
15649
15650/* Builds an expression for an indirection through T, simplifying some
15651 cases. */
15652
15653tree
db3927fb 15654build_fold_indirect_ref_loc (location_t loc, tree t)
6a720599 15655{
30d2e943 15656 tree type = TREE_TYPE (TREE_TYPE (t));
db3927fb 15657 tree sub = fold_indirect_ref_1 (loc, type, t);
6a720599
JM
15658
15659 if (sub)
15660 return sub;
db3927fb 15661
c9019218 15662 return build1_loc (loc, INDIRECT_REF, type, t);
6a720599
JM
15663}
15664
15665/* Given an INDIRECT_REF T, return either T or a simplified version. */
15666
15667tree
db3927fb 15668fold_indirect_ref_loc (location_t loc, tree t)
6a720599 15669{
db3927fb 15670 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
6a720599
JM
15671
15672 if (sub)
15673 return sub;
15674 else
15675 return t;
cd3ce9b4
JM
15676}
15677
9675412f
RS
15678/* Strip non-trapping, non-side-effecting tree nodes from an expression
15679 whose result is ignored. The type of the returned tree need not be
15680 the same as the original expression. */
15681
15682tree
15683fold_ignored_result (tree t)
15684{
15685 if (!TREE_SIDE_EFFECTS (t))
15686 return integer_zero_node;
15687
15688 for (;;)
15689 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15690 {
6615c446 15691 case tcc_unary:
9675412f
RS
15692 t = TREE_OPERAND (t, 0);
15693 break;
15694
6615c446
JO
15695 case tcc_binary:
15696 case tcc_comparison:
9675412f
RS
15697 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15698 t = TREE_OPERAND (t, 0);
15699 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15700 t = TREE_OPERAND (t, 1);
15701 else
15702 return t;
15703 break;
15704
6615c446 15705 case tcc_expression:
9675412f
RS
15706 switch (TREE_CODE (t))
15707 {
15708 case COMPOUND_EXPR:
15709 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15710 return t;
15711 t = TREE_OPERAND (t, 0);
15712 break;
15713
15714 case COND_EXPR:
15715 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15716 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15717 return t;
15718 t = TREE_OPERAND (t, 0);
15719 break;
15720
15721 default:
15722 return t;
15723 }
15724 break;
15725
15726 default:
15727 return t;
15728 }
15729}
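/* Illustrative examples: fold_ignored_result applied to "(f (), x)" drops
   the side-effect-free value operand and returns "f ()"; applied to
   "f () + y" it likewise keeps only "f ()"; and an expression with no side
   effects at all collapses to integer_zero_node.  */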
15730
15931954
RH
15731/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15732 This can only be applied to objects of a sizetype. */
15733
15734tree
db3927fb 15735round_up_loc (location_t loc, tree value, int divisor)
15931954 15736{
0a936b12 15737 tree div = NULL_TREE;
15931954 15738
0bccc606 15739 gcc_assert (divisor > 0);
15931954
RH
15740 if (divisor == 1)
15741 return value;
15742
15931954 15743 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
0a936b12
NS
15744 have to do anything. Only do this when we are not given a const,
15745 because in that case, this check is more expensive than just
8c27b7d4 15746 doing it. */
0a936b12
NS
15747 if (TREE_CODE (value) != INTEGER_CST)
15748 {
ce552f75 15749 div = build_int_cst (TREE_TYPE (value), divisor);
0a936b12
NS
15750
15751 if (multiple_of_p (TREE_TYPE (value), value, div))
15752 return value;
15753 }
15931954
RH
15754
15755 /* If divisor is a power of two, simplify this to bit manipulation. */
15756 if (divisor == (divisor & -divisor))
15757 {
74890d7b
RS
15758 if (TREE_CODE (value) == INTEGER_CST)
15759 {
9589f23e 15760 double_int val = tree_to_double_int (value);
bcf52d7b 15761 bool overflow_p;
74890d7b 15762
9589f23e 15763 if ((val.low & (divisor - 1)) == 0)
74890d7b
RS
15764 return value;
15765
bcf52d7b 15766 overflow_p = TREE_OVERFLOW (value);
9589f23e
AS
15767 val.low &= ~(divisor - 1);
15768 val.low += divisor;
15769 if (val.low == 0)
74890d7b 15770 {
9589f23e
AS
15771 val.high++;
15772 if (val.high == 0)
bcf52d7b 15773 overflow_p = true;
74890d7b 15774 }
bcf52d7b 15775
9589f23e 15776 return force_fit_type_double (TREE_TYPE (value), val,
bcf52d7b 15777 -1, overflow_p);
74890d7b
RS
15778 }
15779 else
15780 {
bcf52d7b
RS
15781 tree t;
15782
74890d7b 15783 t = build_int_cst (TREE_TYPE (value), divisor - 1);
db3927fb 15784 value = size_binop_loc (loc, PLUS_EXPR, value, t);
74890d7b 15785 t = build_int_cst (TREE_TYPE (value), -divisor);
db3927fb 15786 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
74890d7b 15787 }
15931954
RH
15788 }
15789 else
15790 {
0a936b12 15791 if (!div)
ce552f75 15792 div = build_int_cst (TREE_TYPE (value), divisor);
db3927fb
AH
15793 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15794 value = size_binop_loc (loc, MULT_EXPR, value, div);
15931954
RH
15795 }
15796
15797 return value;
15798}
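/* Worked example (illustrative): with DIVISOR == 8 the power-of-two path
   computes (VALUE + 7) & -8, so 13 rounds up to 16 and 16 is unchanged;
   with a non-power-of-two divisor such as 12, the CEIL_DIV_EXPR/MULT_EXPR
   path turns 13 into ceil (13 / 12) * 12 == 24.  */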
15799
15800/* Likewise, but round down. */
15801
15802tree
db3927fb 15803round_down_loc (location_t loc, tree value, int divisor)
15931954 15804{
0a936b12 15805 tree div = NULL_TREE;
15931954 15806
0bccc606 15807 gcc_assert (divisor > 0);
15931954
RH
15808 if (divisor == 1)
15809 return value;
15810
15931954 15811 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
0a936b12
NS
15812 have to do anything. Only do this when we are not given a const,
15813 because in that case, this check is more expensive than just
8c27b7d4 15814 doing it. */
0a936b12
NS
15815 if (TREE_CODE (value) != INTEGER_CST)
15816 {
ce552f75 15817 div = build_int_cst (TREE_TYPE (value), divisor);
0a936b12
NS
15818
15819 if (multiple_of_p (TREE_TYPE (value), value, div))
15820 return value;
15821 }
15931954
RH
15822
15823 /* If divisor is a power of two, simplify this to bit manipulation. */
15824 if (divisor == (divisor & -divisor))
15825 {
0a936b12 15826 tree t;
3e6688a7 15827
7d60be94 15828 t = build_int_cst (TREE_TYPE (value), -divisor);
db3927fb 15829 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15931954
RH
15830 }
15831 else
15832 {
0a936b12 15833 if (!div)
ce552f75 15834 div = build_int_cst (TREE_TYPE (value), divisor);
db3927fb
AH
15835 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15836 value = size_binop_loc (loc, MULT_EXPR, value, div);
15931954
RH
15837 }
15838
15839 return value;
15840}
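/* Worked example (illustrative): with DIVISOR == 8 this computes
   VALUE & -8, so 13 rounds down to 8; with DIVISOR == 12 the
   FLOOR_DIV_EXPR/MULT_EXPR path turns 13 into floor (13 / 12) * 12 == 12.  */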
2f4675b4 15841
7299dbfb
ZD
15842/* Returns the pointer to the base of the object addressed by EXP and
15843 extracts the information about the offset of the access, storing it
15844 to PBITPOS and POFFSET. */
15845
15846static tree
15847split_address_to_core_and_offset (tree exp,
15848 HOST_WIDE_INT *pbitpos, tree *poffset)
15849{
15850 tree core;
15851 enum machine_mode mode;
15852 int unsignedp, volatilep;
15853 HOST_WIDE_INT bitsize;
db3927fb 15854 location_t loc = EXPR_LOCATION (exp);
7299dbfb
ZD
15855
15856 if (TREE_CODE (exp) == ADDR_EXPR)
15857 {
15858 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
2614034e
EB
15859 poffset, &mode, &unsignedp, &volatilep,
15860 false);
db3927fb 15861 core = build_fold_addr_expr_loc (loc, core);
7299dbfb
ZD
15862 }
15863 else
15864 {
15865 core = exp;
15866 *pbitpos = 0;
15867 *poffset = NULL_TREE;
15868 }
15869
15870 return core;
15871}
15872
2f4675b4 15873/* Returns true if addresses of E1 and E2 differ by a constant, false
7299dbfb 15874 otherwise. If they do, E1 - E2 is stored in *DIFF. */
2f4675b4
ZD
15875
15876bool
15877ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15878{
15879 tree core1, core2;
2f4675b4
ZD
15880 HOST_WIDE_INT bitpos1, bitpos2;
15881 tree toffset1, toffset2, tdiff, type;
3e6688a7 15882
7299dbfb
ZD
15883 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15884 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
2f4675b4
ZD
15885
15886 if (bitpos1 % BITS_PER_UNIT != 0
15887 || bitpos2 % BITS_PER_UNIT != 0
15888 || !operand_equal_p (core1, core2, 0))
15889 return false;
15890
15891 if (toffset1 && toffset2)
15892 {
15893 type = TREE_TYPE (toffset1);
15894 if (type != TREE_TYPE (toffset2))
15895 toffset2 = fold_convert (type, toffset2);
15896
7f20a5b7 15897 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
87de2376 15898 if (!cst_and_fits_in_hwi (tdiff))
2f4675b4
ZD
15899 return false;
15900
87de2376 15901 *diff = int_cst_value (tdiff);
2f4675b4
ZD
15902 }
15903 else if (toffset1 || toffset2)
15904 {
15905 /* If only one of the offsets is non-constant, the difference cannot
15906 be a constant. */
15907 return false;
15908 }
15909 else
15910 *diff = 0;
15911
15912 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15913 return true;
15914}
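/* Illustrative example: for E1 == &a[3] and E2 == &a[1], with "a" an array
   of 4-byte integers, both addresses share the core "&a", the bit
   positions differ by 2 * 32, and *DIFF is set to 8.  If exactly one of
   the two addresses carried a variable offset, the function would return
   false.  */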
e3bb43c0
RS
15915
15916/* Simplify the floating point expression EXP when the sign of the
15917 result is not significant. Return NULL_TREE if no simplification
15918 is possible. */
15919
15920tree
15921fold_strip_sign_ops (tree exp)
15922{
15923 tree arg0, arg1;
db3927fb 15924 location_t loc = EXPR_LOCATION (exp);
e3bb43c0
RS
15925
15926 switch (TREE_CODE (exp))
15927 {
15928 case ABS_EXPR:
15929 case NEGATE_EXPR:
15930 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15931 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15932
15933 case MULT_EXPR:
15934 case RDIV_EXPR:
15935 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15936 return NULL_TREE;
15937 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15938 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15939 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
db3927fb 15940 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
7f20a5b7
KH
15941 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15942 arg1 ? arg1 : TREE_OPERAND (exp, 1));
e3bb43c0
RS
15943 break;
15944
b7e85170
KG
15945 case COMPOUND_EXPR:
15946 arg0 = TREE_OPERAND (exp, 0);
15947 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15948 if (arg1)
db3927fb 15949 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
b7e85170 15950 break;
b8698a0f 15951
b7e85170
KG
15952 case COND_EXPR:
15953 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15954 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15955 if (arg0 || arg1)
db3927fb
AH
15956 return fold_build3_loc (loc,
15957 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
b7e85170
KG
15958 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15959 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15960 break;
b8698a0f 15961
b81e7144 15962 case CALL_EXPR:
6af46feb
KG
15963 {
15964 const enum built_in_function fcode = builtin_mathfn_code (exp);
15965 switch (fcode)
15966 {
15967 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15968 /* Strip copysign function call, return the 1st argument. */
5039610b
SL
15969 arg0 = CALL_EXPR_ARG (exp, 0);
15970 arg1 = CALL_EXPR_ARG (exp, 1);
db3927fb 15971 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
6af46feb
KG
15972
15973 default:
15974 /* Strip sign ops from the argument of "odd" math functions. */
15975 if (negate_mathfn_p (fcode))
15976 {
5039610b 15977 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
6af46feb 15978 if (arg0)
db3927fb 15979 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
6af46feb
KG
15980 }
15981 break;
b81e7144 15982 }
6af46feb 15983 }
b81e7144
KG
15984 break;
15985
e3bb43c0
RS
15986 default:
15987 break;
15988 }
15989 return NULL_TREE;
15990}
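/* Illustrative example: in a context where the sign of the result is
   irrelevant (callers such as the builtin folders for fabs or cos use
   this), "-x * y" is simplified by the MULT_EXPR case above to "x * y",
   and "copysign (x, z)" is reduced to its first argument while keeping
   any side effects of "z"; when nothing can be stripped, NULL_TREE is
   returned and the caller keeps the original expression.  */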