/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "coretypes.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign bit.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
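
/* Worked example (editor's illustration, not part of the original source):
   with 4-bit two's complement values, a = 7 and b = 1 give sum = -8 (1000).
   a ^ b = 0110, so ~(a ^ b) = 1001 has the sign bit set (the signs of a and
   b agree), and a ^ sum = 1111 also has the sign bit set (a and sum differ
   in sign); their AND is negative, so OVERFLOW_SUM_SIGN reports overflow.
   For a = 7 and b = -1 (sum = 6), ~(a ^ b) = 0111 has a clear sign bit, so
   no overflow is reported.  */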
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
			   tree_to_double_int (arg2),
			   uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
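
/* Usage sketch (an editor's illustration, not code from this file): callers
   that fold speculatively typically bracket the work as

     fold_defer_overflow_warnings ();
     t = fold_binary (PLUS_EXPR, type, a, b);
     fold_undefer_overflow_warnings (t != NULL_TREE, stmt,
				     WARN_STRICT_OVERFLOW_MISC);

   so that a -Wstrict-overflow diagnostic is emitted only when the folded
   result is actually used; the identifiers t, type, a, b and stmt here are
   placeholders.  */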
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
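
/* Example (editor's note, not in the original source): sin, tan, cbrt and
   round are odd for all rounding modes, so -sin(x) may be rewritten as
   sin(-x).  rint and nearbyint are treated as odd only when
   -fno-rounding-math is in effect: under FE_DOWNWARD, rint (0.5) is 0.0 so
   -rint (0.5) is -0.0, while rint (-0.5) is -1.0, and the rewrite would
   change the result.  */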
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
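
/* Worked example (editor's illustration): for a signed 8-bit type PREC is 8,
   and the only constant whose negation overflows is -128, whose masked low
   bits equal (unsigned) 1 << 7.  The function therefore returns false
   exactly for TYPE_MIN_VALUE and true for every other INTEGER_CST of that
   signed type.  */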
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
413 type
= TREE_TYPE (t
);
416 switch (TREE_CODE (t
))
419 if (TYPE_OVERFLOW_WRAPS (type
))
422 /* Check that -CST will not overflow type. */
423 return may_negate_without_overflow_p (t
);
425 return (INTEGRAL_TYPE_P (type
)
426 && TYPE_OVERFLOW_WRAPS (type
));
433 /* We want to canonicalize to positive real constants. Pretend
434 that only negative ones can be easily negated. */
435 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
438 return negate_expr_p (TREE_REALPART (t
))
439 && negate_expr_p (TREE_IMAGPART (t
));
442 return negate_expr_p (TREE_OPERAND (t
, 0))
443 && negate_expr_p (TREE_OPERAND (t
, 1));
446 return negate_expr_p (TREE_OPERAND (t
, 0));
449 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
450 || HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
452 /* -(A + B) -> (-B) - A. */
453 if (negate_expr_p (TREE_OPERAND (t
, 1))
454 && reorder_operands_p (TREE_OPERAND (t
, 0),
455 TREE_OPERAND (t
, 1)))
457 /* -(A + B) -> (-A) - B. */
458 return negate_expr_p (TREE_OPERAND (t
, 0));
461 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
462 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
463 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
464 && reorder_operands_p (TREE_OPERAND (t
, 0),
465 TREE_OPERAND (t
, 1));
468 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
474 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t
))))
475 return negate_expr_p (TREE_OPERAND (t
, 1))
476 || negate_expr_p (TREE_OPERAND (t
, 0));
484 /* In general we can't negate A / B, because if A is INT_MIN and
485 B is 1, we may turn this into INT_MIN / -1 which is undefined
486 and actually traps on some architectures. But if overflow is
487 undefined, we can negate, because - (INT_MIN / 1) is an
489 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
490 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
)))
492 return negate_expr_p (TREE_OPERAND (t
, 1))
493 || negate_expr_p (TREE_OPERAND (t
, 0));
496 /* Negate -((double)float) as (double)(-float). */
497 if (TREE_CODE (type
) == REAL_TYPE
)
499 tree tem
= strip_float_extensions (t
);
501 return negate_expr_p (tem
);
506 /* Negate -f(x) as f(-x). */
507 if (negate_mathfn_p (builtin_mathfn_code (t
)))
508 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
512 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
513 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
515 tree op1
= TREE_OPERAND (t
, 1);
516 if (TREE_INT_CST_HIGH (op1
) == 0
517 && (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (type
) - 1)
518 == TREE_INT_CST_LOW (op1
))
529 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
530 simplification is possible.
531 If negate_expr_p would return true for T, NULL_TREE will never be
535 fold_negate_expr (location_t loc
, tree t
)
537 tree type
= TREE_TYPE (t
);
540 switch (TREE_CODE (t
))
542 /* Convert - (~A) to A + 1. */
544 if (INTEGRAL_TYPE_P (type
))
545 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
546 build_int_cst (type
, 1));
550 tem
= fold_negate_const (t
, type
);
551 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
552 || !TYPE_OVERFLOW_TRAPS (type
))
557 tem
= fold_negate_const (t
, type
);
558 /* Two's complement FP formats, such as c4x, may overflow. */
559 if (!TREE_OVERFLOW (tem
) || !flag_trapping_math
)
564 tem
= fold_negate_const (t
, type
);
569 tree rpart
= negate_expr (TREE_REALPART (t
));
570 tree ipart
= negate_expr (TREE_IMAGPART (t
));
572 if ((TREE_CODE (rpart
) == REAL_CST
573 && TREE_CODE (ipart
) == REAL_CST
)
574 || (TREE_CODE (rpart
) == INTEGER_CST
575 && TREE_CODE (ipart
) == INTEGER_CST
))
576 return build_complex (type
, rpart
, ipart
);
581 if (negate_expr_p (t
))
582 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
583 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
584 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
588 if (negate_expr_p (t
))
589 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
590 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
594 return TREE_OPERAND (t
, 0);
597 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
598 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
600 /* -(A + B) -> (-B) - A. */
601 if (negate_expr_p (TREE_OPERAND (t
, 1))
602 && reorder_operands_p (TREE_OPERAND (t
, 0),
603 TREE_OPERAND (t
, 1)))
605 tem
= negate_expr (TREE_OPERAND (t
, 1));
606 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
607 tem
, TREE_OPERAND (t
, 0));
610 /* -(A + B) -> (-A) - B. */
611 if (negate_expr_p (TREE_OPERAND (t
, 0)))
613 tem
= negate_expr (TREE_OPERAND (t
, 0));
614 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
615 tem
, TREE_OPERAND (t
, 1));
621 /* - (A - B) -> B - A */
622 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
623 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
624 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
625 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
626 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
630 if (TYPE_UNSIGNED (type
))
636 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
)))
638 tem
= TREE_OPERAND (t
, 1);
639 if (negate_expr_p (tem
))
640 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
641 TREE_OPERAND (t
, 0), negate_expr (tem
));
642 tem
= TREE_OPERAND (t
, 0);
643 if (negate_expr_p (tem
))
644 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
645 negate_expr (tem
), TREE_OPERAND (t
, 1));
654 /* In general we can't negate A / B, because if A is INT_MIN and
655 B is 1, we may turn this into INT_MIN / -1 which is undefined
656 and actually traps on some architectures. But if overflow is
657 undefined, we can negate, because - (INT_MIN / 1) is an
659 if (!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
661 const char * const warnmsg
= G_("assuming signed overflow does not "
662 "occur when negating a division");
663 tem
= TREE_OPERAND (t
, 1);
664 if (negate_expr_p (tem
))
666 if (INTEGRAL_TYPE_P (type
)
667 && (TREE_CODE (tem
) != INTEGER_CST
668 || integer_onep (tem
)))
669 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
670 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
671 TREE_OPERAND (t
, 0), negate_expr (tem
));
673 tem
= TREE_OPERAND (t
, 0);
674 if (negate_expr_p (tem
))
676 if (INTEGRAL_TYPE_P (type
)
677 && (TREE_CODE (tem
) != INTEGER_CST
678 || tree_int_cst_equal (tem
, TYPE_MIN_VALUE (type
))))
679 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
680 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
681 negate_expr (tem
), TREE_OPERAND (t
, 1));
687 /* Convert -((double)float) into (double)(-float). */
688 if (TREE_CODE (type
) == REAL_TYPE
)
690 tem
= strip_float_extensions (t
);
691 if (tem
!= t
&& negate_expr_p (tem
))
692 return fold_convert_loc (loc
, type
, negate_expr (tem
));
697 /* Negate -f(x) as f(-x). */
698 if (negate_mathfn_p (builtin_mathfn_code (t
))
699 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
703 fndecl
= get_callee_fndecl (t
);
704 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
705 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
710 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
711 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
713 tree op1
= TREE_OPERAND (t
, 1);
714 if (TREE_INT_CST_HIGH (op1
) == 0
715 && (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (type
) - 1)
716 == TREE_INT_CST_LOW (op1
))
718 tree ntype
= TYPE_UNSIGNED (type
)
719 ? signed_type_for (type
)
720 : unsigned_type_for (type
);
721 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
722 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
723 return fold_convert_loc (loc
, type
, temp
);
735 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
736 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
748 loc
= EXPR_LOCATION (t
);
749 type
= TREE_TYPE (t
);
752 tem
= fold_negate_expr (loc
, t
);
754 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
755 return fold_convert_loc (loc
, type
, tem
);
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
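
/* Example (editor's sketch, not part of the original comment): for
   IN = x + 3 with CODE == PLUS_EXPR, the variable part returned is x,
   *LITP is 3 and *CONP stays null.  For IN = x - 4 with CODE == PLUS_EXPR,
   the subtracted literal goes to *MINUS_LITP instead, so the result is the
   variable part x with *MINUS_LITP == 4.  */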
779 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
780 tree
*minus_litp
, int negate_p
)
788 /* Strip any conversions that don't change the machine mode or signedness. */
789 STRIP_SIGN_NOPS (in
);
791 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
792 || TREE_CODE (in
) == FIXED_CST
)
794 else if (TREE_CODE (in
) == code
795 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
796 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
797 /* We can associate addition and subtraction together (even
798 though the C standard doesn't say so) for integers because
799 the value is not affected. For reals, the value might be
800 affected, so we can't. */
801 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
802 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
804 tree op0
= TREE_OPERAND (in
, 0);
805 tree op1
= TREE_OPERAND (in
, 1);
806 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
807 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
809 /* First see if either of the operands is a literal, then a constant. */
810 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
811 || TREE_CODE (op0
) == FIXED_CST
)
812 *litp
= op0
, op0
= 0;
813 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
814 || TREE_CODE (op1
) == FIXED_CST
)
815 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
817 if (op0
!= 0 && TREE_CONSTANT (op0
))
818 *conp
= op0
, op0
= 0;
819 else if (op1
!= 0 && TREE_CONSTANT (op1
))
820 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
822 /* If we haven't dealt with either operand, this is not a case we can
823 decompose. Otherwise, VAR is either of the ones remaining, if any. */
824 if (op0
!= 0 && op1
!= 0)
829 var
= op1
, neg_var_p
= neg1_p
;
831 /* Now do any needed negations. */
833 *minus_litp
= *litp
, *litp
= 0;
835 *conp
= negate_expr (*conp
);
837 var
= negate_expr (var
);
839 else if (TREE_CONSTANT (in
))
847 *minus_litp
= *litp
, *litp
= 0;
848 else if (*minus_litp
)
849 *litp
= *minus_litp
, *minus_litp
= 0;
850 *conp
= negate_expr (*conp
);
851 var
= negate_expr (var
);
857 /* Re-associate trees split by the above function. T1 and T2 are
858 either expressions to associate or null. Return the new
859 expression, if any. LOC is the location of the new expression. If
860 we build an operation, do it in TYPE and with CODE. */
863 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
870 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
871 try to fold this since we will have infinite recursion. But do
872 deal with any NEGATE_EXPRs. */
873 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
874 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
876 if (code
== PLUS_EXPR
)
878 if (TREE_CODE (t1
) == NEGATE_EXPR
)
879 return build2_loc (loc
, MINUS_EXPR
, type
,
880 fold_convert_loc (loc
, type
, t2
),
881 fold_convert_loc (loc
, type
,
882 TREE_OPERAND (t1
, 0)));
883 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
884 return build2_loc (loc
, MINUS_EXPR
, type
,
885 fold_convert_loc (loc
, type
, t1
),
886 fold_convert_loc (loc
, type
,
887 TREE_OPERAND (t2
, 0)));
888 else if (integer_zerop (t2
))
889 return fold_convert_loc (loc
, type
, t1
);
891 else if (code
== MINUS_EXPR
)
893 if (integer_zerop (t2
))
894 return fold_convert_loc (loc
, type
, t1
);
897 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
898 fold_convert_loc (loc
, type
, t2
));
901 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
902 fold_convert_loc (loc
, type
, t2
));
905 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
906 for use in int_const_binop, size_binop and size_diffop. */
909 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
911 if (TREE_CODE (type1
) != INTEGER_TYPE
&& !POINTER_TYPE_P (type1
))
913 if (TREE_CODE (type2
) != INTEGER_TYPE
&& !POINTER_TYPE_P (type2
))
928 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
929 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
930 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */
939 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
)
941 double_int op1
, op2
, res
, tmp
;
943 tree type
= TREE_TYPE (arg1
);
944 bool uns
= TYPE_UNSIGNED (type
);
946 = (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (type
));
947 bool overflow
= false;
949 op1
= tree_to_double_int (arg1
);
950 op2
= tree_to_double_int (arg2
);
955 res
= double_int_ior (op1
, op2
);
959 res
= double_int_xor (op1
, op2
);
963 res
= double_int_and (op1
, op2
);
967 res
= double_int_rshift (op1
, double_int_to_shwi (op2
),
968 TYPE_PRECISION (type
), !uns
);
972 /* It's unclear from the C standard whether shifts can overflow.
973 The following code ignores overflow; perhaps a C standard
974 interpretation ruling is needed. */
975 res
= double_int_lshift (op1
, double_int_to_shwi (op2
),
976 TYPE_PRECISION (type
), !uns
);
980 res
= double_int_rrotate (op1
, double_int_to_shwi (op2
),
981 TYPE_PRECISION (type
));
985 res
= double_int_lrotate (op1
, double_int_to_shwi (op2
),
986 TYPE_PRECISION (type
));
990 overflow
= add_double (op1
.low
, op1
.high
, op2
.low
, op2
.high
,
991 &res
.low
, &res
.high
);
995 neg_double (op2
.low
, op2
.high
, &res
.low
, &res
.high
);
996 add_double (op1
.low
, op1
.high
, res
.low
, res
.high
,
997 &res
.low
, &res
.high
);
998 overflow
= OVERFLOW_SUM_SIGN (res
.high
, op2
.high
, op1
.high
);
1002 overflow
= mul_double (op1
.low
, op1
.high
, op2
.low
, op2
.high
,
1003 &res
.low
, &res
.high
);
1006 case TRUNC_DIV_EXPR
:
1007 case FLOOR_DIV_EXPR
: case CEIL_DIV_EXPR
:
1008 case EXACT_DIV_EXPR
:
1009 /* This is a shortcut for a common special case. */
1010 if (op2
.high
== 0 && (HOST_WIDE_INT
) op2
.low
> 0
1011 && !TREE_OVERFLOW (arg1
)
1012 && !TREE_OVERFLOW (arg2
)
1013 && op1
.high
== 0 && (HOST_WIDE_INT
) op1
.low
>= 0)
1015 if (code
== CEIL_DIV_EXPR
)
1016 op1
.low
+= op2
.low
- 1;
1018 res
.low
= op1
.low
/ op2
.low
, res
.high
= 0;
1022 /* ... fall through ... */
1024 case ROUND_DIV_EXPR
:
1025 if (double_int_zero_p (op2
))
1027 if (double_int_one_p (op2
))
1032 if (double_int_equal_p (op1
, op2
)
1033 && ! double_int_zero_p (op1
))
1035 res
= double_int_one
;
1038 overflow
= div_and_round_double (code
, uns
,
1039 op1
.low
, op1
.high
, op2
.low
, op2
.high
,
1040 &res
.low
, &res
.high
,
1041 &tmp
.low
, &tmp
.high
);
1044 case TRUNC_MOD_EXPR
:
1045 case FLOOR_MOD_EXPR
: case CEIL_MOD_EXPR
:
1046 /* This is a shortcut for a common special case. */
1047 if (op2
.high
== 0 && (HOST_WIDE_INT
) op2
.low
> 0
1048 && !TREE_OVERFLOW (arg1
)
1049 && !TREE_OVERFLOW (arg2
)
1050 && op1
.high
== 0 && (HOST_WIDE_INT
) op1
.low
>= 0)
1052 if (code
== CEIL_MOD_EXPR
)
1053 op1
.low
+= op2
.low
- 1;
1054 res
.low
= op1
.low
% op2
.low
, res
.high
= 0;
1058 /* ... fall through ... */
1060 case ROUND_MOD_EXPR
:
1061 if (double_int_zero_p (op2
))
1063 overflow
= div_and_round_double (code
, uns
,
1064 op1
.low
, op1
.high
, op2
.low
, op2
.high
,
1065 &tmp
.low
, &tmp
.high
,
1066 &res
.low
, &res
.high
);
1070 res
= double_int_min (op1
, op2
, uns
);
1074 res
= double_int_max (op1
, op2
, uns
);
1081 t
= force_fit_type_double (TREE_TYPE (arg1
), res
, 1,
1082 ((!uns
|| is_sizetype
) && overflow
)
1083 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
));
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */
1094 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1096 /* Sanity check for the recursive cases. */
1103 if (TREE_CODE (arg1
) == INTEGER_CST
)
1104 return int_const_binop (code
, arg1
, arg2
);
1106 if (TREE_CODE (arg1
) == REAL_CST
)
1108 enum machine_mode mode
;
1111 REAL_VALUE_TYPE value
;
1112 REAL_VALUE_TYPE result
;
1116 /* The following codes are handled by real_arithmetic. */
1131 d1
= TREE_REAL_CST (arg1
);
1132 d2
= TREE_REAL_CST (arg2
);
1134 type
= TREE_TYPE (arg1
);
1135 mode
= TYPE_MODE (type
);
1137 /* Don't perform operation if we honor signaling NaNs and
1138 either operand is a NaN. */
1139 if (HONOR_SNANS (mode
)
1140 && (REAL_VALUE_ISNAN (d1
) || REAL_VALUE_ISNAN (d2
)))
1143 /* Don't perform operation if it would raise a division
1144 by zero exception. */
1145 if (code
== RDIV_EXPR
1146 && REAL_VALUES_EQUAL (d2
, dconst0
)
1147 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1150 /* If either operand is a NaN, just return it. Otherwise, set up
1151 for floating-point trap; we return an overflow. */
1152 if (REAL_VALUE_ISNAN (d1
))
1154 else if (REAL_VALUE_ISNAN (d2
))
1157 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1158 real_convert (&result
, mode
, &value
);
1160 /* Don't constant fold this floating point operation if
1161 the result has overflowed and flag_trapping_math. */
1162 if (flag_trapping_math
1163 && MODE_HAS_INFINITIES (mode
)
1164 && REAL_VALUE_ISINF (result
)
1165 && !REAL_VALUE_ISINF (d1
)
1166 && !REAL_VALUE_ISINF (d2
))
1169 /* Don't constant fold this floating point operation if the
1170 result may dependent upon the run-time rounding mode and
1171 flag_rounding_math is set, or if GCC's software emulation
1172 is unable to accurately represent the result. */
1173 if ((flag_rounding_math
1174 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1175 && (inexact
|| !real_identical (&result
, &value
)))
1178 t
= build_real (type
, result
);
1180 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1184 if (TREE_CODE (arg1
) == FIXED_CST
)
1186 FIXED_VALUE_TYPE f1
;
1187 FIXED_VALUE_TYPE f2
;
1188 FIXED_VALUE_TYPE result
;
1193 /* The following codes are handled by fixed_arithmetic. */
1199 case TRUNC_DIV_EXPR
:
1200 f2
= TREE_FIXED_CST (arg2
);
1205 f2
.data
.high
= TREE_INT_CST_HIGH (arg2
);
1206 f2
.data
.low
= TREE_INT_CST_LOW (arg2
);
1214 f1
= TREE_FIXED_CST (arg1
);
1215 type
= TREE_TYPE (arg1
);
1216 sat_p
= TYPE_SATURATING (type
);
1217 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1218 t
= build_fixed (type
, result
);
1219 /* Propagate overflow flags. */
1220 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1221 TREE_OVERFLOW (t
) = 1;
1225 if (TREE_CODE (arg1
) == COMPLEX_CST
)
1227 tree type
= TREE_TYPE (arg1
);
1228 tree r1
= TREE_REALPART (arg1
);
1229 tree i1
= TREE_IMAGPART (arg1
);
1230 tree r2
= TREE_REALPART (arg2
);
1231 tree i2
= TREE_IMAGPART (arg2
);
1238 real
= const_binop (code
, r1
, r2
);
1239 imag
= const_binop (code
, i1
, i2
);
1243 if (COMPLEX_FLOAT_TYPE_P (type
))
1244 return do_mpc_arg2 (arg1
, arg2
, type
,
1245 /* do_nonfinite= */ folding_initializer
,
1248 real
= const_binop (MINUS_EXPR
,
1249 const_binop (MULT_EXPR
, r1
, r2
),
1250 const_binop (MULT_EXPR
, i1
, i2
));
1251 imag
= const_binop (PLUS_EXPR
,
1252 const_binop (MULT_EXPR
, r1
, i2
),
1253 const_binop (MULT_EXPR
, i1
, r2
));
1257 if (COMPLEX_FLOAT_TYPE_P (type
))
1258 return do_mpc_arg2 (arg1
, arg2
, type
,
1259 /* do_nonfinite= */ folding_initializer
,
1262 case TRUNC_DIV_EXPR
:
1264 case FLOOR_DIV_EXPR
:
1265 case ROUND_DIV_EXPR
:
1266 if (flag_complex_method
== 0)
1268 /* Keep this algorithm in sync with
1269 tree-complex.c:expand_complex_div_straight().
1271 Expand complex division to scalars, straightforward algorithm.
1272 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1276 = const_binop (PLUS_EXPR
,
1277 const_binop (MULT_EXPR
, r2
, r2
),
1278 const_binop (MULT_EXPR
, i2
, i2
));
1280 = const_binop (PLUS_EXPR
,
1281 const_binop (MULT_EXPR
, r1
, r2
),
1282 const_binop (MULT_EXPR
, i1
, i2
));
1284 = const_binop (MINUS_EXPR
,
1285 const_binop (MULT_EXPR
, i1
, r2
),
1286 const_binop (MULT_EXPR
, r1
, i2
));
1288 real
= const_binop (code
, t1
, magsquared
);
1289 imag
= const_binop (code
, t2
, magsquared
);
1293 /* Keep this algorithm in sync with
1294 tree-complex.c:expand_complex_div_wide().
1296 Expand complex division to scalars, modified algorithm to minimize
1297 overflow with wide input ranges. */
1298 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1299 fold_abs_const (r2
, TREE_TYPE (type
)),
1300 fold_abs_const (i2
, TREE_TYPE (type
)));
1302 if (integer_nonzerop (compare
))
1304 /* In the TRUE branch, we compute
1306 div = (br * ratio) + bi;
1307 tr = (ar * ratio) + ai;
1308 ti = (ai * ratio) - ar;
1311 tree ratio
= const_binop (code
, r2
, i2
);
1312 tree div
= const_binop (PLUS_EXPR
, i2
,
1313 const_binop (MULT_EXPR
, r2
, ratio
));
1314 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1315 real
= const_binop (PLUS_EXPR
, real
, i1
);
1316 real
= const_binop (code
, real
, div
);
1318 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1319 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1320 imag
= const_binop (code
, imag
, div
);
1324 /* In the FALSE branch, we compute
1326 divisor = (d * ratio) + c;
1327 tr = (b * ratio) + a;
1328 ti = b - (a * ratio);
1331 tree ratio
= const_binop (code
, i2
, r2
);
1332 tree div
= const_binop (PLUS_EXPR
, r2
,
1333 const_binop (MULT_EXPR
, i2
, ratio
));
1335 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1336 real
= const_binop (PLUS_EXPR
, real
, r1
);
1337 real
= const_binop (code
, real
, div
);
1339 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1340 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1341 imag
= const_binop (code
, imag
, div
);
1351 return build_complex (type
, real
, imag
);
1354 if (TREE_CODE (arg1
) == VECTOR_CST
1355 && TREE_CODE (arg2
) == VECTOR_CST
)
1357 tree type
= TREE_TYPE(arg1
);
1358 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1359 tree
*elts
= XALLOCAVEC (tree
, count
);
1361 for (i
= 0; i
< count
; i
++)
1363 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1364 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1366 elts
[i
] = const_binop (code
, elem1
, elem2
);
1368 /* It is possible that const_binop cannot handle the given
1369 code and return NULL_TREE */
1370 if(elts
[i
] == NULL_TREE
)
1374 return build_vector (type
, elts
);
1379 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1380 indicates which particular sizetype to create. */
1383 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1385 return build_int_cst (sizetype_tab
[(int) kind
], number
);
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */
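
/* Example (editor's note): size_binop (MULT_EXPR, size_int (4), size_int (8))
   folds immediately to the sizetype constant 32 via int_const_binop, and
   size_binop (PLUS_EXPR, size_zero_node, size_int (8)) simply returns its
   second operand, while non-constant operands fall through to
   fold_build2_loc.  */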
1394 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1396 tree type
= TREE_TYPE (arg0
);
1398 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1399 return error_mark_node
;
1401 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1404 /* Handle the special case of two integer constants faster. */
1405 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1407 /* And some specific cases even faster than that. */
1408 if (code
== PLUS_EXPR
)
1410 if (integer_zerop (arg0
) && !TREE_OVERFLOW (arg0
))
1412 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1415 else if (code
== MINUS_EXPR
)
1417 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1420 else if (code
== MULT_EXPR
)
1422 if (integer_onep (arg0
) && !TREE_OVERFLOW (arg0
))
1426 /* Handle general case of two integer constants. */
1427 return int_const_binop (code
, arg0
, arg1
);
1430 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */
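
/* Example (editor's illustration): with sizetype operands 4 and 12,
   size_diffop_loc returns the ssizetype constant -8; the subtraction is
   done as -(12 - 4) so that no intermediate value relies on unsigned
   wrap-around.  */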
1438 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1440 tree type
= TREE_TYPE (arg0
);
1443 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1446 /* If the type is already signed, just do the simple thing. */
1447 if (!TYPE_UNSIGNED (type
))
1448 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
1450 if (type
== sizetype
)
1452 else if (type
== bitsizetype
)
1453 ctype
= sbitsizetype
;
1455 ctype
= signed_type_for (type
);
1457 /* If either operand is not a constant, do the conversions to the signed
1458 type and subtract. The hardware will do the right thing with any
1459 overflow in the subtraction. */
1460 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1461 return size_binop_loc (loc
, MINUS_EXPR
,
1462 fold_convert_loc (loc
, ctype
, arg0
),
1463 fold_convert_loc (loc
, ctype
, arg1
));
1465 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1466 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1467 overflow) and negate (which can't either). Special-case a result
1468 of zero while we're here. */
1469 if (tree_int_cst_equal (arg0
, arg1
))
1470 return build_int_cst (ctype
, 0);
1471 else if (tree_int_cst_lt (arg1
, arg0
))
1472 return fold_convert_loc (loc
, ctype
,
1473 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
1475 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
1476 fold_convert_loc (loc
, ctype
,
1477 size_binop_loc (loc
,
1482 /* A subroutine of fold_convert_const handling conversions of an
1483 INTEGER_CST to another integer type. */
1486 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
1490 /* Given an integer constant, make new constant with new type,
1491 appropriately sign-extended or truncated. */
1492 t
= force_fit_type_double (type
, tree_to_double_int (arg1
),
1493 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1494 (TREE_INT_CST_HIGH (arg1
) < 0
1495 && (TYPE_UNSIGNED (type
)
1496 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1497 | TREE_OVERFLOW (arg1
));
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */
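
  /* Example (editor's note): under these rules a NaN REAL_CST converted to
     int folds to 0, 1.0e30 saturates to INT_MAX (2147483647 for a 32-bit
     int) and -1.0e30 saturates to INT_MIN, with TREE_OVERFLOW set on the
     result in all three cases.  */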
1522 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1526 case FIX_TRUNC_EXPR
:
1527 real_trunc (&r
, VOIDmode
, &x
);
1534 /* If R is NaN, return zero and show we have an overflow. */
1535 if (REAL_VALUE_ISNAN (r
))
1538 val
= double_int_zero
;
1541 /* See if R is less than the lower bound or greater than the
1546 tree lt
= TYPE_MIN_VALUE (type
);
1547 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1548 if (REAL_VALUES_LESS (r
, l
))
1551 val
= tree_to_double_int (lt
);
1557 tree ut
= TYPE_MAX_VALUE (type
);
1560 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1561 if (REAL_VALUES_LESS (u
, r
))
1564 val
= tree_to_double_int (ut
);
1570 real_to_integer2 ((HOST_WIDE_INT
*) &val
.low
, &val
.high
, &r
);
1572 t
= force_fit_type_double (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
1576 /* A subroutine of fold_convert_const handling conversions of a
1577 FIXED_CST to an integer type. */
1580 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
1583 double_int temp
, temp_trunc
;
1586 /* Right shift FIXED_CST to temp by fbit. */
1587 temp
= TREE_FIXED_CST (arg1
).data
;
1588 mode
= TREE_FIXED_CST (arg1
).mode
;
1589 if (GET_MODE_FBIT (mode
) < 2 * HOST_BITS_PER_WIDE_INT
)
1591 temp
= double_int_rshift (temp
, GET_MODE_FBIT (mode
),
1592 HOST_BITS_PER_DOUBLE_INT
,
1593 SIGNED_FIXED_POINT_MODE_P (mode
));
1595 /* Left shift temp to temp_trunc by fbit. */
1596 temp_trunc
= double_int_lshift (temp
, GET_MODE_FBIT (mode
),
1597 HOST_BITS_PER_DOUBLE_INT
,
1598 SIGNED_FIXED_POINT_MODE_P (mode
));
1602 temp
= double_int_zero
;
1603 temp_trunc
= double_int_zero
;
1606 /* If FIXED_CST is negative, we need to round the value toward 0.
1607 By checking if the fractional bits are not zero to add 1 to temp. */
1608 if (SIGNED_FIXED_POINT_MODE_P (mode
)
1609 && double_int_negative_p (temp_trunc
)
1610 && !double_int_equal_p (TREE_FIXED_CST (arg1
).data
, temp_trunc
))
1611 temp
= double_int_add (temp
, double_int_one
);
1613 /* Given a fixed-point constant, make new constant with new type,
1614 appropriately sign-extended or truncated. */
1615 t
= force_fit_type_double (type
, temp
, -1,
1616 (double_int_negative_p (temp
)
1617 && (TYPE_UNSIGNED (type
)
1618 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1619 | TREE_OVERFLOW (arg1
));
1624 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1625 to another floating point type. */
1628 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
1630 REAL_VALUE_TYPE value
;
1633 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
1634 t
= build_real (type
, value
);
1636 /* If converting an infinity or NAN to a representation that doesn't
1637 have one, set the overflow bit so that we can produce some kind of
1638 error message at the appropriate point if necessary. It's not the
1639 most user-friendly message, but it's better than nothing. */
1640 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
1641 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
1642 TREE_OVERFLOW (t
) = 1;
1643 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
1644 && !MODE_HAS_NANS (TYPE_MODE (type
)))
1645 TREE_OVERFLOW (t
) = 1;
1646 /* Regular overflow, conversion produced an infinity in a mode that
1647 can't represent them. */
1648 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
1649 && REAL_VALUE_ISINF (value
)
1650 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
1651 TREE_OVERFLOW (t
) = 1;
1653 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1657 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1658 to a floating point type. */
1661 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
1663 REAL_VALUE_TYPE value
;
1666 real_convert_from_fixed (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
));
1667 t
= build_real (type
, value
);
1669 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1673 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1674 to another fixed-point type. */
1677 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
1679 FIXED_VALUE_TYPE value
;
1683 overflow_p
= fixed_convert (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
),
1684 TYPE_SATURATING (type
));
1685 t
= build_fixed (type
, value
);
1687 /* Propagate overflow flags. */
1688 if (overflow_p
| TREE_OVERFLOW (arg1
))
1689 TREE_OVERFLOW (t
) = 1;
1693 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1694 to a fixed-point type. */
1697 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
1699 FIXED_VALUE_TYPE value
;
1703 overflow_p
= fixed_convert_from_int (&value
, TYPE_MODE (type
),
1704 TREE_INT_CST (arg1
),
1705 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
1706 TYPE_SATURATING (type
));
1707 t
= build_fixed (type
, value
);
1709 /* Propagate overflow flags. */
1710 if (overflow_p
| TREE_OVERFLOW (arg1
))
1711 TREE_OVERFLOW (t
) = 1;
1715 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1716 to a fixed-point type. */
1719 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
1721 FIXED_VALUE_TYPE value
;
1725 overflow_p
= fixed_convert_from_real (&value
, TYPE_MODE (type
),
1726 &TREE_REAL_CST (arg1
),
1727 TYPE_SATURATING (type
));
1728 t
= build_fixed (type
, value
);
1730 /* Propagate overflow flags. */
1731 if (overflow_p
| TREE_OVERFLOW (arg1
))
1732 TREE_OVERFLOW (t
) = 1;
1736 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1737 type TYPE. If no simplification can be done return NULL_TREE. */
1740 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
1742 if (TREE_TYPE (arg1
) == type
)
1745 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
1746 || TREE_CODE (type
) == OFFSET_TYPE
)
1748 if (TREE_CODE (arg1
) == INTEGER_CST
)
1749 return fold_convert_const_int_from_int (type
, arg1
);
1750 else if (TREE_CODE (arg1
) == REAL_CST
)
1751 return fold_convert_const_int_from_real (code
, type
, arg1
);
1752 else if (TREE_CODE (arg1
) == FIXED_CST
)
1753 return fold_convert_const_int_from_fixed (type
, arg1
);
1755 else if (TREE_CODE (type
) == REAL_TYPE
)
1757 if (TREE_CODE (arg1
) == INTEGER_CST
)
1758 return build_real_from_int_cst (type
, arg1
);
1759 else if (TREE_CODE (arg1
) == REAL_CST
)
1760 return fold_convert_const_real_from_real (type
, arg1
);
1761 else if (TREE_CODE (arg1
) == FIXED_CST
)
1762 return fold_convert_const_real_from_fixed (type
, arg1
);
1764 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
1766 if (TREE_CODE (arg1
) == FIXED_CST
)
1767 return fold_convert_const_fixed_from_fixed (type
, arg1
);
1768 else if (TREE_CODE (arg1
) == INTEGER_CST
)
1769 return fold_convert_const_fixed_from_int (type
, arg1
);
1770 else if (TREE_CODE (arg1
) == REAL_CST
)
1771 return fold_convert_const_fixed_from_real (type
, arg1
);
1776 /* Construct a vector of zero elements of vector type TYPE. */
1779 build_zero_vector (tree type
)
1783 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
1784 return build_vector_from_val (type
, t
);
1787 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1790 fold_convertible_p (const_tree type
, const_tree arg
)
1792 tree orig
= TREE_TYPE (arg
);
1797 if (TREE_CODE (arg
) == ERROR_MARK
1798 || TREE_CODE (type
) == ERROR_MARK
1799 || TREE_CODE (orig
) == ERROR_MARK
)
1802 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
1805 switch (TREE_CODE (type
))
1807 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1808 case POINTER_TYPE
: case REFERENCE_TYPE
:
1810 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
1811 || TREE_CODE (orig
) == OFFSET_TYPE
)
1813 return (TREE_CODE (orig
) == VECTOR_TYPE
1814 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
1817 case FIXED_POINT_TYPE
:
1821 return TREE_CODE (type
) == TREE_CODE (orig
);
1828 /* Convert expression ARG to type TYPE. Used by the middle-end for
1829 simple conversions in preference to calling the front-end's convert. */
1832 fold_convert_loc (location_t loc
, tree type
, tree arg
)
1834 tree orig
= TREE_TYPE (arg
);
1840 if (TREE_CODE (arg
) == ERROR_MARK
1841 || TREE_CODE (type
) == ERROR_MARK
1842 || TREE_CODE (orig
) == ERROR_MARK
)
1843 return error_mark_node
;
1845 switch (TREE_CODE (type
))
1848 case REFERENCE_TYPE
:
1849 /* Handle conversions between pointers to different address spaces. */
1850 if (POINTER_TYPE_P (orig
)
1851 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
1852 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
1853 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
1856 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1858 if (TREE_CODE (arg
) == INTEGER_CST
)
1860 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
1861 if (tem
!= NULL_TREE
)
1864 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
1865 || TREE_CODE (orig
) == OFFSET_TYPE
)
1866 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1867 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
1868 return fold_convert_loc (loc
, type
,
1869 fold_build1_loc (loc
, REALPART_EXPR
,
1870 TREE_TYPE (orig
), arg
));
1871 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
1872 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
1873 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1876 if (TREE_CODE (arg
) == INTEGER_CST
)
1878 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
1879 if (tem
!= NULL_TREE
)
1882 else if (TREE_CODE (arg
) == REAL_CST
)
1884 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
1885 if (tem
!= NULL_TREE
)
1888 else if (TREE_CODE (arg
) == FIXED_CST
)
1890 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
1891 if (tem
!= NULL_TREE
)
1895 switch (TREE_CODE (orig
))
1898 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
1899 case POINTER_TYPE
: case REFERENCE_TYPE
:
1900 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
1903 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1905 case FIXED_POINT_TYPE
:
1906 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
1909 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
1910 return fold_convert_loc (loc
, type
, tem
);
1916 case FIXED_POINT_TYPE
:
1917 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
1918 || TREE_CODE (arg
) == REAL_CST
)
1920 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
1921 if (tem
!= NULL_TREE
)
1922 goto fold_convert_exit
;
1925 switch (TREE_CODE (orig
))
1927 case FIXED_POINT_TYPE
:
1932 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
1935 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
1936 return fold_convert_loc (loc
, type
, tem
);
1943 switch (TREE_CODE (orig
))
1946 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
1947 case POINTER_TYPE
: case REFERENCE_TYPE
:
1949 case FIXED_POINT_TYPE
:
1950 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
1951 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
1952 fold_convert_loc (loc
, TREE_TYPE (type
),
1953 integer_zero_node
));
1958 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
1960 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
1961 TREE_OPERAND (arg
, 0));
1962 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
1963 TREE_OPERAND (arg
, 1));
1964 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
1967 arg
= save_expr (arg
);
1968 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
1969 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
1970 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
1971 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
1972 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
1980 if (integer_zerop (arg
))
1981 return build_zero_vector (type
);
1982 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
1983 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
1984 || TREE_CODE (orig
) == VECTOR_TYPE
);
1985 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
1988 tem
= fold_ignored_result (arg
);
1989 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
1992 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
1993 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1997 protected_set_expr_location_unshare (tem
, loc
);
2001 /* Return false if expr can be assumed not to be an lvalue, true
2005 maybe_lvalue_p (const_tree x
)
2007 /* We only need to wrap lvalue tree codes. */
2008 switch (TREE_CODE (x
))
2021 case ARRAY_RANGE_REF
:
2027 case PREINCREMENT_EXPR
:
2028 case PREDECREMENT_EXPR
:
2030 case TRY_CATCH_EXPR
:
2031 case WITH_CLEANUP_EXPR
:
2040 /* Assume the worst for front-end tree codes. */
2041 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2049 /* Return an expr equal to X but certainly not valid as an lvalue. */
2052 non_lvalue_loc (location_t loc
, tree x
)
2054 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2059 if (! maybe_lvalue_p (x
))
2061 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2064 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2065 Zero means allow extended lvalues. */
2067 int pedantic_lvalues
;
2069 /* When pedantic, return an expr equal to X but certainly not valid as a
2070 pedantic lvalue. Otherwise, return X. */
2073 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2075 if (pedantic_lvalues
)
2076 return non_lvalue_loc (loc
, x
);
2078 return protected_set_expr_location_unshare (x
, loc
);
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR and NE_EXPR, so we return ERROR_MARK in this case.  */
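
/* Example (editor's note): the inverse of LT_EXPR is GE_EXPR when NaNs need
   not be honored, but UNGE_EXPR when they must be, since !(x < y) admits the
   unordered case.  With both honor_nans and -ftrapping-math in effect, only
   EQ_EXPR and NE_EXPR are inverted and every other code yields
   ERROR_MARK.  */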
2086 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2088 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
)
2098 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2100 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2102 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2104 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2118 return UNORDERED_EXPR
;
2119 case UNORDERED_EXPR
:
2120 return ORDERED_EXPR
;
2126 /* Similar, but return the comparison that results if the operands are
2127 swapped. This is safe for floating-point. */
2130 swap_tree_comparison (enum tree_code code
)
2137 case UNORDERED_EXPR
:
2163 /* Convert a comparison tree code from an enum tree_code representation
2164 into a compcode bit-based encoding. This function is the inverse of
2165 compcode_to_comparison. */
2167 static enum comparison_code
2168 comparison_to_compcode (enum tree_code code
)
2185 return COMPCODE_ORD
;
2186 case UNORDERED_EXPR
:
2187 return COMPCODE_UNORD
;
2189 return COMPCODE_UNLT
;
2191 return COMPCODE_UNEQ
;
2193 return COMPCODE_UNLE
;
2195 return COMPCODE_UNGT
;
2197 return COMPCODE_LTGT
;
2199 return COMPCODE_UNGE
;
2205 /* Convert a compcode bit-based encoding of a comparison operator back
2206 to GCC's enum tree_code representation. This function is the
2207 inverse of comparison_to_compcode. */
2209 static enum tree_code
2210 compcode_to_comparison (enum comparison_code code
)
2227 return ORDERED_EXPR
;
2228 case COMPCODE_UNORD
:
2229 return UNORDERED_EXPR
;
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
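/* For instance, when both comparisons use the same operands A and B,
   "A < B || A == B" combines to "A <= B", while "A < B && A > B" combines
   to a constant false node.  With NaNs honored and -ftrapping-math, any
   combination whose trapping behavior would change is rejected by
   returning NULL_TREE, as described above.  */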
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
2363 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
2365 /* If either is ERROR_MARK, they aren't equal. */
2366 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2367 || TREE_TYPE (arg0
) == error_mark_node
2368 || TREE_TYPE (arg1
) == error_mark_node
)
2371 /* Similar, if either does not have a type (like a released SSA name),
2372 they aren't equal. */
2373 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2376 /* Check equality of integer constants before bailing out due to
2377 precision differences. */
2378 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2379 return tree_int_cst_equal (arg0
, arg1
);
2381 /* If both types don't have the same signedness, then we can't consider
2382 them equal. We must check this before the STRIP_NOPS calls
2383 because they may change the signedness of the arguments. As pointers
2384 strictly don't have a signedness, require either two pointers or
2385 two non-pointers as well. */
2386 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2387 || POINTER_TYPE_P (TREE_TYPE (arg0
)) != POINTER_TYPE_P (TREE_TYPE (arg1
)))
  /* We cannot consider pointers to different address spaces equal.  */
2391 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && POINTER_TYPE_P (TREE_TYPE (arg1
))
2392 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2393 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
2398 if (TYPE_PRECISION (TREE_TYPE (arg0
)) != TYPE_PRECISION (TREE_TYPE (arg1
)))
2404 /* In case both args are comparisons but with different comparison
2405 code, try to swap the comparison operands of one arg to produce
2406 a match and compare that variant. */
2407 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2408 && COMPARISON_CLASS_P (arg0
)
2409 && COMPARISON_CLASS_P (arg1
))
2411 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2413 if (TREE_CODE (arg0
) == swap_code
)
2414 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2415 TREE_OPERAND (arg1
, 1), flags
)
2416 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2417 TREE_OPERAND (arg1
, 0), flags
);
2420 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2421 /* This is needed for conversions and for COMPONENT_REF.
2422 Might as well play it safe and always test this. */
2423 || TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2424 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2425 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2428 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2429 We don't care about side effects in that case because the SAVE_EXPR
2430 takes care of that for us. In all other cases, two expressions are
2431 equal if they have no side effects. If we have two identical
2432 expressions with side effects that should be treated the same due
2433 to the only side effects being identical SAVE_EXPR's, that will
2434 be detected in the recursive calls below.
2435 If we are taking an invariant address of two identical objects
2436 they are necessarily equal as well. */
2437 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2438 && (TREE_CODE (arg0
) == SAVE_EXPR
2439 || (flags
& OEP_CONSTANT_ADDRESS_OF
)
2440 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2443 /* Next handle constant cases, those for which we can return 1 even
2444 if ONLY_CONST is set. */
2445 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2446 switch (TREE_CODE (arg0
))
2449 return tree_int_cst_equal (arg0
, arg1
);
2452 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2453 TREE_FIXED_CST (arg1
));
2456 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2457 TREE_REAL_CST (arg1
)))
2461 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
))))
2463 /* If we do not distinguish between signed and unsigned zero,
2464 consider them equal. */
2465 if (real_zerop (arg0
) && real_zerop (arg1
))
2474 if (VECTOR_CST_NELTS (arg0
) != VECTOR_CST_NELTS (arg1
))
2477 for (i
= 0; i
< VECTOR_CST_NELTS (arg0
); ++i
)
2479 if (!operand_equal_p (VECTOR_CST_ELT (arg0
, i
),
2480 VECTOR_CST_ELT (arg1
, i
), flags
))
2487 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2489 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2493 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2494 && ! memcmp (TREE_STRING_POINTER (arg0
),
2495 TREE_STRING_POINTER (arg1
),
2496 TREE_STRING_LENGTH (arg0
)));
2499 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2500 TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
)
2501 ? OEP_CONSTANT_ADDRESS_OF
: 0);
2506 if (flags
& OEP_ONLY_CONST
)
  /* Define macros to test an operand from arg0 and arg1 for equality and a
     variant that allows null and views null as being different from any
     non-null value.  In the latter case, if either is null, they both
     must be; otherwise, do the normal comparison.  */
2513 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2514 TREE_OPERAND (arg1, N), flags)
2516 #define OP_SAME_WITH_NULL(N) \
2517 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2518 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2520 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2523 /* Two conversions are equal only if signedness and modes match. */
2524 switch (TREE_CODE (arg0
))
2527 case FIX_TRUNC_EXPR
:
2528 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2529 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2539 case tcc_comparison
:
2541 if (OP_SAME (0) && OP_SAME (1))
2544 /* For commutative ops, allow the other order. */
2545 return (commutative_tree_code (TREE_CODE (arg0
))
2546 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2547 TREE_OPERAND (arg1
, 1), flags
)
2548 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2549 TREE_OPERAND (arg1
, 0), flags
));
2552 /* If either of the pointer (or reference) expressions we are
2553 dereferencing contain a side effect, these cannot be equal. */
2554 if (TREE_SIDE_EFFECTS (arg0
)
2555 || TREE_SIDE_EFFECTS (arg1
))
2558 switch (TREE_CODE (arg0
))
2565 case TARGET_MEM_REF
:
2566 /* Require equal extra operands and then fall thru to MEM_REF
2567 handling of the two common operands. */
2568 if (!OP_SAME_WITH_NULL (2)
2569 || !OP_SAME_WITH_NULL (3)
2570 || !OP_SAME_WITH_NULL (4))
	  /* Require equal access sizes, and similar pointer types.
	     We can have incomplete types for array references of
	     variable-sized arrays from the Fortran frontend
	     though.  Also verify the types are compatible.  */
2578 return ((TYPE_SIZE (TREE_TYPE (arg0
)) == TYPE_SIZE (TREE_TYPE (arg1
))
2579 || (TYPE_SIZE (TREE_TYPE (arg0
))
2580 && TYPE_SIZE (TREE_TYPE (arg1
))
2581 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
2582 TYPE_SIZE (TREE_TYPE (arg1
)), flags
)))
2583 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0
, 1)))
2584 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1
, 1))))
2585 && OP_SAME (0) && OP_SAME (1));
2588 case ARRAY_RANGE_REF
:
2589 /* Operands 2 and 3 may be null.
2590 Compare the array index by value if it is constant first as we
2591 may have different types but same value here. */
2593 && (tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
2594 TREE_OPERAND (arg1
, 1))
2596 && OP_SAME_WITH_NULL (2)
2597 && OP_SAME_WITH_NULL (3));
2600 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2601 may be NULL when we're called to compare MEM_EXPRs. */
2602 return OP_SAME_WITH_NULL (0)
2604 && OP_SAME_WITH_NULL (2);
2607 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2613 case tcc_expression
:
2614 switch (TREE_CODE (arg0
))
2617 case TRUTH_NOT_EXPR
:
2620 case TRUTH_ANDIF_EXPR
:
2621 case TRUTH_ORIF_EXPR
:
2622 return OP_SAME (0) && OP_SAME (1);
2625 case WIDEN_MULT_PLUS_EXPR
:
2626 case WIDEN_MULT_MINUS_EXPR
:
	  /* The multiplication operands are commutative.  */
2632 case TRUTH_AND_EXPR
:
2634 case TRUTH_XOR_EXPR
:
2635 if (OP_SAME (0) && OP_SAME (1))
2638 /* Otherwise take into account this is a commutative operation. */
2639 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2640 TREE_OPERAND (arg1
, 1), flags
)
2641 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2642 TREE_OPERAND (arg1
, 0), flags
));
2647 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2654 switch (TREE_CODE (arg0
))
	  /* If the CALL_EXPRs call different functions, then they
	     clearly cannot be equal.  */
2659 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
2664 unsigned int cef
= call_expr_flags (arg0
);
2665 if (flags
& OEP_PURE_SAME
)
2666 cef
&= ECF_CONST
| ECF_PURE
;
2673 /* Now see if all the arguments are the same. */
2675 const_call_expr_arg_iterator iter0
, iter1
;
2677 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
2678 a1
= first_const_call_expr_arg (arg1
, &iter1
);
2680 a0
= next_const_call_expr_arg (&iter0
),
2681 a1
= next_const_call_expr_arg (&iter1
))
2682 if (! operand_equal_p (a0
, a1
, flags
))
2685 /* If we get here and both argument lists are exhausted
2686 then the CALL_EXPRs are equal. */
2687 return ! (a0
|| a1
);
2693 case tcc_declaration
:
2694 /* Consider __builtin_sqrt equal to sqrt. */
2695 return (TREE_CODE (arg0
) == FUNCTION_DECL
2696 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
2697 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
2698 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
2705 #undef OP_SAME_WITH_NULL
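/* Two concrete consequences of the rules above: -0.0 and 0.0 compare as
   distinct trees here even though -0.0 == 0.0 in C (unless the mode does
   not honor signed zeros), and a VAR_DECL such as "x" is considered equal
   to itself when OEP_ONLY_CONST is unset, which is what lets an expression
   like "x + x" be recognized as having identical operands.  */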
2708 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2709 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2711 When in doubt, return 0. */
2714 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
2716 int unsignedp1
, unsignedpo
;
2717 tree primarg0
, primarg1
, primother
;
2718 unsigned int correct_width
;
2720 if (operand_equal_p (arg0
, arg1
, 0))
2723 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
2724 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
2727 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2728 and see if the inner values are the same. This removes any
2729 signedness comparison, which doesn't matter here. */
2730 primarg0
= arg0
, primarg1
= arg1
;
2731 STRIP_NOPS (primarg0
);
2732 STRIP_NOPS (primarg1
);
2733 if (operand_equal_p (primarg0
, primarg1
, 0))
2736 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2737 actual comparison operand, ARG0.
2739 First throw away any conversions to wider types
2740 already present in the operands. */
2742 primarg1
= get_narrower (arg1
, &unsignedp1
);
2743 primother
= get_narrower (other
, &unsignedpo
);
2745 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
2746 if (unsignedp1
== unsignedpo
2747 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
2748 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
2750 tree type
= TREE_TYPE (arg0
);
2752 /* Make sure shorter operand is extended the right way
2753 to match the longer operand. */
2754 primarg1
= fold_convert (signed_or_unsigned_type_for
2755 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
2757 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
2764 /* See if ARG is an expression that is either a comparison or is performing
2765 arithmetic on comparisons. The comparisons must only be comparing
2766 two different values, which will be stored in *CVAL1 and *CVAL2; if
2767 they are nonzero it means that some operands have already been found.
2768 No variables may be used anywhere else in the expression except in the
2769 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2770 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2772 If this is true, return 1. Otherwise, return zero. */
2775 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
2777 enum tree_code code
= TREE_CODE (arg
);
2778 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
2780 /* We can handle some of the tcc_expression cases here. */
2781 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2783 else if (tclass
== tcc_expression
2784 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
2785 || code
== COMPOUND_EXPR
))
2786 tclass
= tcc_binary
;
2788 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
2789 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
2793 if (*cval1
|| *cval2
)
2803 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
2806 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
2807 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2808 cval1
, cval2
, save_p
));
2813 case tcc_expression
:
2814 if (code
== COND_EXPR
)
2815 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
2816 cval1
, cval2
, save_p
)
2817 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2818 cval1
, cval2
, save_p
)
2819 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
2820 cval1
, cval2
, save_p
));
2823 case tcc_comparison
:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */
2830 if (operand_equal_p (TREE_OPERAND (arg
, 0),
2831 TREE_OPERAND (arg
, 1), 0))
2835 *cval1
= TREE_OPERAND (arg
, 0);
2836 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
2838 else if (*cval2
== 0)
2839 *cval2
= TREE_OPERAND (arg
, 0);
2840 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
2845 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
2847 else if (*cval2
== 0)
2848 *cval2
= TREE_OPERAND (arg
, 1);
2849 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */
2867 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
2868 tree old1
, tree new1
)
2870 tree type
= TREE_TYPE (arg
);
2871 enum tree_code code
= TREE_CODE (arg
);
2872 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
2874 /* We can handle some of the tcc_expression cases here. */
2875 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2877 else if (tclass
== tcc_expression
2878 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2879 tclass
= tcc_binary
;
2884 return fold_build1_loc (loc
, code
, type
,
2885 eval_subst (loc
, TREE_OPERAND (arg
, 0),
2886 old0
, new0
, old1
, new1
));
2889 return fold_build2_loc (loc
, code
, type
,
2890 eval_subst (loc
, TREE_OPERAND (arg
, 0),
2891 old0
, new0
, old1
, new1
),
2892 eval_subst (loc
, TREE_OPERAND (arg
, 1),
2893 old0
, new0
, old1
, new1
));
2895 case tcc_expression
:
2899 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
2903 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
2907 return fold_build3_loc (loc
, code
, type
,
2908 eval_subst (loc
, TREE_OPERAND (arg
, 0),
2909 old0
, new0
, old1
, new1
),
2910 eval_subst (loc
, TREE_OPERAND (arg
, 1),
2911 old0
, new0
, old1
, new1
),
2912 eval_subst (loc
, TREE_OPERAND (arg
, 2),
2913 old0
, new0
, old1
, new1
));
2917 /* Fall through - ??? */
2919 case tcc_comparison
:
2921 tree arg0
= TREE_OPERAND (arg
, 0);
2922 tree arg1
= TREE_OPERAND (arg
, 1);
2924 /* We need to check both for exact equality and tree equality. The
2925 former will be true if the operand has a side-effect. In that
2926 case, we know the operand occurred exactly once. */
2928 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
2930 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
2933 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
2935 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
2938 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
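/* For example, substituting NEW0 = a for OLD0 = x in the tree for
   "(x == y) && (x < z)" yields the tree for "(a == y) && (a < z)":
   only operands of comparisons are rewritten, while the surrounding
   arithmetic and logical structure is rebuilt unchanged.  */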
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
			       tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return pedantic_non_lvalue_loc (loc, t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
		       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
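/* For example, when fold reduces "f () * 0" to 0, the call cannot simply
   be dropped: omit_one_operand_loc wraps it roughly as
   COMPOUND_EXPR <f (), 0> so the side effects of f () are still evaluated,
   and only side-effect-free omitted operands disappear entirely.  */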
3016 /* Return a simplified tree node for the truth-negation of ARG. This
3017 never alters ARG itself. We assume that ARG is an operation that
3018 returns a truth value (0 or 1).
3020 FIXME: one would think we would fold the result, but it causes
3021 problems with the dominator optimizer. */
3024 fold_truth_not_expr (location_t loc
, tree arg
)
3026 tree type
= TREE_TYPE (arg
);
3027 enum tree_code code
= TREE_CODE (arg
);
3028 location_t loc1
, loc2
;
3030 /* If this is a comparison, we can simply invert it, except for
3031 floating-point non-equality comparisons, in which case we just
3032 enclose a TRUTH_NOT_EXPR around what we have. */
3034 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3036 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3037 if (FLOAT_TYPE_P (op_type
)
3038 && flag_trapping_math
3039 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3040 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3043 code
= invert_tree_comparison (code
, HONOR_NANS (TYPE_MODE (op_type
)));
3044 if (code
== ERROR_MARK
)
3047 return build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3048 TREE_OPERAND (arg
, 1));
3054 return constant_boolean_node (integer_zerop (arg
), type
);
3056 case TRUTH_AND_EXPR
:
3057 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3058 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3059 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3060 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3061 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3064 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3065 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3066 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3067 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3068 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3070 case TRUTH_XOR_EXPR
:
3071 /* Here we can invert either operand. We invert the first operand
3072 unless the second operand is a TRUTH_NOT_EXPR in which case our
3073 result is the XOR of the first operand with the inside of the
3074 negation of the second operand. */
3076 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3077 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3078 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3080 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3081 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3082 TREE_OPERAND (arg
, 1));
3084 case TRUTH_ANDIF_EXPR
:
3085 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3086 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3087 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3088 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3089 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3091 case TRUTH_ORIF_EXPR
:
3092 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3093 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3094 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3095 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3096 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3098 case TRUTH_NOT_EXPR
:
3099 return TREE_OPERAND (arg
, 0);
3103 tree arg1
= TREE_OPERAND (arg
, 1);
3104 tree arg2
= TREE_OPERAND (arg
, 2);
3106 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3107 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
3112 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3113 VOID_TYPE_P (TREE_TYPE (arg1
))
3114 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3115 VOID_TYPE_P (TREE_TYPE (arg2
))
3116 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
3120 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3121 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3122 TREE_OPERAND (arg
, 0),
3123 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3125 case NON_LVALUE_EXPR
:
3126 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3127 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
3130 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3131 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3133 /* ... fall through ... */
3136 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3137 return build1_loc (loc
, TREE_CODE (arg
), type
,
3138 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3141 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3143 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
3146 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3148 case CLEANUP_POINT_EXPR
:
3149 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3150 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3151 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (loc, arg);
  if (!tem)
    tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
		     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
			  fold_build2_loc (loc, code, type, left, right));
}
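/* A concrete instance with constants: "(x | 4) & (x | 8)" becomes
   "x | (4 & 8)", and since 4 & 8 == 0 the whole expression folds down
   to plain "x" on a later pass.  */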
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			      TREE_OPERAND (arg0, 0),
			      build_real (type, r0));
    }

  return NULL_TREE;
}
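/* When unsafe math optimizations are enabled this turns, e.g.,
   "a / 3.0 + b / 3.0" into "(a + b) / 3.0" and "a / 2.0 + a / 4.0" into
   "a * 0.75"; the results can differ from the original in rounding or in
   where overflow occurs, which is why the transformation is flagged as
   unsafe.  */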
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && host_integerp (size, 0)
	  && tree_low_cst (size, 0) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
		       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */
3331 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3332 tree compare_type
, tree lhs
, tree rhs
)
3334 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3335 tree type
= TREE_TYPE (lhs
);
3336 tree signed_type
, unsigned_type
;
3337 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3338 enum machine_mode lmode
, rmode
, nmode
;
3339 int lunsignedp
, runsignedp
;
3340 int lvolatilep
= 0, rvolatilep
= 0;
3341 tree linner
, rinner
= NULL_TREE
;
  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
3350 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3351 &lunsignedp
, &lvolatilep
, false);
3352 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3353 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
)
3358 /* If this is not a constant, we can only do something if bit positions,
3359 sizes, and signedness are the same. */
3360 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3361 &runsignedp
, &rvolatilep
, false);
3363 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3364 || lunsignedp
!= runsignedp
|| offset
!= 0
3365 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
)
3369 /* See if we can find a mode to refer to this field. We should be able to,
3370 but fail if we can't. */
3372 && GET_MODE_BITSIZE (lmode
) > 0
3373 && flag_strict_volatile_bitfields
> 0)
3376 nmode
= get_best_mode (lbitsize
, lbitpos
, 0, 0,
3377 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3378 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3379 TYPE_ALIGN (TREE_TYPE (rinner
))),
3380 word_mode
, lvolatilep
|| rvolatilep
);
3381 if (nmode
== VOIDmode
)
3384 /* Set signed and unsigned types of the precision of this mode for the
3386 signed_type
= lang_hooks
.types
.type_for_mode (nmode
, 0);
3387 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3389 /* Compute the bit position and size for the new reference and our offset
3390 within it. If the new reference is the same size as the original, we
3391 won't optimize anything, so return zero. */
3392 nbitsize
= GET_MODE_BITSIZE (nmode
);
3393 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3395 if (nbitsize
== lbitsize
)
3398 if (BYTES_BIG_ENDIAN
)
3399 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3401 /* Make the mask to be used against the extracted field. */
3402 mask
= build_int_cst_type (unsigned_type
, -1);
3403 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
3404 mask
= const_binop (RSHIFT_EXPR
, mask
,
3405 size_int (nbitsize
- lbitsize
- lbitpos
));
3408 /* If not comparing with constant, just rework the comparison
3410 return fold_build2_loc (loc
, code
, compare_type
,
3411 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3412 make_bit_field_ref (loc
, linner
,
3417 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3418 make_bit_field_ref (loc
, rinner
,
  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */
3435 if (! integer_zerop (const_binop (RSHIFT_EXPR
,
3436 fold_convert_loc (loc
,
3437 unsigned_type
, rhs
),
3438 size_int (lbitsize
))))
3440 warning (0, "comparison is always %d due to width of bit-field",
3442 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3447 tree tem
= const_binop (RSHIFT_EXPR
,
3448 fold_convert_loc (loc
, signed_type
, rhs
),
3449 size_int (lbitsize
- 1));
3450 if (! integer_zerop (tem
) && ! integer_all_onesp (tem
))
3452 warning (0, "comparison is always %d due to width of bit-field",
3454 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3458 /* Single-bit compares should always be against zero. */
3459 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3461 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3462 rhs
= build_int_cst (type
, 0);
3465 /* Make a new bitfield reference, shift the constant over the
3466 appropriate number of bits and mask it with the computed mask
3467 (in case this was a signed field). If we changed it, make a new one. */
3468 lhs
= make_bit_field_ref (loc
, linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3471 TREE_SIDE_EFFECTS (lhs
) = 1;
3472 TREE_THIS_VOLATILE (lhs
) = 1;
3475 rhs
= const_binop (BIT_AND_EXPR
,
3476 const_binop (LSHIFT_EXPR
,
3477 fold_convert_loc (loc
, unsigned_type
, rhs
),
3478 size_int (lbitpos
)),
3481 lhs
= build2_loc (loc
, code
, compare_type
,
3482 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
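/* As a rough illustration of the constant case: with a 3-bit unsigned
   field F placed at bit offset 4 of a word W (on a little-endian layout),
   a test like "X.F == 5" becomes roughly "(W & mask) == (5 << 4)" where
   the mask covers bits 4..6, so the shift-and-truncate of a bit-field
   extraction is never emitted.  */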
3486 /* Subroutine for fold_truth_andor_1: decode a field reference.
3488 If EXP is a comparison reference, we return the innermost reference.
3490 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3491 set to the starting bit number.
3493 If the innermost field can be completely contained in a mode-sized
3494 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.
3499 *PUNSIGNEDP is set to the signedness of the field.
3501 *PMASK is set to the mask used. This is either contained in a
3502 BIT_AND_EXPR or derived from the width of the field.
3504 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3506 Return 0 if this is not a component reference or is one that we can't
3507 do anything with. */
3510 decode_field_reference (location_t loc
, tree exp
, HOST_WIDE_INT
*pbitsize
,
3511 HOST_WIDE_INT
*pbitpos
, enum machine_mode
*pmode
,
3512 int *punsignedp
, int *pvolatilep
,
3513 tree
*pmask
, tree
*pand_mask
)
3515 tree outer_type
= 0;
3517 tree mask
, inner
, offset
;
3519 unsigned int precision
;
3521 /* All the optimizations using this function assume integer fields.
3522 There are problems with FP fields since the type_for_size call
3523 below can fail for, e.g., XFmode. */
3524 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3527 /* We are interested in the bare arrangement of bits, so strip everything
3528 that doesn't affect the machine mode. However, record the type of the
3529 outermost expression if it may matter below. */
3530 if (CONVERT_EXPR_P (exp
)
3531 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3532 outer_type
= TREE_TYPE (exp
);
3535 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3537 and_mask
= TREE_OPERAND (exp
, 1);
3538 exp
= TREE_OPERAND (exp
, 0);
3539 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3540 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3544 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3545 punsignedp
, pvolatilep
, false);
3546 if ((inner
== exp
&& and_mask
== 0)
3547 || *pbitsize
< 0 || offset
!= 0
3548 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3551 /* If the number of bits in the reference is the same as the bitsize of
3552 the outer type, then the outer type gives the signedness. Otherwise
3553 (in case of a small bitfield) the signedness is unchanged. */
3554 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3555 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3557 /* Compute the mask to access the bitfield. */
3558 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3559 precision
= TYPE_PRECISION (unsigned_type
);
3561 mask
= build_int_cst_type (unsigned_type
, -1);
3563 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3564 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3566 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3568 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3569 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
3572 *pand_mask
= and_mask
;
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size)),
				     size_int (precision - size)));
}
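/* For instance, in a 32-bit type a MASK of 0xff is such a mask for
   SIZE == 8, while 0xff00 is not: the ones must occupy exactly the
   SIZE low-order bit positions.  */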
3596 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3597 represents the sign bit of EXP's type. If EXP represents a sign
3598 or zero extension, also test VAL against the unextended type.
3599 The return value is the (sub)expression whose sign bit is VAL,
3600 or NULL_TREE otherwise. */
3603 sign_bit_p (tree exp
, const_tree val
)
3605 unsigned HOST_WIDE_INT mask_lo
, lo
;
3606 HOST_WIDE_INT mask_hi
, hi
;
3610 /* Tree EXP must have an integral type. */
3611 t
= TREE_TYPE (exp
);
3612 if (! INTEGRAL_TYPE_P (t
))
3615 /* Tree VAL must be an integer constant. */
3616 if (TREE_CODE (val
) != INTEGER_CST
3617 || TREE_OVERFLOW (val
))
3620 width
= TYPE_PRECISION (t
);
3621 if (width
> HOST_BITS_PER_WIDE_INT
)
3623 hi
= (unsigned HOST_WIDE_INT
) 1 << (width
- HOST_BITS_PER_WIDE_INT
- 1);
3626 mask_hi
= ((unsigned HOST_WIDE_INT
) -1
3627 >> (2 * HOST_BITS_PER_WIDE_INT
- width
));
3633 lo
= (unsigned HOST_WIDE_INT
) 1 << (width
- 1);
3636 mask_lo
= ((unsigned HOST_WIDE_INT
) -1
3637 >> (HOST_BITS_PER_WIDE_INT
- width
));
3640 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3641 treat VAL as if it were unsigned. */
3642 if ((TREE_INT_CST_HIGH (val
) & mask_hi
) == hi
3643 && (TREE_INT_CST_LOW (val
) & mask_lo
) == lo
)
3646 /* Handle extension from a narrower type. */
3647 if (TREE_CODE (exp
) == NOP_EXPR
3648 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3649 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
	    && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example,
	X == 2 || X == 3 || X == 4 || X == 5
   is converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
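/* In this notation, "1 <= X && X <= 5" is the range "+ [1, 5]", its
   negation "X < 1 || X > 5" is "- [1, 5]", and a one-sided test such as
   "X > 10" is "- [-, 10]".  make_range below computes this triple
   (IN_P, LOW, HIGH) from an expression, and build_range_check turns it
   back into a comparison, typically of the subtract-and-compare form
   shown above.  */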
3737 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3738 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3739 and UPPER1_P are nonzero if the respective argument is an upper bound
3740 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3741 must be specified for a comparison. ARG1 will be converted to ARG0's
3742 type if both are specified. */
3745 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
3746 tree arg1
, int upper1_p
)
3752 /* If neither arg represents infinity, do the normal operation.
3753 Else, if not a comparison, return infinity. Else handle the special
3754 comparison rules. Note that most of the cases below won't occur, but
3755 are handled for consistency. */
3757 if (arg0
!= 0 && arg1
!= 0)
3759 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
3760 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
3762 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
3765 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
3768 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3769 for neither. In real maths, we cannot assume open ended ranges are
3770 the same. But, this is computer arithmetic, where numbers are finite.
3771 We can therefore make the transformation of any unbounded range with
3772 the value Z, Z being greater than any representable number. This permits
3773 us to treat unbounded ranges as equal. */
3774 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
3775 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
3779 result
= sgn0
== sgn1
;
3782 result
= sgn0
!= sgn1
;
3785 result
= sgn0
< sgn1
;
3788 result
= sgn0
<= sgn1
;
3791 result
= sgn0
> sgn1
;
3794 result
= sgn0
>= sgn1
;
3800 return constant_boolean_node (result
, type
);
3803 /* Helper routine for make_range. Perform one step for it, return
3804 new expression if the loop should continue or NULL_TREE if it should
3808 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
3809 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
3810 bool *strict_overflow_p
)
3812 tree arg0_type
= TREE_TYPE (arg0
);
3813 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
3814 int in_p
= *p_in_p
, n_in_p
;
3818 case TRUTH_NOT_EXPR
:
3822 case EQ_EXPR
: case NE_EXPR
:
3823 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
3824 /* We can only do something if the range is testing for zero
3825 and if the second operand is an integer constant. Note that
3826 saying something is "in" the range we make is done by
3827 complementing IN_P since it will set in the initial case of
3828 being not equal to zero; "out" is leaving it alone. */
3829 if (low
== NULL_TREE
|| high
== NULL_TREE
3830 || ! integer_zerop (low
) || ! integer_zerop (high
)
3831 || TREE_CODE (arg1
) != INTEGER_CST
)
3836 case NE_EXPR
: /* - [c, c] */
3839 case EQ_EXPR
: /* + [c, c] */
3840 in_p
= ! in_p
, low
= high
= arg1
;
3842 case GT_EXPR
: /* - [-, c] */
3843 low
= 0, high
= arg1
;
3845 case GE_EXPR
: /* + [c, -] */
3846 in_p
= ! in_p
, low
= arg1
, high
= 0;
3848 case LT_EXPR
: /* - [c, -] */
3849 low
= arg1
, high
= 0;
3851 case LE_EXPR
: /* + [-, c] */
3852 in_p
= ! in_p
, low
= 0, high
= arg1
;
3858 /* If this is an unsigned comparison, we also know that EXP is
3859 greater than or equal to zero. We base the range tests we make
3860 on that fact, so we record it here so we can parse existing
3861 range tests. We test arg0_type since often the return type
3862 of, e.g. EQ_EXPR, is boolean. */
3863 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
3865 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3867 build_int_cst (arg0_type
, 0),
3871 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
	  /* If the high bound is missing, but we have a nonzero low
	     bound, reverse the range so it goes from zero to the low bound
	     minus 1.  */
3876 if (high
== 0 && low
&& ! integer_zerop (low
))
3879 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
3880 integer_one_node
, 0);
3881 low
= build_int_cst (arg0_type
, 0);
3891 /* (-x) IN [a,b] -> x in [-b, -a] */
3892 n_low
= range_binop (MINUS_EXPR
, exp_type
,
3893 build_int_cst (exp_type
, 0),
3895 n_high
= range_binop (MINUS_EXPR
, exp_type
,
3896 build_int_cst (exp_type
, 0),
3898 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
3904 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
3905 build_int_cst (exp_type
, 1));
3909 if (TREE_CODE (arg1
) != INTEGER_CST
)
3912 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3913 move a constant to the other side. */
3914 if (!TYPE_UNSIGNED (arg0_type
)
3915 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
3918 /* If EXP is signed, any overflow in the computation is undefined,
3919 so we don't worry about it so long as our computations on
3920 the bounds don't overflow. For unsigned, overflow is defined
3921 and this is exactly the right thing. */
3922 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3923 arg0_type
, low
, 0, arg1
, 0);
3924 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3925 arg0_type
, high
, 1, arg1
, 0);
3926 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
3927 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
3930 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
3931 *strict_overflow_p
= true;
3934 /* Check for an unsigned range which has wrapped around the maximum
3935 value thus making n_high < n_low, and normalize it. */
3936 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
3938 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
3939 integer_one_node
, 0);
3940 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
3941 integer_one_node
, 0);
	  /* If the range is of the form +/- [ x+1, x ], we won't
	     be able to normalize it.  But then, it represents the
	     whole range or the empty set, so make it
	     +/- [ -, - ].  */
3947 if (tree_int_cst_equal (n_low
, low
)
3948 && tree_int_cst_equal (n_high
, high
))
3954 low
= n_low
, high
= n_high
;
3962 case NON_LVALUE_EXPR
:
3963 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
3966 if (! INTEGRAL_TYPE_P (arg0_type
)
3967 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
3968 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
3971 n_low
= low
, n_high
= high
;
3974 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
3977 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
	  /* If we're converting arg0 from an unsigned type, to exp,
	     a signed type, we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */
3988 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
3992 /* For fixed-point modes, we need to pass the saturating flag
3993 as the 2nd parameter. */
3994 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
3996 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
3997 TYPE_SATURATING (arg0_type
));
4000 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
4002 /* A range without an upper bound is, naturally, unbounded.
4003 Since convert would have cropped a very large value, use
4004 the max value for the destination type. */
4006 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
4007 : TYPE_MAX_VALUE (arg0_type
);
4009 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
4010 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
4011 fold_convert_loc (loc
, arg0_type
,
4013 build_int_cst (arg0_type
, 1));
4015 /* If the low bound is specified, "and" the range with the
4016 range for which the original unsigned value will be
4020 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
4021 1, fold_convert_loc (loc
, arg0_type
,
4026 in_p
= (n_in_p
== in_p
);
4030 /* Otherwise, "or" the range with the range of the input
4031 that will be interpreted as negative. */
4032 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
4033 1, fold_convert_loc (loc
, arg0_type
,
4038 in_p
= (in_p
!= n_in_p
);
4052 /* Given EXP, a logical expression, set the range it is testing into
4053 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4054 actually being tested. *PLOW and *PHIGH will be made of the same
4055 type as the returned expression. If EXP is not a comparison, we
4056 will most likely not be returning a useful value and range. Set
4057 *STRICT_OVERFLOW_P to true if the return value is only valid
4058 because signed overflow is undefined; otherwise, do not change
4059 *STRICT_OVERFLOW_P. */
4062 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4063 bool *strict_overflow_p
)
4065 enum tree_code code
;
4066 tree arg0
, arg1
= NULL_TREE
;
4067 tree exp_type
, nexp
;
4070 location_t loc
= EXPR_LOCATION (exp
);
4072 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4073 and see if we can refine the range. Some of the cases below may not
4074 happen, but it doesn't seem worth worrying about this. We "continue"
4075 the outer loop when we've changed something; otherwise we "break"
4076 the switch, which will "break" the while. */
4079 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4083 code
= TREE_CODE (exp
);
4084 exp_type
= TREE_TYPE (exp
);
4087 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4089 if (TREE_OPERAND_LENGTH (exp
) > 0)
4090 arg0
= TREE_OPERAND (exp
, 0);
4091 if (TREE_CODE_CLASS (code
) == tcc_binary
4092 || TREE_CODE_CLASS (code
) == tcc_comparison
4093 || (TREE_CODE_CLASS (code
) == tcc_expression
4094 && TREE_OPERAND_LENGTH (exp
) > 1))
4095 arg1
= TREE_OPERAND (exp
, 1);
4097 if (arg0
== NULL_TREE
)
4100 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
4101 &high
, &in_p
, strict_overflow_p
);
4102 if (nexp
== NULL_TREE
)
4107 /* If EXP is a constant, we can evaluate whether this is true or false. */
4108 if (TREE_CODE (exp
) == INTEGER_CST
)
4110 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4112 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4118 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4122 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4123 type, TYPE, return an expression to test if EXP is in (or out of, depending
4124 on IN_P) the range. Return 0 if the test couldn't be created. */
4127 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4128 tree low
, tree high
)
4130 tree etype
= TREE_TYPE (exp
), value
;
4132 #ifdef HAVE_canonicalize_funcptr_for_compare
4133 /* Disable this optimization for function pointer expressions
4134 on targets that require function pointer canonicalization. */
4135 if (HAVE_canonicalize_funcptr_for_compare
4136 && TREE_CODE (etype
) == POINTER_TYPE
4137 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4143 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4145 return invert_truthvalue_loc (loc
, value
);
4150 if (low
== 0 && high
== 0)
4151 return build_int_cst (type
, 1);
4154 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4155 fold_convert_loc (loc
, etype
, high
));
4158 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4159 fold_convert_loc (loc
, etype
, low
));
4161 if (operand_equal_p (low
, high
, 0))
4162 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4163 fold_convert_loc (loc
, etype
, low
));
4165 if (integer_zerop (low
))
4167 if (! TYPE_UNSIGNED (etype
))
4169 etype
= unsigned_type_for (etype
);
4170 high
= fold_convert_loc (loc
, etype
, high
);
4171 exp
= fold_convert_loc (loc
, etype
, exp
);
4173 return build_range_check (loc
, type
, exp
, 1, 0, high
);
  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }
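  /* Illustrative sketch (editorial addition, not part of the original GCC
     code): at the source level, and assuming a 32-bit unsigned int `c',
     the transformation above amounts to

	 c >= 1 && c <= 0x7fffffff    ==>    (int) c > 0

     i.e. when the upper bound is the signed maximum of the precision the
     whole two-sided test collapses to one signed comparison against 0.  */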
  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.
     First make sure that arithmetics in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
4307 int lowequal
= ((low0
== 0 && low1
== 0)
4308 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4309 low0
, 0, low1
, 0)));
4310 int highequal
= ((high0
== 0 && high1
== 0)
4311 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4312 high0
, 1, high1
, 1)));
  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
4316 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4319 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4320 high1
, 1, high0
, 1))))
4322 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4323 tem
= low0
, low0
= low1
, low1
= tem
;
4324 tem
= high0
, high0
= high1
, high1
= tem
;
  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
4330 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4331 high0
, 1, low1
, 0));
4332 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4333 high1
, 1, high0
, 1));
  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }
4350 else if (in0_p
&& ! in1_p
)
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
4361 in_p
= 1, low
= low0
, high
= high0
;
4362 else if (lowequal
&& highequal
)
4363 in_p
= 0, low
= high
= 0;
4364 else if (subset
&& lowequal
)
4366 low
= range_successor (high1
);
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
4376 else if (! subset
|| highequal
)
4379 high
= range_predecessor (low1
);
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
4391 else if (! in0_p
&& in1_p
)
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
4398 in_p
= 1, low
= low1
, high
= high1
;
4399 else if (subset
|| highequal
)
4400 in_p
= 0, low
= high
= 0;
4403 low
= range_successor (high0
);
	      /* high1 > high0 but high0 has no successor.  Punt.  */
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
4424 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4425 range_successor (high0
),
4427 in_p
= 0, low
= low0
, high
= high1
;
	      /* Canonicalize - [min, x] into - [-, x].  */
4431 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4432 switch (TREE_CODE (TREE_TYPE (low0
)))
4435 if (TYPE_PRECISION (TREE_TYPE (low0
))
4436 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4440 if (tree_int_cst_equal (low0
,
4441 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4445 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4446 && integer_zerop (low0
))
	      /* Canonicalize - [x, max] into - [x, -].  */
4454 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4455 switch (TREE_CODE (TREE_TYPE (high1
)))
4458 if (TYPE_PRECISION (TREE_TYPE (high1
))
4459 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4463 if (tree_int_cst_equal (high1
,
4464 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4468 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4469 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4471 integer_one_node
, 1)))
	  /* The ranges might be also adjacent between the maximum and
	     minimum values of the given type.  For
	     - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
	     return + [x + 1, y - 1].  */
4482 if (low0
== 0 && high1
== 0)
4484 low
= range_successor (high0
);
4485 high
= range_predecessor (low1
);
4486 if (low
== 0 || high
== 0)
4496 in_p
= 0, low
= low0
, high
= high0
;
4498 in_p
= 0, low
= low0
, high
= high1
;
  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
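/* Illustrative sketch (editorial addition, not part of the original GCC
   code):

     - merging the two "in" ranges [2, 9] and [5, 20] (as produced by
       `x >= 2 && x <= 9' and `x >= 5 && x <= 20') yields the single
       "in" range [5, 9];

     - merging the two "out" ranges -[2, 9] and -[5, 20] (i.e.
       `x < 2 || x > 9' combined with `x < 5 || x > 20') yields the
       single "out" range -[2, 20], since the excluded intervals
       overlap and their union is contiguous.  */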
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
4518 enum tree_code comp_code
= TREE_CODE (arg0
);
4519 tree arg00
= TREE_OPERAND (arg0
, 0);
4520 tree arg01
= TREE_OPERAND (arg0
, 1);
4521 tree arg1_type
= TREE_TYPE (arg1
);
  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
4546 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4547 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
4548 ? real_zerop (arg01
)
4549 : integer_zerop (arg01
))
4550 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4551 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
	  /* In the case that A is of the form X-Y, '-A' (arg2) may
	     have already been folded to Y-X, check for that.  */
4554 || (TREE_CODE (arg1
) == MINUS_EXPR
4555 && TREE_CODE (arg2
) == MINUS_EXPR
4556 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4557 TREE_OPERAND (arg2
, 1), 0)
4558 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4559 TREE_OPERAND (arg2
, 0), 0))))
4564 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
4565 return pedantic_non_lvalue_loc (loc
,
4566 fold_convert_loc (loc
, type
,
4567 negate_expr (tem
)));
4570 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4573 if (flag_trapping_math
)
4578 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4579 arg1
= fold_convert_loc (loc
, signed_type_for
4580 (TREE_TYPE (arg1
)), arg1
);
4581 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4582 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4585 if (flag_trapping_math
)
4589 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4590 arg1
= fold_convert_loc (loc
, signed_type_for
4591 (TREE_TYPE (arg1
)), arg1
);
4592 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4593 return negate_expr (fold_convert_loc (loc
, type
, tem
));
4595 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
      /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
	 A == 0 ? A : 0 is always 0 unless A is -0.  Note that
	 both transformations are correct when A is NaN: A != 0
	 is then true, and A == 0 is false.  */
4604 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4605 && integer_zerop (arg01
) && integer_zerop (arg2
))
4607 if (comp_code
== NE_EXPR
)
4608 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4609 else if (comp_code
== EQ_EXPR
)
4610 return build_int_cst (type
, 0);
  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
4639 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4640 && operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
4644 || (strcmp (lang_hooks
.name
, "GNU C++") != 0
4645 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
4646 || ! maybe_lvalue_p (arg1
)
4647 || ! maybe_lvalue_p (arg2
)))
4649 tree comp_op0
= arg00
;
4650 tree comp_op1
= arg01
;
4651 tree comp_type
= TREE_TYPE (comp_op0
);
      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
4654 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
4664 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg2
));
4666 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
4675 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4677 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
4678 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
4679 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
4680 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
4681 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
4682 comp_op1
, comp_op0
);
4683 return pedantic_non_lvalue_loc (loc
,
4684 fold_convert_loc (loc
, type
, tem
));
4691 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4693 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
4694 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
4695 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
4696 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
4697 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
4698 comp_op1
, comp_op0
);
4699 return pedantic_non_lvalue_loc (loc
,
4700 fold_convert_loc (loc
, type
, tem
));
4704 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4705 return pedantic_non_lvalue_loc (loc
,
4706 fold_convert_loc (loc
, type
, arg2
));
4709 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4710 return pedantic_non_lvalue_loc (loc
,
4711 fold_convert_loc (loc
, type
, arg1
));
4714 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
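  /* Illustrative sketch (editorial addition, not part of the original GCC
     code): when NaNs need not be honored and there is no signed-zero
     concern, the table above means for doubles that

	 a < b ? a : b      folds to   MIN_EXPR <b, a>
	 a >= b ? a : b     folds to   MAX_EXPR <a, b>

     whereas with NaNs honored the original ?: must be kept, because
     MIN/MAX could pick the wrong operand when one value is a NaN.  */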
  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
4725 if (INTEGRAL_TYPE_P (type
)
4726 && TREE_CODE (arg01
) == INTEGER_CST
4727 && TREE_CODE (arg2
) == INTEGER_CST
)
4731 if (TREE_CODE (arg1
) == INTEGER_CST
)
	  /* We can replace A with C1 in this case.  */
4734 arg1
= fold_convert_loc (loc
, type
, arg01
);
4735 return fold_build3_loc (loc
, COND_EXPR
, type
, arg0
, arg1
, arg2
);
	  /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	     MIN_EXPR, to preserve the signedness of the comparison.  */
4740 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4742 && operand_equal_p (arg01
,
4743 const_binop (PLUS_EXPR
, arg2
,
4744 build_int_cst (type
, 1)),
4747 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
4748 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4750 return pedantic_non_lvalue_loc (loc
,
4751 fold_convert_loc (loc
, type
, tem
));
	  /* If C1 is C2 - 1, this is min(A, C2), with the same care
	     as above.  */
4758 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4760 && operand_equal_p (arg01
,
4761 const_binop (MINUS_EXPR
, arg2
,
4762 build_int_cst (type
, 1)),
4765 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
4766 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4768 return pedantic_non_lvalue_loc (loc
,
4769 fold_convert_loc (loc
, type
, tem
));
	  /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	     MAX_EXPR, to preserve the signedness of the comparison.  */
4776 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4778 && operand_equal_p (arg01
,
4779 const_binop (MINUS_EXPR
, arg2
,
4780 build_int_cst (type
, 1)),
4783 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
4784 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4786 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
	  /* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
4792 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4794 && operand_equal_p (arg01
,
4795 const_binop (PLUS_EXPR
, arg2
,
4796 build_int_cst (type
, 1)),
4799 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
4800 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4802 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
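  /* Illustrative sketch (editorial addition, not part of the original GCC
     code): for a signed int x,

	 x < 5 ? x : 4

     has C1 == 5 and C2 == 4, i.e. C1 == C2 + 1, so it is recognized as
     the residue of a minimum and folded back to MIN_EXPR <x, 4>.  */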
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
4829 int or_op
= (code
== TRUTH_ORIF_EXPR
4830 || code
== TRUTH_OR_EXPR
);
4831 int in0_p
, in1_p
, in_p
;
4832 tree low0
, low1
, low
, high0
, high1
, high
;
4833 bool strict_overflow_p
= false;
4834 tree lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
4835 tree rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
4837 const char * const warnmsg
= G_("assuming signed overflow does not occur "
4838 "when simplifying range test");
  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
4843 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
4849 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
4850 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
4852 && 0 != (tem
= (build_range_check (loc
, type
,
4854 : rhs
!= 0 ? rhs
: integer_zero_node
,
4857 if (strict_overflow_p
)
4858 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
4859 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
4865 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4866 && lhs
!= 0 && rhs
!= 0
4867 && (code
== TRUTH_ANDIF_EXPR
4868 || code
== TRUTH_ORIF_EXPR
)
4869 && operand_equal_p (lhs
, rhs
, 0))
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
4874 if (simple_operand_p (lhs
))
4875 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
4876 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4879 else if (!lang_hooks
.decls
.global_bindings_p ()
4880 && !CONTAINS_PLACEHOLDER_P (lhs
))
4882 tree common
= save_expr (lhs
);
4884 if (0 != (lhs
= build_range_check (loc
, type
, common
,
4885 or_op
? ! in0_p
: in0_p
,
4887 && (0 != (rhs
= build_range_check (loc
, type
, common
,
4888 or_op
? ! in1_p
: in1_p
,
4891 if (strict_overflow_p
)
4892 fold_overflow_warning (warnmsg
,
4893 WARN_STRICT_OVERFLOW_COMPARISON
);
4894 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
4895 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
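/* Illustrative sketch (editorial addition, not part of the original GCC
   code): fold_range_test is what turns

       ch >= '0' && ch <= '9'

   into a single range check, conceptually

       (unsigned char) (ch - '0') <= 9

   and, when the branch cost makes it worthwhile, rewrites the
   short-circuit && or || into a non-short-circuit TRUTH_AND_EXPR or
   TRUTH_OR_EXPR so both operands are evaluated unconditionally.  */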
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
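/* Illustrative sketch (editorial addition, not part of the original GCC
   code): with p == 8, modesize == 32 and c == 0xfffffff0 (i.e. -16
   already sign-extended from 8 bits), the steps above compute

       temp = (c >> 7) & 1                   = 1
       temp = (1 << 31) >> (32 - 8 - 1)      = 0xffffff00   (arithmetic shift)
       c ^ temp                              = 0x000000f0

   so the extra bits come out zero exactly because C was sign-extended;
   had C been 0x000000f0 instead, the XOR would leave them set.  */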
/* For an expression that has the form
     (A && B) || ~A
   or
     (A || B) && ~A,
   we can drop one of the inner expressions and simplify to
     A || B
   or
     A && B

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
4966 tree type
= TREE_TYPE (cmpop
);
4967 enum tree_code code
= TREE_CODE (cmpop
);
4968 enum tree_code truthop_code
= TREE_CODE (op
);
4969 tree lhs
= TREE_OPERAND (op
, 0);
4970 tree rhs
= TREE_OPERAND (op
, 1);
4971 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
4972 enum tree_code rhs_code
= TREE_CODE (rhs
);
4973 enum tree_code lhs_code
= TREE_CODE (lhs
);
4974 enum tree_code inv_code
;
4976 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
4979 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
4982 if (rhs_code
== truthop_code
)
4984 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
4985 if (newrhs
!= NULL_TREE
)
4988 rhs_code
= TREE_CODE (rhs
);
4991 if (lhs_code
== truthop_code
&& !rhs_only
)
4993 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
4994 if (newlhs
!= NULL_TREE
)
4997 lhs_code
= TREE_CODE (lhs
);
5001 inv_code
= invert_tree_comparison (code
, HONOR_NANS (TYPE_MODE (type
)));
5002 if (inv_code
== rhs_code
5003 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5004 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5006 if (!rhs_only
&& inv_code
== lhs_code
5007 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5008 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5010 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
5011 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */
5053 enum tree_code wanted_code
;
5054 enum tree_code lcode
, rcode
;
5055 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5056 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5057 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5058 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5059 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
5060 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
5061 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5062 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5063 enum machine_mode lnmode
, rnmode
;
5064 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5065 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5066 tree l_const
, r_const
;
5067 tree lntype
, rntype
, result
;
5068 HOST_WIDE_INT first_bit
, end_bit
;
  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */
5075 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5078 lcode
= TREE_CODE (lhs
);
5079 rcode
= TREE_CODE (rhs
);
5081 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5083 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5084 build_int_cst (TREE_TYPE (lhs
), 0));
5088 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5090 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5091 build_int_cst (TREE_TYPE (rhs
), 0));
5095 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5096 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5099 ll_arg
= TREE_OPERAND (lhs
, 0);
5100 lr_arg
= TREE_OPERAND (lhs
, 1);
5101 rl_arg
= TREE_OPERAND (rhs
, 0);
5102 rr_arg
= TREE_OPERAND (rhs
, 1);
  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5105 if (simple_operand_p (ll_arg
)
5106 && simple_operand_p (lr_arg
))
5108 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5109 && operand_equal_p (lr_arg
, rr_arg
, 0))
5111 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
5112 truth_type
, ll_arg
, lr_arg
);
5116 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5117 && operand_equal_p (lr_arg
, rl_arg
, 0))
5119 result
= combine_comparisons (loc
, code
, lcode
,
5120 swap_tree_comparison (rcode
),
5121 truth_type
, ll_arg
, lr_arg
);
5127 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5128 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */
5135 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5137 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5138 && simple_operand_p (rl_arg
)
5139 && simple_operand_p (rr_arg
))
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5142 if (code
== TRUTH_OR_EXPR
5143 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5144 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5145 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5146 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5147 return build2_loc (loc
, NE_EXPR
, truth_type
,
5148 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5150 build_int_cst (TREE_TYPE (ll_arg
), 0));
      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5153 if (code
== TRUTH_AND_EXPR
5154 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5155 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5156 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5157 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5158 return build2_loc (loc
, EQ_EXPR
, truth_type
,
5159 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5161 build_int_cst (TREE_TYPE (ll_arg
), 0));
  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */
5167 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5168 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5172 ll_inner
= decode_field_reference (loc
, ll_arg
,
5173 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5174 &ll_unsignedp
, &volatilep
, &ll_mask
,
5176 lr_inner
= decode_field_reference (loc
, lr_arg
,
5177 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5178 &lr_unsignedp
, &volatilep
, &lr_mask
,
5180 rl_inner
= decode_field_reference (loc
, rl_arg
,
5181 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5182 &rl_unsignedp
, &volatilep
, &rl_mask
,
5184 rr_inner
= decode_field_reference (loc
, rr_arg
,
5185 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5186 &rr_unsignedp
, &volatilep
, &rr_mask
,
  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs.  */
5193 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
5194 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5197 if (TREE_CODE (lr_arg
) == INTEGER_CST
5198 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5199 l_const
= lr_arg
, r_const
= rr_arg
;
5200 else if (lr_inner
== 0 || rr_inner
== 0
5201 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5204 l_const
= r_const
= 0;
  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */
5210 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5211 if (lcode
!= wanted_code
)
5213 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
5226 if (rcode
!= wanted_code
)
5228 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
5240 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5241 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5242 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5243 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5245 if (lnmode
== VOIDmode
)
5248 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5249 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5250 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5251 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5253 if (BYTES_BIG_ENDIAN
)
5255 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5256 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5259 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
5260 size_int (xll_bitpos
));
5261 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
5262 size_int (xrl_bitpos
));
5266 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
5267 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5268 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
5269 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5270 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5273 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5275 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5280 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
5281 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5282 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
5283 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5284 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5287 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5289 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
5298 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5299 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5300 /* Make sure the two fields on the right
5301 correspond to the left without being swapped. */
5302 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5305 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5306 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5307 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5308 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5310 if (rnmode
== VOIDmode
)
5313 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5314 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5315 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5316 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5318 if (BYTES_BIG_ENDIAN
)
5320 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5321 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5324 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5326 size_int (xlr_bitpos
));
5327 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5329 size_int (xrr_bitpos
));
  /* Make a mask that corresponds to both fields being compared.
     Do this for both items being compared.  If the operands are the
     same size and the bits being compared are in the same position
     then we can do this by masking both and comparing the masked
     results.  */
5336 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5337 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
5338 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5340 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5341 ll_unsignedp
|| rl_unsignedp
);
5342 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5343 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5345 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5346 lr_unsignedp
|| rr_unsignedp
);
5347 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5348 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5350 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
  /* There is still another way we can do something:  If both pairs of
     fields being compared are adjacent, we may be able to make a wider
     field containing them both.

     Note that we still must mask the lhs/rhs expressions.  Furthermore,
     the mask must be shifted to account for the shift done by
     make_bit_field_ref.  */
5360 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5361 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5362 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5363 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5367 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
,
5368 ll_bitsize
+ rl_bitsize
,
5369 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5370 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
,
5371 lr_bitsize
+ rr_bitsize
,
5372 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5374 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5375 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
5376 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5377 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
      /* Convert to the smaller type before masking out unwanted bits.  */
5381 if (lntype
!= rntype
)
5383 if (lnbitsize
> rnbitsize
)
5385 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
5386 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
5389 else if (lnbitsize
< rnbitsize
)
5391 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
5392 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
5397 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5398 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5400 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5401 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5403 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
5413 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
5414 if (! integer_zerop (result
)
5415 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
5416 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
5418 if (wanted_code
== NE_EXPR
)
5420 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5421 return constant_boolean_node (true, truth_type
);
5425 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5426 return constant_boolean_node (false, truth_type
);
  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
5434 result
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5435 ll_unsignedp
|| rl_unsignedp
);
5437 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5438 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5439 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5441 return build2_loc (loc
, wanted_code
, truth_type
, result
,
5442 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
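/* Illustrative sketch (editorial addition, not part of the original GCC
   code): given

       struct s { unsigned a : 4; unsigned b : 4; } *p;

   the test `p->a == 2 && p->b == 4' compares two adjacent 4-bit fields,
   so the code above can load the containing byte once, mask it, and
   compare against a single merged constant, conceptually

       (*(unsigned char *) p & 0xff) == 0x42

   (the exact masks and shifts depend on endianness and field layout),
   i.e. one memory access and one comparison instead of two.  */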
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
5453 enum tree_code op_code
;
5456 int consts_equal
, consts_lt
;
5459 STRIP_SIGN_NOPS (arg0
);
5461 op_code
= TREE_CODE (arg0
);
5462 minmax_const
= TREE_OPERAND (arg0
, 1);
5463 comp_const
= fold_convert_loc (loc
, TREE_TYPE (arg0
), op1
);
5464 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5465 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5466 inner
= TREE_OPERAND (arg0
, 0);
  /* If something does not permit us to optimize, return the original tree.  */
5469 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5470 || TREE_CODE (comp_const
) != INTEGER_CST
5471 || TREE_OVERFLOW (comp_const
)
5472 || TREE_CODE (minmax_const
) != INTEGER_CST
5473 || TREE_OVERFLOW (minmax_const
))
  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
: case LT_EXPR
: case LE_EXPR
:
5484 = optimize_minmax_comparison (loc
,
5485 invert_tree_comparison (code
, false),
5488 return invert_truthvalue_loc (loc
, tem
);
5494 fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
5495 optimize_minmax_comparison
5496 (loc
, EQ_EXPR
, type
, arg0
, comp_const
),
5497 optimize_minmax_comparison
5498 (loc
, GT_EXPR
, type
, arg0
, comp_const
));
5501 if (op_code
== MAX_EXPR
&& consts_equal
)
5502 /* MAX (X, 0) == 0 -> X <= 0 */
5503 return fold_build2_loc (loc
, LE_EXPR
, type
, inner
, comp_const
);
5505 else if (op_code
== MAX_EXPR
&& consts_lt
)
5506 /* MAX (X, 0) == 5 -> X == 5 */
5507 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5509 else if (op_code
== MAX_EXPR
)
5510 /* MAX (X, 0) == -1 -> false */
5511 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5513 else if (consts_equal
)
5514 /* MIN (X, 0) == 0 -> X >= 0 */
5515 return fold_build2_loc (loc
, GE_EXPR
, type
, inner
, comp_const
);
5518 /* MIN (X, 0) == 5 -> false */
5519 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5522 /* MIN (X, 0) == -1 -> X == -1 */
5523 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5526 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5527 /* MAX (X, 0) > 0 -> X > 0
5528 MAX (X, 0) > 5 -> X > 5 */
5529 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5531 else if (op_code
== MAX_EXPR
)
5532 /* MAX (X, 0) > -1 -> true */
5533 return omit_one_operand_loc (loc
, type
, integer_one_node
, inner
);
5535 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5536 /* MIN (X, 0) > 0 -> false
5537 MIN (X, 0) > 5 -> false */
5538 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5541 /* MIN (X, 0) > -1 -> X > -1 */
5542 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */
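/* Illustrative sketch (editorial addition, not part of the original GCC
   code): the overflow caveat above matters.  For signed int x, folding

       (x * 8) / 4    ==>    x * 2

   is only valid because signed overflow is undefined; for unsigned x the
   original expression is well defined even when x * 8 wraps, and then the
   two forms can differ, e.g. with 32-bit unsigned x = 0x40000000 the
   original evaluates to 0 while x * 2 evaluates to 0x80000000.  */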
5568 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5569 bool *strict_overflow_p
)
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */
5583 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
5590 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5591 bool *strict_overflow_p
)
5593 tree type
= TREE_TYPE (t
);
5594 enum tree_code tcode
= TREE_CODE (t
);
5595 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5596 > GET_MODE_SIZE (TYPE_MODE (type
)))
5597 ? wide_type
: type
);
5599 int same_p
= tcode
== code
;
5600 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5601 bool sub_strict_overflow_p
;
  /* Don't deal with constants of zero here; they confuse the code below.  */
5604 if (integer_zerop (c
))
5607 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5608 op0
= TREE_OPERAND (t
, 0);
5610 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5611 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
5620 if (code
== MULT_EXPR
5621 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, t
, c
)))
5622 return const_binop (code
, fold_convert (ctype
, t
),
5623 fold_convert (ctype
, c
));
5626 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5627 /* If op0 is an expression ... */
5628 if ((COMPARISON_CLASS_P (op0
)
5629 || UNARY_CLASS_P (op0
)
5630 || BINARY_CLASS_P (op0
)
5631 || VL_EXP_CLASS_P (op0
)
5632 || EXPRESSION_CLASS_P (op0
))
5633 /* ... and has wrapping overflow, and its type is smaller
5634 than ctype, then we cannot pass through as widening. */
5635 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
))
5636 && ! (TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
5637 && TYPE_IS_SIZETYPE (TREE_TYPE (op0
)))
5638 && (TYPE_PRECISION (ctype
)
5639 > TYPE_PRECISION (TREE_TYPE (op0
))))
5640 /* ... or this is a truncation (t is narrower than op0),
5641 then we cannot pass through this narrowing. */
5642 || (TYPE_PRECISION (type
)
5643 < TYPE_PRECISION (TREE_TYPE (op0
)))
5644 /* ... or signedness changes for division or modulus,
5645 then we cannot pass through this conversion. */
5646 || (code
!= MULT_EXPR
5647 && (TYPE_UNSIGNED (ctype
)
5648 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
5649 /* ... or has undefined overflow while the converted to
5650 type has not, we cannot do the operation in the inner type
5651 as that would introduce undefined overflow. */
5652 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
))
5653 && !TYPE_OVERFLOW_UNDEFINED (type
))))
      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
5659 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5660 && TREE_CODE (t2
) == INTEGER_CST
5661 && !TREE_OVERFLOW (t2
)
5662 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
5664 ? ctype
: NULL_TREE
,
5665 strict_overflow_p
))))
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
5672 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
5674 tree cstype
= (*signed_type_for
) (ctype
);
5675 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
5678 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
5679 return fold_convert (ctype
, t1
);
5683 /* If the constant is negative, we cannot simplify this. */
5684 if (tree_int_cst_sgn (c
) == -1)
5688 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
5690 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
5693 case MIN_EXPR
: case MAX_EXPR
:
5694 /* If widening the type changes the signedness, then we can't perform
5695 this optimization as that changes the result. */
5696 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
5699 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5700 sub_strict_overflow_p
= false;
5701 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5702 &sub_strict_overflow_p
)) != 0
5703 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
5704 &sub_strict_overflow_p
)) != 0)
5706 if (tree_int_cst_sgn (c
) < 0)
5707 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
5708 if (sub_strict_overflow_p
)
5709 *strict_overflow_p
= true;
5710 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5711 fold_convert (ctype
, t2
));
5715 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
5722 if (TREE_CODE (op1
) == INTEGER_CST
5723 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
5724 /* const_binop may not detect overflow correctly,
5725 so check for it explicitly here. */
5726 && TYPE_PRECISION (TREE_TYPE (size_one_node
)) > TREE_INT_CST_LOW (op1
)
5727 && TREE_INT_CST_HIGH (op1
) == 0
5728 && 0 != (t1
= fold_convert (ctype
,
5729 const_binop (LSHIFT_EXPR
,
5732 && !TREE_OVERFLOW (t1
))
5733 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
5734 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
5736 fold_convert (ctype
, op0
),
5738 c
, code
, wide_type
, strict_overflow_p
);
5741 case PLUS_EXPR
: case MINUS_EXPR
:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
5746 sub_strict_overflow_p
= false;
5747 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5748 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5749 if (t1
!= 0 && t2
!= 0
5750 && (code
== MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
5753 || (multiple_of_p (ctype
, op0
, c
)
5754 && multiple_of_p (ctype
, op1
, c
))))
5756 if (sub_strict_overflow_p
)
5757 *strict_overflow_p
= true;
5758 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5759 fold_convert (ctype
, t2
));
      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
5764 if (tcode
== MINUS_EXPR
)
5766 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
5767 /* If OP1 was not easily negatable, the constant may be OP0. */
5768 if (TREE_CODE (op0
) == INTEGER_CST
)
5779 if (TREE_CODE (op1
) != INTEGER_CST
)
      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
5785 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
5787 if (code
== CEIL_DIV_EXPR
)
5788 code
= FLOOR_DIV_EXPR
;
5789 else if (code
== FLOOR_DIV_EXPR
)
5790 code
= CEIL_DIV_EXPR
;
5791 else if (code
!= MULT_EXPR
5792 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
5798 if (code
== MULT_EXPR
5799 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
)))
5801 op1
= const_binop (code
, fold_convert (ctype
, op1
),
5802 fold_convert (ctype
, c
));
5803 /* We allow the constant to overflow with wrapping semantics. */
5805 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
5814 if (TYPE_UNSIGNED (ctype
)
5815 && ! (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
))
      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
5821 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
5822 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
5827 if (code
== MULT_EXPR
)
5828 return fold_build2 (tcode
, ctype
,
5829 fold_build2 (code
, ctype
,
5830 fold_convert (ctype
, op0
),
5831 fold_convert (ctype
, c
)),
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
5839 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
5840 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
	 /* If the multiplication can overflow we cannot optimize this.
	    ??? Until we can properly mark individual operations as
	    not overflowing we need to treat sizetype special here as
	    stor-layout relies on this optimization to make
	    DECL_FIELD_BIT_OFFSET always a constant.  */
5846 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
5847 || (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
5848 && TYPE_IS_SIZETYPE (TREE_TYPE (t
))))
5849 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
5850 && integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
)))
5852 *strict_overflow_p
= true;
5853 return omit_one_operand (type
, integer_zero_node
, op0
);
5856 /* ... fall through ... */
5858 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
5859 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
5864 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5865 strict_overflow_p
)) != 0)
5866 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5867 fold_convert (ctype
, op1
));
5868 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
5869 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
5870 strict_overflow_p
)) != 0)
5871 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5872 fold_convert (ctype
, t1
));
5873 else if (TREE_CODE (op1
) != INTEGER_CST
)
      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
5882 mul
= double_int_mul_with_sign
5884 (tree_to_double_int (op1
),
5885 TYPE_PRECISION (ctype
), TYPE_UNSIGNED (ctype
)),
5887 (tree_to_double_int (c
),
5888 TYPE_PRECISION (ctype
), TYPE_UNSIGNED (ctype
)),
5889 false, &overflow_p
);
5890 overflow_p
= (((!TYPE_UNSIGNED (ctype
)
5891 || (TREE_CODE (ctype
) == INTEGER_TYPE
5892 && TYPE_IS_SIZETYPE (ctype
)))
5894 | TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
));
5895 if (!double_int_fits_to_tree_p (ctype
, mul
)
5896 && ((TYPE_UNSIGNED (ctype
) && tcode
!= MULT_EXPR
)
5897 || !TYPE_UNSIGNED (ctype
)
5898 || (TREE_CODE (ctype
) == INTEGER_TYPE
5899 && TYPE_IS_SIZETYPE (ctype
))))
5902 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5903 double_int_to_tree (ctype
, mul
));
      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation or CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
5914 if ((TYPE_OVERFLOW_UNDEFINED (ctype
)
5915 || (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
)))
5916 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
5917 || (tcode
== MULT_EXPR
5918 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
5919 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
5920 && code
!= MULT_EXPR
)))
5922 if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
)))
5924 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
5925 *strict_overflow_p
= true;
5926 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5927 fold_convert (ctype
,
5928 const_binop (TRUNC_DIV_EXPR
,
5931 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, c
, op1
)))
5933 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
5934 *strict_overflow_p
= true;
5935 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
5936 fold_convert (ctype
,
5937 const_binop (TRUNC_DIV_EXPR
,
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;

  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;

  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
				  build_int_cst (TREE_TYPE (type),
						 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
5985 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
5986 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
5987 tree test
, true_value
, false_value
;
5988 tree lhs
= NULL_TREE
;
5989 tree rhs
= NULL_TREE
;
5991 if (TREE_CODE (cond
) == COND_EXPR
)
5993 test
= TREE_OPERAND (cond
, 0);
5994 true_value
= TREE_OPERAND (cond
, 1);
5995 false_value
= TREE_OPERAND (cond
, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
5999 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
6001 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
6006 tree testtype
= TREE_TYPE (cond
);
6008 true_value
= constant_boolean_node (true, testtype
);
6009 false_value
= constant_boolean_node (false, testtype
);
6012 /* This transformation is only worthwhile if we don't have to wrap ARG
6013 in a SAVE_EXPR and the operation can be simplified on at least one
6014 of the branches once its pushed inside the COND_EXPR. */
6015 if (!TREE_CONSTANT (arg
)
6016 && (TREE_SIDE_EFFECTS (arg
)
6017 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
6020 arg
= fold_convert_loc (loc
, arg_type
, arg
);
6023 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
6025 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
6027 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
6031 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
6033 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
6035 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
6038 /* Check that we have simplified at least one of the branches. */
6039 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
6042 return fold_build3_loc (loc
, COND_EXPR
, type
, test
, lhs
, rhs
);
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
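
/* As an illustration of the signed-zero cases above (a sketch, assuming
   IEEE semantics and round-to-nearest; not exhaustive):

     x - 0.0  ->  x    is safe:    (-0.0) - 0.0 == -0.0
     x + 0.0  ->  x    is unsafe:  (-0.0) + 0.0 == +0.0, which differs from x

   which is why true is returned only for the NEGATE form, and only when
   sign-dependent rounding need not be honored.  */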
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  arg = save_expr (arg);
	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
						   build_real (TREE_TYPE (arg),
							       dconst0)),
				  fold_build2_loc (loc, code, type, arg,
						   build_real (TREE_TYPE (arg),
							       c2)));
	}
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  double_int val;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
				   TREE_INT_CST_HIGH (arg01),
				   TREE_INT_CST_LOW (arg1),
				   TREE_INT_CST_HIGH (arg1),
				   &val.low, &val.high, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
				       TREE_INT_CST_HIGH (prod),
				       TREE_INT_CST_LOW (tmp),
				       TREE_INT_CST_HIGH (tmp),
				       &val.low, &val.high, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), val,
				  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	case  0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case  1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	case  0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case  1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
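
/* A worked instance of the range check built above, assuming signed,
   truncating division (illustrative only):

     X / 4 == 2   becomes   8 <= X && X <= 11
     X / 4 <  2   becomes   X < 8

   since ARG01 = 4 and ARG1 = 2 give prod = 8, tmp = 3, lo = 8 and hi = 11.  */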
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
				 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
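
/* A concrete instance of the shift form produced above (illustrative;
   T stands for the intermediate type chosen by the code):

     (A & 8) != 0   becomes   ((T) A >> 3) & 1
     (A & 8) == 0   becomes   (((T) A >> 3) ^ 1) & 1

   where 3 = log2 (8).  */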
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
       || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
				  0, TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  MULT is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /*  Strip the nops that might be added when converting op1 to sizetype. */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Handle &x.array the same as we would handle &x.array[0].  */
  if (TREE_CODE (ref) == COMPONENT_REF
      && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
    {
      tree domain;

      /* Remember if this was a multi-dimensional array.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	mdim = true;

      domain = TYPE_DOMAIN (TREE_TYPE (ref));
      if (! domain)
	goto cont;
      itype = TREE_TYPE (domain);

      step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
      if (TREE_CODE (step) != INTEGER_CST)
	goto cont;

      if (s)
	{
	  if (! tree_int_cst_equal (step, s))
	    goto cont;
	}
      else
	{
	  /* Try if delta is a multiple of step.  */
	  tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	  if (! tmp)
	    goto cont;
	  delta = tmp;
	}

      /* Only fold here if we can verify we do not overflow one
	 dimension of a multi-dimensional array.  */
      if (mdim)
	{
	  tree tmp;

	  if (!TYPE_MIN_VALUE (domain)
	      || !TYPE_MAX_VALUE (domain)
	      || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
	    goto cont;

	  tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				 fold_convert_loc (loc, itype,
						   TYPE_MIN_VALUE (domain)),
				 fold_convert_loc (loc, itype, delta));
	  if (TREE_CODE (tmp) != INTEGER_CST
	      || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
	    goto cont;
	}

      /* We found a suitable component reference.  */

      pref = TREE_OPERAND (addr, 0);
      ret = copy_node (pref);
      SET_EXPR_LOCATION (ret, loc);

      ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
			fold_build2_loc
			  (loc, PLUS_EXPR, itype,
			   fold_convert_loc (loc, itype,
					     TYPE_MIN_VALUE
					       (TYPE_DOMAIN (TREE_TYPE (ref)))),
			   fold_convert_loc (loc, itype, delta)),
			NULL_TREE, NULL_TREE);
      return build_fold_addr_expr_loc (loc, ret);
    }

cont:

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1)
    = fold_build2_loc (loc, PLUS_EXPR, itype,
		       fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
		       fold_convert_loc (loc, itype, delta));
  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = alt0;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
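
/* Two illustrative applications of the above (not exhaustive):

     A * C + B * C   ->  (A + B) * C         identical multiplicands
     i * 12 + j * 4  ->  (i * 3 + j) * 4     common power-of-two factor

   the second form is the one that helps multi-dimensional array
   indexing.  */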
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
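
/* Byte-order illustration (assuming a 32-bit constant that fits in one
   word): encoding 0x11223344 stores 44 33 22 11 into PTR on a
   little-endian target and 11 22 33 44 on a big-endian one; the word
   shuffling above only matters when TOTAL_BYTES exceeds UNITS_PER_WORD.  */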
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  unsigned i, count;
  int size, offset;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      elem = VECTOR_CST_ELT (expr, i);
      if (native_encode_expr (elem, ptr+offset, len-offset) != size)
	return 0;
      offset += size;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
    return 0;
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
	      total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  double_int result;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  result = double_int_zero;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
	result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
	result.high |= (unsigned HOST_WIDE_INT) value
		       << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return double_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
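
/* A minimal usage sketch (illustrative, assuming a 32-bit integer TYPE
   and an IEEE single-precision EXPR): view-converting the REAL_CST 1.0f
   yields the INTEGER_CST 0x3f800000, because the float is serialized
   into BUFFER by native_encode_expr and read back as an integer by
   native_interpret_expr.  */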
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
static bool vec_cst_ctor_to_array (tree, tree *);

/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 1);
7670 if (CONVERT_EXPR_CODE_P (code
)
7671 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7673 /* Don't use STRIP_NOPS, because signedness of argument type
7675 STRIP_SIGN_NOPS (arg0
);
7679 /* Strip any conversions that don't change the mode. This
7680 is safe for every expression, except for a comparison
7681 expression because its signedness is derived from its
7684 Note that this is done as an internal manipulation within
7685 the constant folder, in order to find the simplest
7686 representation of the arguments so that their form can be
7687 studied. In any cases, the appropriate type conversions
7688 should be put back in the tree that will get out of the
7694 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7696 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7697 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7698 fold_build1_loc (loc
, code
, type
,
7699 fold_convert_loc (loc
, TREE_TYPE (op0
),
7700 TREE_OPERAND (arg0
, 1))));
7701 else if (TREE_CODE (arg0
) == COND_EXPR
)
7703 tree arg01
= TREE_OPERAND (arg0
, 1);
7704 tree arg02
= TREE_OPERAND (arg0
, 2);
7705 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7706 arg01
= fold_build1_loc (loc
, code
, type
,
7707 fold_convert_loc (loc
,
7708 TREE_TYPE (op0
), arg01
));
7709 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7710 arg02
= fold_build1_loc (loc
, code
, type
,
7711 fold_convert_loc (loc
,
7712 TREE_TYPE (op0
), arg02
));
7713 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7716 /* If this was a conversion, and all we did was to move into
7717 inside the COND_EXPR, bring it back out. But leave it if
7718 it is a conversion from integer to integer and the
7719 result precision is no wider than a word since such a
7720 conversion is cheap and may be optimized away by combine,
7721 while it couldn't if it were outside the COND_EXPR. Then return
7722 so we don't get into an infinite recursion loop taking the
7723 conversion out and then back in. */
7725 if ((CONVERT_EXPR_CODE_P (code
)
7726 || code
== NON_LVALUE_EXPR
)
7727 && TREE_CODE (tem
) == COND_EXPR
7728 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7729 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7730 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7731 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7732 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7733 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7734 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7736 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7737 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7738 || flag_syntax_only
))
7739 tem
= build1_loc (loc
, code
, type
,
7741 TREE_TYPE (TREE_OPERAND
7742 (TREE_OPERAND (tem
, 1), 0)),
7743 TREE_OPERAND (tem
, 0),
7744 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7745 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7754 /* Re-association barriers around constants and other re-association
7755 barriers can be removed. */
7756 if (CONSTANT_CLASS_P (op0
)
7757 || TREE_CODE (op0
) == PAREN_EXPR
)
7758 return fold_convert_loc (loc
, type
, op0
);
7763 case FIX_TRUNC_EXPR
:
7764 if (TREE_TYPE (op0
) == type
)
7767 if (COMPARISON_CLASS_P (op0
))
7769 /* If we have (type) (a CMP b) and type is an integral type, return
7770 new expression involving the new type. Canonicalize
7771 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7773 Do not fold the result as that would not simplify further, also
7774 folding again results in recursions. */
7775 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7776 return build2_loc (loc
, TREE_CODE (op0
), type
,
7777 TREE_OPERAND (op0
, 0),
7778 TREE_OPERAND (op0
, 1));
7779 else if (!INTEGRAL_TYPE_P (type
))
7780 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7781 constant_boolean_node (true, type
),
7782 constant_boolean_node (false, type
));
7785 /* Handle cases of two conversions in a row. */
7786 if (CONVERT_EXPR_P (op0
))
7788 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7789 tree inter_type
= TREE_TYPE (op0
);
7790 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7791 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7792 int inside_float
= FLOAT_TYPE_P (inside_type
);
7793 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7794 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7795 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7796 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7797 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7798 int inter_float
= FLOAT_TYPE_P (inter_type
);
7799 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7800 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7801 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7802 int final_int
= INTEGRAL_TYPE_P (type
);
7803 int final_ptr
= POINTER_TYPE_P (type
);
7804 int final_float
= FLOAT_TYPE_P (type
);
7805 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7806 unsigned int final_prec
= TYPE_PRECISION (type
);
7807 int final_unsignedp
= TYPE_UNSIGNED (type
);
7809 /* In addition to the cases of two conversions in a row
7810 handled below, if we are converting something to its own
7811 type via an object of identical or wider precision, neither
7812 conversion is needed. */
7813 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7814 && (((inter_int
|| inter_ptr
) && final_int
)
7815 || (inter_float
&& final_float
))
7816 && inter_prec
>= final_prec
)
7817 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7819 /* Likewise, if the intermediate and initial types are either both
7820 float or both integer, we don't need the middle conversion if the
7821 former is wider than the latter and doesn't change the signedness
7822 (for integers). Avoid this if the final type is a pointer since
7823 then we sometimes need the middle conversion. Likewise if the
7824 final type has a precision not equal to the size of its mode. */
7825 if (((inter_int
&& inside_int
)
7826 || (inter_float
&& inside_float
)
7827 || (inter_vec
&& inside_vec
))
7828 && inter_prec
>= inside_prec
7829 && (inter_float
|| inter_vec
7830 || inter_unsignedp
== inside_unsignedp
)
7831 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7832 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7834 && (! final_vec
|| inter_prec
== inside_prec
))
7835 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7837 /* If we have a sign-extension of a zero-extended value, we can
7838 replace that by a single zero-extension. Likewise if the
7839 final conversion does not change precision we can drop the
7840 intermediate conversion. */
7841 if (inside_int
&& inter_int
&& final_int
7842 && ((inside_prec
< inter_prec
&& inter_prec
< final_prec
7843 && inside_unsignedp
&& !inter_unsignedp
)
7844 || final_prec
== inter_prec
))
7845 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7847 /* Two conversions in a row are not needed unless:
7848 - some conversion is floating-point (overstrict for now), or
7849 - some conversion is a vector (overstrict for now), or
7850 - the intermediate type is narrower than both initial and
7852 - the intermediate type and innermost type differ in signedness,
7853 and the outermost type is wider than the intermediate, or
7854 - the initial type is a pointer type and the precisions of the
7855 intermediate and final types differ, or
7856 - the final type is a pointer type and the precisions of the
7857 initial and intermediate types differ. */
7858 if (! inside_float
&& ! inter_float
&& ! final_float
7859 && ! inside_vec
&& ! inter_vec
&& ! final_vec
7860 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
7861 && ! (inside_int
&& inter_int
7862 && inter_unsignedp
!= inside_unsignedp
7863 && inter_prec
< final_prec
)
7864 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
7865 == (final_unsignedp
&& final_prec
> inter_prec
))
7866 && ! (inside_ptr
&& inter_prec
!= final_prec
)
7867 && ! (final_ptr
&& inside_prec
!= inter_prec
)
7868 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7869 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
7870 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7873 /* Handle (T *)&A.B.C for A being of type T and B and C
7874 living at offset zero. This occurs frequently in
7875 C++ upcasting and then accessing the base. */
7876 if (TREE_CODE (op0
) == ADDR_EXPR
7877 && POINTER_TYPE_P (type
)
7878 && handled_component_p (TREE_OPERAND (op0
, 0)))
7880 HOST_WIDE_INT bitsize
, bitpos
;
7882 enum machine_mode mode
;
7883 int unsignedp
, volatilep
;
7884 tree base
= TREE_OPERAND (op0
, 0);
7885 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7886 &mode
, &unsignedp
, &volatilep
, false);
7887 /* If the reference was to a (constant) zero offset, we can use
7888 the address of the base if it has the same base type
7889 as the result type and the pointer type is unqualified. */
7890 if (! offset
&& bitpos
== 0
7891 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7892 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7893 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7894 return fold_convert_loc (loc
, type
,
7895 build_fold_addr_expr_loc (loc
, base
));
7898 if (TREE_CODE (op0
) == MODIFY_EXPR
7899 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7900 /* Detect assigning a bitfield. */
7901 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7903 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7905 /* Don't leave an assignment inside a conversion
7906 unless assigning a bitfield. */
7907 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7908 /* First do the assignment, then return converted constant. */
7909 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7910 TREE_NO_WARNING (tem
) = 1;
7911 TREE_USED (tem
) = 1;
7915 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7916 constants (if x has signed type, the sign bit cannot be set
7917 in c). This folds extension into the BIT_AND_EXPR.
7918 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7919 very likely don't have maximal range for their precision and this
7920 transformation effectively doesn't preserve non-maximal ranges. */
7921 if (TREE_CODE (type
) == INTEGER_TYPE
7922 && TREE_CODE (op0
) == BIT_AND_EXPR
7923 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7925 tree and_expr
= op0
;
7926 tree and0
= TREE_OPERAND (and_expr
, 0);
7927 tree and1
= TREE_OPERAND (and_expr
, 1);
7930 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7931 || (TYPE_PRECISION (type
)
7932 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7934 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7935 <= HOST_BITS_PER_WIDE_INT
7936 && host_integerp (and1
, 1))
7938 unsigned HOST_WIDE_INT cst
;
7940 cst
= tree_low_cst (and1
, 1);
7941 cst
&= (HOST_WIDE_INT
) -1
7942 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7943 change
= (cst
== 0);
7944 #ifdef LOAD_EXTEND_OP
7946 && !flag_syntax_only
7947 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7950 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7951 and0
= fold_convert_loc (loc
, uns
, and0
);
7952 and1
= fold_convert_loc (loc
, uns
, and1
);
7958 tem
= force_fit_type_double (type
, tree_to_double_int (and1
),
7959 0, TREE_OVERFLOW (and1
));
7960 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7961 fold_convert_loc (loc
, type
, and0
), tem
);
7965 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7966 when one of the new casts will fold away. Conservatively we assume
7967 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7968 if (POINTER_TYPE_P (type
)
7969 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7970 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
7971 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7972 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7973 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7975 tree arg00
= TREE_OPERAND (arg0
, 0);
7976 tree arg01
= TREE_OPERAND (arg0
, 1);
7978 return fold_build_pointer_plus_loc
7979 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7982 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7983 of the same precision, and X is an integer type not narrower than
7984 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7985 if (INTEGRAL_TYPE_P (type
)
7986 && TREE_CODE (op0
) == BIT_NOT_EXPR
7987 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7988 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7989 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7991 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7992 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7993 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7994 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7995 fold_convert_loc (loc
, type
, tem
));
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}
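      /* For instance, truncating a 32-bit product to 8 bits gives the same
	 low bits as multiplying the truncated operands, since the result is
	 only needed modulo 256; that is also why an unsigned (wrapping) type
	 is used for the narrow multiplication when TYPE itself could
	 overflow.  */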
      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;
    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
	return fold_build2_loc (loc, MEM_REF, type,
				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);
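      /* For instance, viewing an int as an unsigned int of the same
	 precision just relabels the same bits, so a plain NOP_EXPR is
	 enough and is easier for later folders to look through.  */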
      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;

    case CONJ_EXPR:
8106 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8107 return fold_convert_loc (loc
, type
, arg0
);
8108 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8110 tree itype
= TREE_TYPE (type
);
8111 tree rpart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 0));
8112 tree ipart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 1));
8113 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
,
8114 negate_expr (ipart
));
8116 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8118 tree itype
= TREE_TYPE (type
);
8119 tree rpart
= fold_convert_loc (loc
, itype
, TREE_REALPART (arg0
));
8120 tree ipart
= fold_convert_loc (loc
, itype
, TREE_IMAGPART (arg0
));
8121 return build_complex (type
, rpart
, negate_expr (ipart
));
8123 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8124 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
8128 if (TREE_CODE (arg0
) == INTEGER_CST
)
8129 return fold_not_const (arg0
, type
);
8130 else if (TREE_CODE (arg0
) == BIT_NOT_EXPR
)
8131 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
8132 /* Convert ~ (-A) to A - 1. */
8133 else if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == NEGATE_EXPR
)
8134 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
8135 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0)),
8136 build_int_cst (type
, 1));
8137 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8138 else if (INTEGRAL_TYPE_P (type
)
8139 && ((TREE_CODE (arg0
) == MINUS_EXPR
8140 && integer_onep (TREE_OPERAND (arg0
, 1)))
8141 || (TREE_CODE (arg0
) == PLUS_EXPR
8142 && integer_all_onesp (TREE_OPERAND (arg0
, 1)))))
8143 return fold_build1_loc (loc
, NEGATE_EXPR
, type
,
8144 fold_convert_loc (loc
, type
,
8145 TREE_OPERAND (arg0
, 0)));
8146 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8147 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8148 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8149 fold_convert_loc (loc
, type
,
8150 TREE_OPERAND (arg0
, 0)))))
8151 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
8152 fold_convert_loc (loc
, type
,
8153 TREE_OPERAND (arg0
, 1)));
8154 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8155 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8156 fold_convert_loc (loc
, type
,
8157 TREE_OPERAND (arg0
, 1)))))
8158 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
8159 fold_convert_loc (loc
, type
,
8160 TREE_OPERAND (arg0
, 0)), tem
);
8161 /* Perform BIT_NOT_EXPR on each element individually. */
8162 else if (TREE_CODE (arg0
) == VECTOR_CST
)
8166 unsigned count
= VECTOR_CST_NELTS (arg0
), i
;
8168 elements
= XALLOCAVEC (tree
, count
);
8169 for (i
= 0; i
< count
; i
++)
8171 elem
= VECTOR_CST_ELT (arg0
, i
);
8172 elem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
8173 if (elem
== NULL_TREE
)
8178 return build_vector (type
, elements
);
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert_loc (loc, boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);

    case REALPART_EXPR:
8198 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8199 return fold_convert_loc (loc
, type
, arg0
);
8200 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8201 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 0),
8202 TREE_OPERAND (arg0
, 1));
8203 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8204 return fold_convert_loc (loc
, type
, TREE_REALPART (arg0
));
8205 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8207 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8208 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8209 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8210 TREE_OPERAND (arg0
, 0)),
8211 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8212 TREE_OPERAND (arg0
, 1)));
8213 return fold_convert_loc (loc
, type
, tem
);
8215 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8217 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8218 tem
= fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8219 TREE_OPERAND (arg0
, 0));
8220 return fold_convert_loc (loc
, type
, tem
);
8222 if (TREE_CODE (arg0
) == CALL_EXPR
)
8224 tree fn
= get_callee_fndecl (arg0
);
8225 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8226 switch (DECL_FUNCTION_CODE (fn
))
8228 CASE_FLT_FN (BUILT_IN_CEXPI
):
8229 fn
= mathfn_built_in (type
, BUILT_IN_COS
);
8231 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8241 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8242 return build_zero_cst (type
);
8243 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8244 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 1),
8245 TREE_OPERAND (arg0
, 0));
8246 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8247 return fold_convert_loc (loc
, type
, TREE_IMAGPART (arg0
));
8248 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8250 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8251 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8252 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8253 TREE_OPERAND (arg0
, 0)),
8254 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8255 TREE_OPERAND (arg0
, 1)));
8256 return fold_convert_loc (loc
, type
, tem
);
8258 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8260 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8261 tem
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
8262 return fold_convert_loc (loc
, type
, negate_expr (tem
));
8264 if (TREE_CODE (arg0
) == CALL_EXPR
)
8266 tree fn
= get_callee_fndecl (arg0
);
8267 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8268 switch (DECL_FUNCTION_CODE (fn
))
8270 CASE_FLT_FN (BUILT_IN_CEXPI
):
8271 fn
= mathfn_built_in (type
, BUILT_IN_SIN
);
8273 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8283 /* Fold *&X to X if X is an lvalue. */
8284 if (TREE_CODE (op0
) == ADDR_EXPR
)
8286 tree op00
= TREE_OPERAND (op0
, 0);
8287 if ((TREE_CODE (op00
) == VAR_DECL
8288 || TREE_CODE (op00
) == PARM_DECL
8289 || TREE_CODE (op00
) == RESULT_DECL
)
8290 && !TREE_READONLY (op00
))
8295 case VEC_UNPACK_LO_EXPR
:
8296 case VEC_UNPACK_HI_EXPR
:
8297 case VEC_UNPACK_FLOAT_LO_EXPR
:
8298 case VEC_UNPACK_FLOAT_HI_EXPR
:
8300 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
8302 enum tree_code subcode
;
8304 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2);
8305 if (TREE_CODE (arg0
) != VECTOR_CST
)
8308 elts
= XALLOCAVEC (tree
, nelts
* 2);
8309 if (!vec_cst_ctor_to_array (arg0
, elts
))
8312 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
8313 || code
== VEC_UNPACK_FLOAT_LO_EXPR
))
8316 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
8319 subcode
= FLOAT_EXPR
;
8321 for (i
= 0; i
< nelts
; i
++)
8323 elts
[i
] = fold_convert_const (subcode
, TREE_TYPE (type
), elts
[i
]);
8324 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
8328 return build_vector (type
, elts
);
8333 } /* switch (code) */
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
  return res;
}
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;
8366 /* We only do these simplifications if we are optimizing. */
8370 /* Check for things like (A || B) && (A || C). We can convert this
8371 to A || (B && C). Note that either operator can be any of the four
8372 truth and/or operations and the transformation will still be
8373 valid. Also note that we only care about order for the
8374 ANDIF and ORIF operators. If B contains side effects, this
8375 might change the truth-value of A. */
8376 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8377 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8378 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8379 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8380 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8381 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8383 tree a00
= TREE_OPERAND (arg0
, 0);
8384 tree a01
= TREE_OPERAND (arg0
, 1);
8385 tree a10
= TREE_OPERAND (arg1
, 0);
8386 tree a11
= TREE_OPERAND (arg1
, 1);
8387 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8388 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8389 && (code
== TRUTH_AND_EXPR
8390 || code
== TRUTH_OR_EXPR
));
8392 if (operand_equal_p (a00
, a10
, 0))
8393 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8394 fold_build2_loc (loc
, code
, type
, a01
, a11
));
8395 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8396 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8397 fold_build2_loc (loc
, code
, type
, a01
, a10
));
8398 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8399 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
8400 fold_build2_loc (loc
, code
, type
, a00
, a11
));
      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */
8405 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8406 && operand_equal_p (a01
, a11
, 0))
8407 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
8408 fold_build2_loc (loc
, code
, type
, a00
, a10
),
8412 /* See if we can build a range comparison. */
8413 if (0 != (tem
= fold_range_test (loc
, code
, type
, op0
, op1
)))
8416 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
8417 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
8419 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
8421 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
8424 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
8425 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
8427 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
8429 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
8432 /* Check for the possibility of merging component references. If our
8433 lhs is another similar operation, try to merge its rhs with our
8434 rhs. Then try to merge our lhs and rhs. */
8435 if (TREE_CODE (arg0
) == code
8436 && 0 != (tem
= fold_truth_andor_1 (loc
, code
, type
,
8437 TREE_OPERAND (arg0
, 1), arg1
)))
8438 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8440 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
8443 if ((BRANCH_COST (optimize_function_for_speed_p (cfun
),
8445 && LOGICAL_OP_NON_SHORT_CIRCUIT
8446 && (code
== TRUTH_AND_EXPR
8447 || code
== TRUTH_ANDIF_EXPR
8448 || code
== TRUTH_OR_EXPR
8449 || code
== TRUTH_ORIF_EXPR
))
8451 enum tree_code ncode
, icode
;
8453 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
8454 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
8455 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
8457 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8458 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8459 We don't want to pack more than two leafs to a non-IF AND/OR
8461 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8462 equal to IF-CODE, then we don't want to add right-hand operand.
8463 If the inner right-hand side of left-hand operand has
8464 side-effects, or isn't simple, then we can't add to it,
8465 as otherwise we might destroy if-sequence. */
8466 if (TREE_CODE (arg0
) == icode
8467 && simple_operand_p_2 (arg1
)
8468 /* Needed for sequence points to handle trappings, and
8470 && simple_operand_p_2 (TREE_OPERAND (arg0
, 1)))
8472 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
8474 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8479 else if (TREE_CODE (arg1
) == icode
8480 && simple_operand_p_2 (arg0
)
8481 /* Needed for sequence points to handle trappings, and
8483 && simple_operand_p_2 (TREE_OPERAND (arg1
, 0)))
8485 tem
= fold_build2_loc (loc
, ncode
, type
,
8486 arg0
, TREE_OPERAND (arg1
, 0));
8487 return fold_build2_loc (loc
, icode
, type
, tem
,
8488 TREE_OPERAND (arg1
, 1));
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
8494 else if (code
== icode
&& simple_operand_p_2 (arg0
)
8495 && simple_operand_p_2 (arg1
))
8496 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
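      /* For instance, on a target where branches are costly, "a != 0 && b > 2"
	 with simple, side-effect-free operands becomes the non-short-circuit
	 TRUTH_AND_EXPR, which evaluates both comparisons unconditionally and
	 avoids the extra branch of the short-circuit form.  */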
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
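/* For instance, MIN (MAX (a, b), b) always evaluates to b: if a >= b the
   inner MAX yields a and the outer MIN picks b again, and if a < b both
   steps yield b.  The remaining patterns above are the commuted and
   mirrored forms of the same identity.  */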
8545 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8546 by changing CODE to reduce the magnitude of constants involved in
8547 ARG0 of the comparison.
8548 Returns a canonicalized comparison tree if a simplification was
8549 possible, otherwise returns NULL_TREE.
8550 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8551 valid if signed overflow is undefined. */
8554 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
8555 tree arg0
, tree arg1
,
8556 bool *strict_overflow_p
)
8558 enum tree_code code0
= TREE_CODE (arg0
);
8559 tree t
, cst0
= NULL_TREE
;
8563 /* Match A +- CST code arg1 and CST code arg1. We can change the
8564 first form only if overflow is undefined. */
8565 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8566 /* In principle pointers also have undefined overflow behavior,
8567 but that causes problems elsewhere. */
8568 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
8569 && (code0
== MINUS_EXPR
8570 || code0
== PLUS_EXPR
)
8571 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8572 || code0
== INTEGER_CST
))
8575 /* Identify the constant in arg0 and its sign. */
8576 if (code0
== INTEGER_CST
)
8579 cst0
= TREE_OPERAND (arg0
, 1);
8580 sgn0
= tree_int_cst_sgn (cst0
);
8582 /* Overflowed constants and zero will cause problems. */
8583 if (integer_zerop (cst0
)
8584 || TREE_OVERFLOW (cst0
))
8587 /* See if we can reduce the magnitude of the constant in
8588 arg0 by changing the comparison code. */
8589 if (code0
== INTEGER_CST
)
8591 /* CST <= arg1 -> CST-1 < arg1. */
8592 if (code
== LE_EXPR
&& sgn0
== 1)
8594 /* -CST < arg1 -> -CST-1 <= arg1. */
8595 else if (code
== LT_EXPR
&& sgn0
== -1)
8597 /* CST > arg1 -> CST-1 >= arg1. */
8598 else if (code
== GT_EXPR
&& sgn0
== 1)
8600 /* -CST >= arg1 -> -CST-1 > arg1. */
8601 else if (code
== GE_EXPR
&& sgn0
== -1)
8605 /* arg1 code' CST' might be more canonical. */
8610 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8612 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8614 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8615 else if (code
== GT_EXPR
8616 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8618 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8619 else if (code
== LE_EXPR
8620 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8622 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8623 else if (code
== GE_EXPR
8624 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8628 *strict_overflow_p
= true;
8631 /* Now build the constant reduced in magnitude. But not if that
8632 would produce one outside of its types range. */
8633 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
8635 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
8636 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
8638 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
8639 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
8640 /* We cannot swap the comparison here as that would cause us to
8641 endlessly recurse. */
8644 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
8645 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
8646 if (code0
!= INTEGER_CST
)
8647 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
8648 t
= fold_convert (TREE_TYPE (arg1
), t
);
  /* If swapping might yield a more canonical form, do so.  */
8652 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, arg1
, t
);
8654 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
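  /* For instance, "5 <= x" is canonicalized to "4 < x", and for undefined
     signed overflow "x - 5 < y" may become "x - 4 <= y"; shrinking the
     constant this way gives later passes fewer distinct forms to match.  */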
8657 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8658 overflow further. Try to decrease the magnitude of constants involved
8659 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8660 and put sole constants at the second argument position.
8661 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8664 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
8665 tree arg0
, tree arg1
)
8668 bool strict_overflow_p
;
8669 const char * const warnmsg
= G_("assuming signed overflow does not occur "
8670 "when reducing constant in comparison");
8672 /* Try canonicalization by simplifying arg0. */
8673 strict_overflow_p
= false;
8674 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
8675 &strict_overflow_p
);
8678 if (strict_overflow_p
)
8679 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8683 /* Try canonicalization by simplifying arg1 using the swapped
8685 code
= swap_tree_comparison (code
);
8686 strict_overflow_p
= false;
8687 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
8688 &strict_overflow_p
);
8689 if (t
&& strict_overflow_p
)
8690 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8694 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8695 space. This is used to avoid issuing overflow warnings for
8696 expressions like &p->x which can not wrap. */
8699 pointer_may_wrap_p (tree base
, tree offset
, HOST_WIDE_INT bitpos
)
8701 unsigned HOST_WIDE_INT offset_low
, total_low
;
8702 HOST_WIDE_INT size
, offset_high
, total_high
;
8704 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
8710 if (offset
== NULL_TREE
)
8715 else if (TREE_CODE (offset
) != INTEGER_CST
|| TREE_OVERFLOW (offset
))
8719 offset_low
= TREE_INT_CST_LOW (offset
);
8720 offset_high
= TREE_INT_CST_HIGH (offset
);
8723 if (add_double_with_sign (offset_low
, offset_high
,
8724 bitpos
/ BITS_PER_UNIT
, 0,
8725 &total_low
, &total_high
,
8729 if (total_high
!= 0)
8732 size
= int_size_in_bytes (TREE_TYPE (TREE_TYPE (base
)));
8736 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8738 if (TREE_CODE (base
) == ADDR_EXPR
)
8740 HOST_WIDE_INT base_size
;
8742 base_size
= int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base
, 0)));
8743 if (base_size
> 0 && size
< base_size
)
8747 return total_low
> (unsigned HOST_WIDE_INT
) size
;
8750 /* Subroutine of fold_binary. This routine performs all of the
8751 transformations that are common to the equality/inequality
8752 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8753 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8754 fold_binary should call fold_binary. Fold a comparison with
8755 tree code CODE and type TYPE with operands OP0 and OP1. Return
8756 the folded comparison or NULL_TREE. */
8759 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8762 tree arg0
, arg1
, tem
;
8767 STRIP_SIGN_NOPS (arg0
);
8768 STRIP_SIGN_NOPS (arg1
);
8770 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
8771 if (tem
!= NULL_TREE
)
8774 /* If one arg is a real or integer constant, put it last. */
8775 if (tree_swap_operands_p (arg0
, arg1
, true))
8776 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
8778 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8779 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8780 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8781 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8782 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
8783 && (TREE_CODE (arg1
) == INTEGER_CST
8784 && !TREE_OVERFLOW (arg1
)))
8786 tree const1
= TREE_OPERAND (arg0
, 1);
8788 tree variable
= TREE_OPERAND (arg0
, 0);
8791 lhs_add
= TREE_CODE (arg0
) != PLUS_EXPR
;
8793 lhs
= fold_build2_loc (loc
, lhs_add
? PLUS_EXPR
: MINUS_EXPR
,
8794 TREE_TYPE (arg1
), const2
, const1
);
8796 /* If the constant operation overflowed this can be
8797 simplified as a comparison against INT_MAX/INT_MIN. */
8798 if (TREE_CODE (lhs
) == INTEGER_CST
8799 && TREE_OVERFLOW (lhs
))
8801 int const1_sgn
= tree_int_cst_sgn (const1
);
8802 enum tree_code code2
= code
;
8804 /* Get the sign of the constant on the lhs if the
8805 operation were VARIABLE + CONST1. */
8806 if (TREE_CODE (arg0
) == MINUS_EXPR
)
8807 const1_sgn
= -const1_sgn
;
8809 /* The sign of the constant determines if we overflowed
8810 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8811 Canonicalize to the INT_MIN overflow by swapping the comparison
8813 if (const1_sgn
== -1)
8814 code2
= swap_tree_comparison (code
);
8816 /* We now can look at the canonicalized case
8817 VARIABLE + 1 CODE2 INT_MIN
8818 and decide on the result. */
8819 if (code2
== LT_EXPR
8821 || code2
== EQ_EXPR
)
8822 return omit_one_operand_loc (loc
, type
, boolean_false_node
, variable
);
8823 else if (code2
== NE_EXPR
8825 || code2
== GT_EXPR
)
8826 return omit_one_operand_loc (loc
, type
, boolean_true_node
, variable
);
8829 if (TREE_CODE (lhs
) == TREE_CODE (arg1
)
8830 && (TREE_CODE (lhs
) != INTEGER_CST
8831 || !TREE_OVERFLOW (lhs
)))
8833 if (code
!= EQ_EXPR
&& code
!= NE_EXPR
)
8834 fold_overflow_warning ("assuming signed overflow does not occur "
8835 "when changing X +- C1 cmp C2 to "
8837 WARN_STRICT_OVERFLOW_COMPARISON
);
8838 return fold_build2_loc (loc
, code
, type
, variable
, lhs
);
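      /* For instance, with undefined signed overflow "x + 3 < 10" folds to
	 "x < 7"; when the folded constant itself overflows, the comparison
	 degenerates to a constant result against INT_MIN/INT_MAX as handled
	 above.  */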
8842 /* For comparisons of pointers we can decompose it to a compile time
8843 comparison of the base objects and the offsets into the object.
8844 This requires at least one operand being an ADDR_EXPR or a
8845 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8846 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8847 && (TREE_CODE (arg0
) == ADDR_EXPR
8848 || TREE_CODE (arg1
) == ADDR_EXPR
8849 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8850 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8852 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8853 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8854 enum machine_mode mode
;
8855 int volatilep
, unsignedp
;
8856 bool indirect_base0
= false, indirect_base1
= false;
8858 /* Get base and offset for the access. Strip ADDR_EXPR for
8859 get_inner_reference, but put it back by stripping INDIRECT_REF
8860 off the base object if possible. indirect_baseN will be true
8861 if baseN is not an address but refers to the object itself. */
8863 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8865 base0
= get_inner_reference (TREE_OPERAND (arg0
, 0),
8866 &bitsize
, &bitpos0
, &offset0
, &mode
,
8867 &unsignedp
, &volatilep
, false);
8868 if (TREE_CODE (base0
) == INDIRECT_REF
)
8869 base0
= TREE_OPERAND (base0
, 0);
8871 indirect_base0
= true;
8873 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8875 base0
= TREE_OPERAND (arg0
, 0);
8876 STRIP_SIGN_NOPS (base0
);
8877 if (TREE_CODE (base0
) == ADDR_EXPR
)
8879 base0
= TREE_OPERAND (base0
, 0);
8880 indirect_base0
= true;
8882 offset0
= TREE_OPERAND (arg0
, 1);
8883 if (host_integerp (offset0
, 0))
8885 HOST_WIDE_INT off
= size_low_cst (offset0
);
8886 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8888 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8890 bitpos0
= off
* BITS_PER_UNIT
;
8891 offset0
= NULL_TREE
;
8897 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8899 base1
= get_inner_reference (TREE_OPERAND (arg1
, 0),
8900 &bitsize
, &bitpos1
, &offset1
, &mode
,
8901 &unsignedp
, &volatilep
, false);
8902 if (TREE_CODE (base1
) == INDIRECT_REF
)
8903 base1
= TREE_OPERAND (base1
, 0);
8905 indirect_base1
= true;
8907 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8909 base1
= TREE_OPERAND (arg1
, 0);
8910 STRIP_SIGN_NOPS (base1
);
8911 if (TREE_CODE (base1
) == ADDR_EXPR
)
8913 base1
= TREE_OPERAND (base1
, 0);
8914 indirect_base1
= true;
8916 offset1
= TREE_OPERAND (arg1
, 1);
8917 if (host_integerp (offset1
, 0))
8919 HOST_WIDE_INT off
= size_low_cst (offset1
);
8920 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8922 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8924 bitpos1
= off
* BITS_PER_UNIT
;
8925 offset1
= NULL_TREE
;
8930 /* A local variable can never be pointed to by
8931 the default SSA name of an incoming parameter. */
8932 if ((TREE_CODE (arg0
) == ADDR_EXPR
8934 && TREE_CODE (base0
) == VAR_DECL
8935 && auto_var_in_fn_p (base0
, current_function_decl
)
8937 && TREE_CODE (base1
) == SSA_NAME
8938 && TREE_CODE (SSA_NAME_VAR (base1
)) == PARM_DECL
8939 && SSA_NAME_IS_DEFAULT_DEF (base1
))
8940 || (TREE_CODE (arg1
) == ADDR_EXPR
8942 && TREE_CODE (base1
) == VAR_DECL
8943 && auto_var_in_fn_p (base1
, current_function_decl
)
8945 && TREE_CODE (base0
) == SSA_NAME
8946 && TREE_CODE (SSA_NAME_VAR (base0
)) == PARM_DECL
8947 && SSA_NAME_IS_DEFAULT_DEF (base0
)))
8949 if (code
== NE_EXPR
)
8950 return constant_boolean_node (1, type
);
8951 else if (code
== EQ_EXPR
)
8952 return constant_boolean_node (0, type
);
8954 /* If we have equivalent bases we might be able to simplify. */
8955 else if (indirect_base0
== indirect_base1
8956 && operand_equal_p (base0
, base1
, 0))
8958 /* We can fold this expression to a constant if the non-constant
8959 offset parts are equal. */
8960 if ((offset0
== offset1
8961 || (offset0
&& offset1
8962 && operand_equal_p (offset0
, offset1
, 0)))
8965 || (indirect_base0
&& DECL_P (base0
))
8966 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8971 && bitpos0
!= bitpos1
8972 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8973 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8974 fold_overflow_warning (("assuming pointer wraparound does not "
8975 "occur when comparing P +- C1 with "
8977 WARN_STRICT_OVERFLOW_CONDITIONAL
);
8982 return constant_boolean_node (bitpos0
== bitpos1
, type
);
8984 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
8986 return constant_boolean_node (bitpos0
< bitpos1
, type
);
8988 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
8990 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
8992 return constant_boolean_node (bitpos0
> bitpos1
, type
);
8996 /* We can simplify the comparison to a comparison of the variable
8997 offset parts if the constant offset parts are equal.
8998 Be careful to use signed size type here because otherwise we
8999 mess with array offsets in the wrong way. This is possible
9000 because pointer arithmetic is restricted to retain within an
9001 object and overflow on pointer differences is undefined as of
9002 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9003 else if (bitpos0
== bitpos1
9004 && ((code
== EQ_EXPR
|| code
== NE_EXPR
)
9005 || (indirect_base0
&& DECL_P (base0
))
9006 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
9008 /* By converting to signed size type we cover middle-end pointer
9009 arithmetic which operates on unsigned pointer types of size
9010 type size and ARRAY_REF offsets which are properly sign or
9011 zero extended from their type in case it is narrower than
9013 if (offset0
== NULL_TREE
)
9014 offset0
= build_int_cst (ssizetype
, 0);
9016 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
9017 if (offset1
== NULL_TREE
)
9018 offset1
= build_int_cst (ssizetype
, 0);
9020 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
9024 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
9025 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
9026 fold_overflow_warning (("assuming pointer wraparound does not "
9027 "occur when comparing P +- C1 with "
9029 WARN_STRICT_OVERFLOW_COMPARISON
);
9031 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
9034 /* For non-equal bases we can simplify if they are addresses
9035 of local binding decls or constants. */
9036 else if (indirect_base0
&& indirect_base1
9037 /* We know that !operand_equal_p (base0, base1, 0)
9038 because the if condition was false. But make
9039 sure two decls are not the same. */
9041 && TREE_CODE (arg0
) == ADDR_EXPR
9042 && TREE_CODE (arg1
) == ADDR_EXPR
9043 && (((TREE_CODE (base0
) == VAR_DECL
9044 || TREE_CODE (base0
) == PARM_DECL
)
9045 && (targetm
.binds_local_p (base0
)
9046 || CONSTANT_CLASS_P (base1
)))
9047 || CONSTANT_CLASS_P (base0
))
9048 && (((TREE_CODE (base1
) == VAR_DECL
9049 || TREE_CODE (base1
) == PARM_DECL
)
9050 && (targetm
.binds_local_p (base1
)
9051 || CONSTANT_CLASS_P (base0
)))
9052 || CONSTANT_CLASS_P (base1
)))
9054 if (code
== EQ_EXPR
)
9055 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
9057 else if (code
== NE_EXPR
)
9058 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
9061 /* For equal offsets we can simplify to a comparison of the
9063 else if (bitpos0
== bitpos1
9065 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
9067 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
9068 && ((offset0
== offset1
)
9069 || (offset0
&& offset1
9070 && operand_equal_p (offset0
, offset1
, 0))))
9073 base0
= build_fold_addr_expr_loc (loc
, base0
);
9075 base1
= build_fold_addr_expr_loc (loc
, base1
);
9076 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
9080 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9081 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9082 the resulting offset is smaller in absolute value than the
9084 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
9085 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
9086 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9087 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
9088 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
9089 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9090 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
9092 tree const1
= TREE_OPERAND (arg0
, 1);
9093 tree const2
= TREE_OPERAND (arg1
, 1);
9094 tree variable1
= TREE_OPERAND (arg0
, 0);
9095 tree variable2
= TREE_OPERAND (arg1
, 0);
9097 const char * const warnmsg
= G_("assuming signed overflow does not "
9098 "occur when combining constants around "
9101 /* Put the constant on the side where it doesn't overflow and is
9102 of lower absolute value than before. */
9103 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9104 ? MINUS_EXPR
: PLUS_EXPR
,
9106 if (!TREE_OVERFLOW (cst
)
9107 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
))
9109 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9110 return fold_build2_loc (loc
, code
, type
,
9112 fold_build2_loc (loc
,
9113 TREE_CODE (arg1
), TREE_TYPE (arg1
),
9117 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9118 ? MINUS_EXPR
: PLUS_EXPR
,
9120 if (!TREE_OVERFLOW (cst
)
9121 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
))
9123 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9124 return fold_build2_loc (loc
, code
, type
,
9125 fold_build2_loc (loc
, TREE_CODE (arg0
), TREE_TYPE (arg0
),
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }
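  /* For instance, "x * 4 < 0" becomes "x < 0", and "x * -2 < 0" becomes
     "x > 0" because the negative factor flips the sense of the comparison;
     both rely on signed overflow being undefined.  */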
9162 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
9166 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9168 tree targ0
= strip_float_extensions (arg0
);
9169 tree targ1
= strip_float_extensions (arg1
);
9170 tree newtype
= TREE_TYPE (targ0
);
9172 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
9173 newtype
= TREE_TYPE (targ1
);
9175 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9176 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
9177 return fold_build2_loc (loc
, code
, type
,
9178 fold_convert_loc (loc
, newtype
, targ0
),
9179 fold_convert_loc (loc
, newtype
, targ1
));
9181 /* (-a) CMP (-b) -> b CMP a */
9182 if (TREE_CODE (arg0
) == NEGATE_EXPR
9183 && TREE_CODE (arg1
) == NEGATE_EXPR
)
9184 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg1
, 0),
9185 TREE_OPERAND (arg0
, 0));
9187 if (TREE_CODE (arg1
) == REAL_CST
)
9189 REAL_VALUE_TYPE cst
;
9190 cst
= TREE_REAL_CST (arg1
);
9192 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9193 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
9194 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9195 TREE_OPERAND (arg0
, 0),
9196 build_real (TREE_TYPE (arg1
),
9197 real_value_negate (&cst
)));
9199 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9200 /* a CMP (-0) -> a CMP 0 */
9201 if (REAL_VALUE_MINUS_ZERO (cst
))
9202 return fold_build2_loc (loc
, code
, type
, arg0
,
9203 build_real (TREE_TYPE (arg1
), dconst0
));
9205 /* x != NaN is always true, other ops are always false. */
9206 if (REAL_VALUE_ISNAN (cst
)
9207 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
))))
9209 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
9210 return omit_one_operand_loc (loc
, type
, tem
, arg0
);
9213 /* Fold comparisons against infinity. */
9214 if (REAL_VALUE_ISINF (cst
)
9215 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
))))
9217 tem
= fold_inf_compare (loc
, code
, type
, arg0
, arg1
);
9218 if (tem
!= NULL_TREE
)
9223 /* If this is a comparison of a real constant with a PLUS_EXPR
9224 or a MINUS_EXPR of a real constant, we can convert it into a
9225 comparison with a revised real constant as long as no overflow
9226 occurs when unsafe_math_optimizations are enabled. */
9227 if (flag_unsafe_math_optimizations
9228 && TREE_CODE (arg1
) == REAL_CST
9229 && (TREE_CODE (arg0
) == PLUS_EXPR
9230 || TREE_CODE (arg0
) == MINUS_EXPR
)
9231 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
9232 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
9233 ? MINUS_EXPR
: PLUS_EXPR
,
9234 arg1
, TREE_OPERAND (arg0
, 1)))
9235 && !TREE_OVERFLOW (tem
))
9236 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9238 /* Likewise, we can simplify a comparison of a real constant with
9239 a MINUS_EXPR whose first operand is also a real constant, i.e.
9240 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9241 floating-point types only if -fassociative-math is set. */
9242 if (flag_associative_math
9243 && TREE_CODE (arg1
) == REAL_CST
9244 && TREE_CODE (arg0
) == MINUS_EXPR
9245 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
9246 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
9248 && !TREE_OVERFLOW (tem
))
9249 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9250 TREE_OPERAND (arg0
, 1), tem
);
9252 /* Fold comparisons against built-in math functions. */
9253 if (TREE_CODE (arg1
) == REAL_CST
9254 && flag_unsafe_math_optimizations
9255 && ! flag_errno_math
)
9257 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
9259 if (fcode
!= END_BUILTINS
)
9261 tem
= fold_mathfn_compare (loc
, fcode
, code
, type
, arg0
, arg1
);
9262 if (tem
!= NULL_TREE
)
9268 if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
9269 && CONVERT_EXPR_P (arg0
))
9271 /* If we are widening one operand of an integer comparison,
9272 see if the other operand is similarly being widened. Perhaps we
9273 can do the comparison in the narrower type. */
9274 tem
= fold_widened_comparison (loc
, code
, type
, arg0
, arg1
);
9278 /* Or if we are changing signedness. */
9279 tem
= fold_sign_changed_comparison (loc
, code
, type
, arg0
, arg1
);
9284 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9285 constant, we can simplify it. */
9286 if (TREE_CODE (arg1
) == INTEGER_CST
9287 && (TREE_CODE (arg0
) == MIN_EXPR
9288 || TREE_CODE (arg0
) == MAX_EXPR
)
9289 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9291 tem
= optimize_minmax_comparison (loc
, code
, type
, op0
, op1
);
9296 /* Simplify comparison of something with itself. (For IEEE
9297 floating-point, we can only do some of these simplifications.) */
9298 if (operand_equal_p (arg0
, arg1
, 0))
9303 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9304 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9305 return constant_boolean_node (1, type
);
9310 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9311 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9312 return constant_boolean_node (1, type
);
9313 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg1
);
9316 /* For NE, we can only do this simplification if integer
9317 or we don't honor IEEE floating point NaNs. */
9318 if (FLOAT_TYPE_P (TREE_TYPE (arg0
))
9319 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9321 /* ... fall through ... */
9324 return constant_boolean_node (0, type
);
9330 /* If we are comparing an expression that just has comparisons
9331 of two integer values, arithmetic expressions of those comparisons,
9332 and constants, we can simplify it. There are only three cases
9333 to check: the two values can either be equal, the first can be
9334 greater, or the second can be greater. Fold the expression for
9335 those three values. Since each value must be 0 or 1, we have
9336 eight possibilities, each of which corresponds to the constant 0
9337 or 1 or one of the six possible comparisons.
9339 This handles common cases like (a > b) == 0 but also handles
9340 expressions like ((x > y) - (y > x)) > 0, which supposedly
9341 occur in macroized code. */
9343 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
9345 tree cval1
= 0, cval2
= 0;
9348 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
9349 /* Don't handle degenerate cases here; they should already
9350 have been handled anyway. */
9351 && cval1
!= 0 && cval2
!= 0
9352 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
9353 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
9354 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
9355 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
9356 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
9357 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
9358 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
9360 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
9361 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
9363 /* We can't just pass T to eval_subst in case cval1 or cval2
9364 was the same as ARG1. */
9367 = fold_build2_loc (loc
, code
, type
,
9368 eval_subst (loc
, arg0
, cval1
, maxval
,
9372 = fold_build2_loc (loc
, code
, type
,
9373 eval_subst (loc
, arg0
, cval1
, maxval
,
9377 = fold_build2_loc (loc
, code
, type
,
9378 eval_subst (loc
, arg0
, cval1
, minval
,
9382 /* All three of these results should be 0 or 1. Confirm they are.
9383 Then use those values to select the proper code to use. */
9385 if (TREE_CODE (high_result
) == INTEGER_CST
9386 && TREE_CODE (equal_result
) == INTEGER_CST
9387 && TREE_CODE (low_result
) == INTEGER_CST
)
9389 /* Make a 3-bit mask with the high-order bit being the
9390 value for `>', the next for '=', and the low for '<'. */
9391 switch ((integer_onep (high_result
) * 4)
9392 + (integer_onep (equal_result
) * 2)
9393 + integer_onep (low_result
))
9397 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
9418 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
9423 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
9424 SET_EXPR_LOCATION (tem
, loc
);
9427 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }
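  /* For instance, ~x < ~y is the same test as y < x: bitwise complement
     reverses the ordering of integer values, so flipping both operands lets
     us drop both complements.  */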
  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type,
								 arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
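/* For instance, for z = a + b*i the product z * conj(z) is
   (a + b*i) * (a - b*i) = a*a + b*b with a zero imaginary part, which is
   exactly the COMPLEX_EXPR built above.  */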
9510 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9511 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9512 guarantees that P and N have the same least significant log2(M) bits.
9513 N is not otherwise constrained. In particular, N is not normalized to
9514 0 <= N < M as is common. In general, the precise value of P is unknown.
9515 M is chosen as large as possible such that constant N can be determined.
9517 Returns M and sets *RESIDUE to N.
9519 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9520 account. This is not always possible due to PR 35705.
9523 static unsigned HOST_WIDE_INT
9524 get_pointer_modulus_and_residue (tree expr
, unsigned HOST_WIDE_INT
*residue
,
9525 bool allow_func_align
)
9527 enum tree_code code
;
9531 code
= TREE_CODE (expr
);
9532 if (code
== ADDR_EXPR
)
9534 unsigned int bitalign
;
9535 bitalign
= get_object_alignment_1 (TREE_OPERAND (expr
, 0), residue
);
9536 *residue
/= BITS_PER_UNIT
;
9537 return bitalign
/ BITS_PER_UNIT
;
9539 else if (code
== POINTER_PLUS_EXPR
)
9542 unsigned HOST_WIDE_INT modulus
;
9543 enum tree_code inner_code
;
9545 op0
= TREE_OPERAND (expr
, 0);
9547 modulus
= get_pointer_modulus_and_residue (op0
, residue
,
9550 op1
= TREE_OPERAND (expr
, 1);
9552 inner_code
= TREE_CODE (op1
);
9553 if (inner_code
== INTEGER_CST
)
9555 *residue
+= TREE_INT_CST_LOW (op1
);
9558 else if (inner_code
== MULT_EXPR
)
9560 op1
= TREE_OPERAND (op1
, 1);
9561 if (TREE_CODE (op1
) == INTEGER_CST
)
9563 unsigned HOST_WIDE_INT align
;
9565 /* Compute the greatest power-of-2 divisor of op1. */
9566 align
= TREE_INT_CST_LOW (op1
);
9569 /* If align is non-zero and less than *modulus, replace
9570 *modulus with align., If align is 0, then either op1 is 0
9571 or the greatest power-of-2 divisor of op1 doesn't fit in an
9572 unsigned HOST_WIDE_INT. In either case, no additional
9573 constraint is imposed. */
9575 modulus
= MIN (modulus
, align
);
9582 /* If we get here, we were unable to determine anything useful about the
9587 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9588 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9591 vec_cst_ctor_to_array (tree arg
, tree
*elts
)
9593 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg
)), i
;
9595 if (TREE_CODE (arg
) == VECTOR_CST
)
9597 for (i
= 0; i
< VECTOR_CST_NELTS (arg
); ++i
)
9598 elts
[i
] = VECTOR_CST_ELT (arg
, i
);
9600 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
9602 constructor_elt
*elt
;
9604 FOR_EACH_VEC_ELT (constructor_elt
, CONSTRUCTOR_ELTS (arg
), i
, elt
)
9608 elts
[i
] = elt
->value
;
9612 for (; i
< nelts
; i
++)
9614 = fold_convert (TREE_TYPE (TREE_TYPE (arg
)), integer_zero_node
);
9618 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9619 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9620 NULL_TREE otherwise. */
9623 fold_vec_perm (tree type
, tree arg0
, tree arg1
, const unsigned char *sel
)
9625 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
9627 bool need_ctor
= false;
9629 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
9630 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
);
9631 if (TREE_TYPE (TREE_TYPE (arg0
)) != TREE_TYPE (type
)
9632 || TREE_TYPE (TREE_TYPE (arg1
)) != TREE_TYPE (type
))
9635 elts
= XALLOCAVEC (tree
, nelts
* 3);
9636 if (!vec_cst_ctor_to_array (arg0
, elts
)
9637 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
))
9640 for (i
= 0; i
< nelts
; i
++)
9642 if (!CONSTANT_CLASS_P (elts
[sel
[i
]]))
9644 elts
[i
+ 2 * nelts
] = unshare_expr (elts
[sel
[i
]]);
9649 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, nelts
);
9650 for (i
= 0; i
< nelts
; i
++)
9651 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, elts
[2 * nelts
+ i
]);
9652 return build_constructor (type
, v
);
9655 return build_vector (type
, &elts
[2 * nelts
]);
/* Try to fold a pointer difference of type TYPE of two address expressions
   of array references AREF0 and AREF1 using location LOC.  Return a
   simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
					     TREE_OPERAND (base0, 0),
					     TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
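/* For instance, &a[i] - &a[j] (taken in an integer type) folds to
   (i - j) * sizeof (a[0]) plus whatever difference the recursion finds
   between the two bases.  */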
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  enum machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
	return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
	{
	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
	  if (!exact_real_inverse (mode, &r))
	    return NULL_TREE;
	  elts[i] = build_real (unit_type, r);
	}

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
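/* For instance, x / 4.0 can be rewritten as x * 0.25 because 0.25 is an
   exact binary inverse of 4.0, whereas x / 10.0 cannot, since 0.1 has no
   exact binary representation.  */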
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
		 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;
9760 /* Strip any conversions that don't change the mode. This is
9761 safe for every expression, except for a comparison expression
9762 because its signedness is derived from its operands. So, in
9763 the latter case, only strip conversions that don't change the
9764 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9767 Note that this is done as an internal manipulation within the
9768 constant folder, in order to find the simplest representation
9769 of the arguments so that their form can be studied. In any
9770 cases, the appropriate type conversions should be put back in
9771 the tree that will get out of the constant folder. */
9773 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9775 STRIP_SIGN_NOPS (arg0
);
9776 STRIP_SIGN_NOPS (arg1
);
9784 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9785 constant but we can't do arithmetic on them. */
9786 if ((TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9787 || (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
9788 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == FIXED_CST
)
9789 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9790 || (TREE_CODE (arg0
) == COMPLEX_CST
&& TREE_CODE (arg1
) == COMPLEX_CST
)
9791 || (TREE_CODE (arg0
) == VECTOR_CST
&& TREE_CODE (arg1
) == VECTOR_CST
))
9793 if (kind
== tcc_binary
)
9795 /* Make sure type and arg0 have the same saturating flag. */
9796 gcc_assert (TYPE_SATURATING (type
)
9797 == TYPE_SATURATING (TREE_TYPE (arg0
)));
9798 tem
= const_binop (code
, arg0
, arg1
);
9800 else if (kind
== tcc_comparison
)
9801 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
9805 if (tem
!= NULL_TREE
)
9807 if (TREE_TYPE (tem
) != type
)
9808 tem
= fold_convert_loc (loc
, type
, tem
);
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expr.c.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			     : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			     : TRUTH_XOR_EXPR,
			     boolean_type_node,
			     fold_convert_loc (loc, boolean_type_node, arg0),
			     fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
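
  /* Worked example (illustrative): for (a < b) & (c == d) both operands
     are truth values, so the BIT_AND_EXPR is rebuilt above as
     TRUTH_AND_EXPR (a < b, c == d); for (a < b) == (c == d) the result is
     the inversion of a TRUTH_XOR_EXPR, since two truth values are equal
     exactly when their exclusive or is false.  */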
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	{
	  tem = fold_build2_loc (loc, code, type,
				 fold_convert_loc (loc, TREE_TYPE (op0),
						   TREE_OPERAND (arg0, 1)), op1);
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
			     tem);
	}
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, code, type, op0,
				 fold_convert_loc (loc, TREE_TYPE (op1),
						   TREE_OPERAND (arg1, 1)));
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
			     tem);
	}

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
	{
	  tree iref = TREE_OPERAND (arg0, 0);
	  return fold_build2 (MEM_REF, type,
			      TREE_OPERAND (iref, 0),
			      int_const_binop (PLUS_EXPR, arg1,
					       TREE_OPERAND (iref, 1)));
	}

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && handled_component_p (TREE_OPERAND (arg0, 0)))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
						&coffset);
	  if (!base)
	    return NULL_TREE;
	  return fold_build2 (MEM_REF, type,
			      build_fold_addr_expr (base),
			      int_const_binop (PLUS_EXPR, arg1,
					       size_int (coffset)));
	}

      return NULL_TREE;
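
      /* Worked example (illustrative): MEM[&MEM[p, 4], 8] collapses to
	 MEM[p, 12], and MEM[&a.b, 8] becomes MEM[&a, offsetof (a, b) + 8];
	 in both cases the constant offsets are combined by
	 int_const_binop.  */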

    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert_loc (loc, type,
				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
						  fold_convert_loc (loc, sizetype,
								    arg1),
						  fold_convert_loc (loc, sizetype,
								    arg0)));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  tree inner;
	  tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
				   arg01, fold_convert_loc (loc, sizetype, arg1));
	  return fold_convert_loc (loc, type,
				   fold_build_pointer_plus_loc (loc,
								arg00, inner));
	}

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
				fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
	 of the array.  Loop optimizer sometimes produce this type of
	 expressions.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (loc, arg0,
					fold_convert_loc (loc, sizetype, arg1));
	  if (tem)
	    return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;
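
      /* Worked example (illustrative): (p +p 4) +p 8 is rewritten by the
	 (PTR +p B) +p A rule above as p +p (4 + 8), so the two offset
	 additions collapse into a single pointer adjustment of 12 bytes.  */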

    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg1),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
	{
	  /* Convert ~A + 1 to -A.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    return fold_build1_loc (loc, NEGATE_EXPR, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)));

	  /* ~X + X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg0, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (tem, arg1, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand_loc (loc, type, t1, arg1);
		}
	    }

	  /* X + ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg1, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (arg0, tem, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand_loc (loc, type, t1, arg0);
		}
	    }

	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
					  cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert_loc (loc, type,
					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
							  TREE_TYPE (arg0), arg0,
							  cst0));
	    }
	}
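
      /* Worked example (illustrative): with CST == 4, the expression
	 x + (x / 4) * -4 satisfies -4 + 4 == 0, so it folds to x % 4
	 (TRUNC_MOD_EXPR); likewise ~x + 1 folds to -x, the usual
	 two's-complement negation identity.  */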

      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  Make sure type is not saturating.
	 fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1))))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }

	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (TYPE_OVERFLOW_WRAPS (type)
	      && (((TREE_CODE (arg0) == PLUS_EXPR
		    || TREE_CODE (arg0) == MINUS_EXPR)
		   && TREE_CODE (arg1) == MULT_EXPR)
		  || ((TREE_CODE (arg1) == PLUS_EXPR
		       || TREE_CODE (arg1) == MINUS_EXPR)
		      && TREE_CODE (arg0) == MULT_EXPR)))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2_loc (loc, pcode, type,
					fold_build2_loc (loc, PLUS_EXPR, type,
							 fold_convert_loc (loc, type,
									   parg0),
							 fold_convert_loc (loc, type,
									   marg)),
					fold_convert_loc (loc, type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return
		  fold_build2_loc (loc, PLUS_EXPR, type,
				   fold_convert_loc (loc, type, parg0),
				   fold_build2_loc (loc, pcode, type,
						    fold_convert_loc (loc, type, marg),
						    fold_convert_loc (loc, type,
								      parg1)));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2_loc (loc, MINUS_EXPR, type,
					fold_convert_loc (loc, type, arg0),
					fold_convert_loc (loc, type, tem));
	    }

	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
				: build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
				: build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
				: build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
				: build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }

	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0,
				    build_real (type, dconst2));

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e +a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}

 bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	tree rtype;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
		TYPE_UNSIGNED (rtype))
	    /* Only create rotates in complete modes.  Other cases are not
	       expanded properly.  */
	    && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      {
		tem = build2_loc (loc, LROTATE_EXPR,
				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
				  TREE_OPERAND (arg0, 0),
				  code0 == LSHIFT_EXPR ? tree01 : tree11);
		return fold_convert_loc (loc, type, tem);
	      }
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return
		    fold_convert_loc (loc, type,
				      build2 ((code0 == LSHIFT_EXPR
					       ? LROTATE_EXPR
					       : RROTATE_EXPR),
					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
					      TREE_OPERAND (arg0, 0), tree01));
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return fold_convert_loc
		    (loc, type,
		     build2 ((code0 != LSHIFT_EXPR
			      ? LROTATE_EXPR
			      : RROTATE_EXPR),
			     TREE_TYPE (TREE_OPERAND (arg0, 0)),
			     TREE_OPERAND (arg0, 0), tree11));
	      }
	  }
      }
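
      /* Worked example (illustrative): for a 32-bit unsigned A, the sum
	 (A << 3) + (A >> 29) has 3 + 29 == 32 == TYPE_PRECISION, so it is
	 rewritten as a left rotate of A by 3; the variable-count form
	 (A << B) + (A >> (32 - B)) is matched by the MINUS_EXPR branches
	 above.  */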

      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	  if (code == MINUS_EXPR)
	    code = PLUS_EXPR;

	  /* With undefined overflow we can only associate constants with one
	     variable, and constants whose association doesn't overflow.  */
	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	    {
	      if (var0 && var1)
		{
		  tree tmp0 = var0;
		  tree tmp1 = var1;

		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (CONVERT_EXPR_P (tmp0)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
			  <= TYPE_PRECISION (type)))
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  if (CONVERT_EXPR_P (tmp1)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
			  <= TYPE_PRECISION (type)))
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  /* The only case we can still associate with two variables
		     is if they are the same, modulo negation and bit-pattern
		     preserving conversions.  */
		  if (!operand_equal_p (tmp0, tmp1, 0))
		    ok = false;
		}

	      if (ok && lit0 && lit1)
		{
		  tree tmp0 = fold_convert (type, lit0);
		  tree tmp1 = fold_convert (type, lit1);
		  if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
		      && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
		    ok = false;
		}
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      var0 = associate_trees (loc, var0, var1, code, type);
	      con0 = associate_trees (loc, con0, con1, code, type);
	      lit0 = associate_trees (loc, lit0, lit1, code, type);
	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
					    code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (loc, lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return
		      fold_convert_loc (loc, type,
					associate_trees (loc, var0, minus_lit0,
							 MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (loc, con0, minus_lit0,
					      MINUS_EXPR, type);
		      return
			fold_convert_loc (loc, type,
					  associate_trees (loc, var0, con0,
							   PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (loc, con0, lit0, code, type);
	      return
		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
							      code, type));
	    }
	}

      return NULL_TREE;
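
      /* Worked example (illustrative): for (x + 4) + 5, split_tree yields
	 the variable x and the literals 4 and 5; three objects were found,
	 so the literals are re-associated first and the whole expression
	 becomes x + 9 in a single pass instead of recursing indefinitely.  */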

    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations. */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
	{
	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	      return fold_build2_loc (loc, PLUS_EXPR, type,
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg00, arg10),
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg01, arg11));
	    }
	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
	     simplifies. */
	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
					  fold_convert_loc (loc, type, arg1));
	      if (tmp)
		return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
	    }
	}
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, PLUS_EXPR, type, op0,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || INTEGRAL_TYPE_P (type))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg1)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && integer_onep (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && integer_all_onesp (arg0))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	  && operand_equal_p (arg0,
			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
			      TREE_OPERAND (arg1, 1), 0))
	return
	  fold_convert_loc (loc, type,
			    fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
					     arg0, TREE_OPERAND (arg1, 1)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg0))
	    return negate_expr (fold_convert_loc (loc, type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		{
		  tree arg10 = fold_convert_loc (loc, type,
						 TREE_OPERAND (arg1, 0));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg10),
					  fold_convert_loc (loc, type, arg0));
		}
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		{
		  tree arg11 = fold_convert_loc (loc,
						 type, TREE_OPERAND (arg1, 1));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg11),
					  fold_convert_loc (loc, type, arg0));
		}
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
					 TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
		}
	    }
	}
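
      /* Worked example (illustrative): with A == 0b1101 and B == 0b0110,
	 A - (A & B) == 13 - 4 == 9, and ~B & A == 0b1001 == 9, matching
	 the A - (A & B) -> ~B & A rewrite above; the A & B term clears
	 exactly the bits that the subtraction removes.  */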
10575 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10576 else if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 1))
10577 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10579 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10580 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10581 (-ARG1 + ARG0) reduces to -ARG1. */
10582 else if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
10583 return negate_expr (fold_convert_loc (loc
, type
, arg1
));
10585 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10586 __complex__ ( x, -y ). This is not the same for SNaNs or if
10587 signed zeros are involved. */
10588 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10589 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10590 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10592 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10593 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10594 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10595 bool arg0rz
= false, arg0iz
= false;
10596 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10597 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10599 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10600 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10601 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10603 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10605 : build1 (REALPART_EXPR
, rtype
, arg1
));
10606 tree ip
= arg0i
? arg0i
10607 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10608 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10610 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10612 tree rp
= arg0r
? arg0r
10613 : build1 (REALPART_EXPR
, rtype
, arg0
);
10614 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10616 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
10617 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10622 /* Fold &x - &x. This can happen from &x.foo - &x.
10623 This is unsafe for certain floats even in non-IEEE formats.
10624 In IEEE, it is unsafe because it does wrong for NaNs.
10625 Also note that operand_equal_p is always false if an operand
10628 if ((!FLOAT_TYPE_P (type
) || !HONOR_NANS (TYPE_MODE (type
)))
10629 && operand_equal_p (arg0
, arg1
, 0))
10630 return build_zero_cst (type
);
10632 /* A - B -> A + (-B) if B is easily negatable. */
10633 if (negate_expr_p (arg1
)
10634 && ((FLOAT_TYPE_P (type
)
10635 /* Avoid this transformation if B is a positive REAL_CST. */
10636 && (TREE_CODE (arg1
) != REAL_CST
10637 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
10638 || INTEGRAL_TYPE_P (type
)))
10639 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10640 fold_convert_loc (loc
, type
, arg0
),
10641 fold_convert_loc (loc
, type
,
10642 negate_expr (arg1
)));
10644 /* Try folding difference of addresses. */
10646 HOST_WIDE_INT diff
;
10648 if ((TREE_CODE (arg0
) == ADDR_EXPR
10649 || TREE_CODE (arg1
) == ADDR_EXPR
)
10650 && ptr_difference_const (arg0
, arg1
, &diff
))
10651 return build_int_cst_type (type
, diff
);
10654 /* Fold &a[i] - &a[j] to i-j. */
10655 if (TREE_CODE (arg0
) == ADDR_EXPR
10656 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
10657 && TREE_CODE (arg1
) == ADDR_EXPR
10658 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
10660 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
10661 TREE_OPERAND (arg0
, 0),
10662 TREE_OPERAND (arg1
, 0));
10667 if (FLOAT_TYPE_P (type
)
10668 && flag_unsafe_math_optimizations
10669 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10670 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10671 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10674 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10675 same or one. Make sure type is not saturating.
10676 fold_plusminus_mult_expr will re-associate. */
10677 if ((TREE_CODE (arg0
) == MULT_EXPR
10678 || TREE_CODE (arg1
) == MULT_EXPR
)
10679 && !TYPE_SATURATING (type
)
10680 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10682 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10690 /* (-A) * (-B) -> A * B */
10691 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10692 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10693 fold_convert_loc (loc
, type
,
10694 TREE_OPERAND (arg0
, 0)),
10695 fold_convert_loc (loc
, type
,
10696 negate_expr (arg1
)));
10697 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10698 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10699 fold_convert_loc (loc
, type
,
10700 negate_expr (arg0
)),
10701 fold_convert_loc (loc
, type
,
10702 TREE_OPERAND (arg1
, 0)));
10704 if (! FLOAT_TYPE_P (type
))
10706 if (integer_zerop (arg1
))
10707 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10708 if (integer_onep (arg1
))
10709 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10710 /* Transform x * -1 into -x. Make sure to do the negation
10711 on the original operand with conversions not stripped
10712 because we can only strip non-sign-changing conversions. */
10713 if (integer_all_onesp (arg1
))
10714 return fold_convert_loc (loc
, type
, negate_expr (op0
));
10715 /* Transform x * -C into -x * C if x is easily negatable. */
10716 if (TREE_CODE (arg1
) == INTEGER_CST
10717 && tree_int_cst_sgn (arg1
) == -1
10718 && negate_expr_p (arg0
)
10719 && (tem
= negate_expr (arg1
)) != arg1
10720 && !TREE_OVERFLOW (tem
))
10721 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10722 fold_convert_loc (loc
, type
,
10723 negate_expr (arg0
)),
10726 /* (a * (1 << b)) is (a << b) */
10727 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10728 && integer_onep (TREE_OPERAND (arg1
, 0)))
10729 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op0
,
10730 TREE_OPERAND (arg1
, 1));
10731 if (TREE_CODE (arg0
) == LSHIFT_EXPR
10732 && integer_onep (TREE_OPERAND (arg0
, 0)))
10733 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op1
,
10734 TREE_OPERAND (arg0
, 1));
10736 /* (A + A) * C -> A * 2 * C */
10737 if (TREE_CODE (arg0
) == PLUS_EXPR
10738 && TREE_CODE (arg1
) == INTEGER_CST
10739 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10740 TREE_OPERAND (arg0
, 1), 0))
10741 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10742 omit_one_operand_loc (loc
, type
,
10743 TREE_OPERAND (arg0
, 0),
10744 TREE_OPERAND (arg0
, 1)),
10745 fold_build2_loc (loc
, MULT_EXPR
, type
,
10746 build_int_cst (type
, 2) , arg1
));
10748 strict_overflow_p
= false;
10749 if (TREE_CODE (arg1
) == INTEGER_CST
10750 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10751 &strict_overflow_p
)))
10753 if (strict_overflow_p
)
10754 fold_overflow_warning (("assuming signed overflow does not "
10755 "occur when simplifying "
10757 WARN_STRICT_OVERFLOW_MISC
);
10758 return fold_convert_loc (loc
, type
, tem
);
10761 /* Optimize z * conj(z) for integer complex numbers. */
10762 if (TREE_CODE (arg0
) == CONJ_EXPR
10763 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10764 return fold_mult_zconjz (loc
, type
, arg1
);
10765 if (TREE_CODE (arg1
) == CONJ_EXPR
10766 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10767 return fold_mult_zconjz (loc
, type
, arg0
);
10771 /* Maybe fold x * 0 to 0. The expressions aren't the same
10772 when x is NaN, since x * 0 is also NaN. Nor are they the
10773 same in modes with signed zeros, since multiplying a
10774 negative value by 0 gives -0, not +0. */
10775 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
10776 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10777 && real_zerop (arg1
))
10778 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10779 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10780 Likewise for complex arithmetic with signed zeros. */
10781 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10782 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10783 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10784 && real_onep (arg1
))
10785 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10787 /* Transform x * -1.0 into -x. */
10788 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10789 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10790 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10791 && real_minus_onep (arg1
))
10792 return fold_convert_loc (loc
, type
, negate_expr (arg0
));
10794 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10795 the result for floating point types due to rounding so it is applied
10796 only if -fassociative-math was specify. */
10797 if (flag_associative_math
10798 && TREE_CODE (arg0
) == RDIV_EXPR
10799 && TREE_CODE (arg1
) == REAL_CST
10800 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
10802 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
10805 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
10806 TREE_OPERAND (arg0
, 1));
10809 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10810 if (operand_equal_p (arg0
, arg1
, 0))
10812 tree tem
= fold_strip_sign_ops (arg0
);
10813 if (tem
!= NULL_TREE
)
10815 tem
= fold_convert_loc (loc
, type
, tem
);
10816 return fold_build2_loc (loc
, MULT_EXPR
, type
, tem
, tem
);
10820 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10821 This is not the same for NaNs or if signed zeros are
10823 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
10824 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10825 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
10826 && TREE_CODE (arg1
) == COMPLEX_CST
10827 && real_zerop (TREE_REALPART (arg1
)))
10829 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10830 if (real_onep (TREE_IMAGPART (arg1
)))
10832 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10833 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
10835 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
10836 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
10838 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10839 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
10840 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
10844 /* Optimize z * conj(z) for floating point complex numbers.
10845 Guarded by flag_unsafe_math_optimizations as non-finite
10846 imaginary components don't produce scalar results. */
10847 if (flag_unsafe_math_optimizations
10848 && TREE_CODE (arg0
) == CONJ_EXPR
10849 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10850 return fold_mult_zconjz (loc
, type
, arg1
);
10851 if (flag_unsafe_math_optimizations
10852 && TREE_CODE (arg1
) == CONJ_EXPR
10853 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10854 return fold_mult_zconjz (loc
, type
, arg0
);
10856 if (flag_unsafe_math_optimizations
)
10858 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
10859 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
10861 /* Optimizations of root(...)*root(...). */
10862 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
10865 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10866 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10868 /* Optimize sqrt(x)*sqrt(x) as x. */
10869 if (BUILTIN_SQRT_P (fcode0
)
10870 && operand_equal_p (arg00
, arg10
, 0)
10871 && ! HONOR_SNANS (TYPE_MODE (type
)))
10874 /* Optimize root(x)*root(y) as root(x*y). */
10875 rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10876 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg00
, arg10
);
10877 return build_call_expr_loc (loc
, rootfn
, 1, arg
);
10880 /* Optimize expN(x)*expN(y) as expN(x+y). */
10881 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
10883 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10884 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10885 CALL_EXPR_ARG (arg0
, 0),
10886 CALL_EXPR_ARG (arg1
, 0));
10887 return build_call_expr_loc (loc
, expfn
, 1, arg
);
10890 /* Optimizations of pow(...)*pow(...). */
10891 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
10892 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
10893 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
10895 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10896 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10897 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10898 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10900 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10901 if (operand_equal_p (arg01
, arg11
, 0))
10903 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10904 tree arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
10906 return build_call_expr_loc (loc
, powfn
, 2, arg
, arg01
);
10909 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10910 if (operand_equal_p (arg00
, arg10
, 0))
10912 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10913 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10915 return build_call_expr_loc (loc
, powfn
, 2, arg00
, arg
);
10919 /* Optimize tan(x)*cos(x) as sin(x). */
10920 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
10921 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
10922 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
10923 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
10924 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
10925 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
10926 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
10927 CALL_EXPR_ARG (arg1
, 0), 0))
10929 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
10931 if (sinfn
!= NULL_TREE
)
10932 return build_call_expr_loc (loc
, sinfn
, 1,
10933 CALL_EXPR_ARG (arg0
, 0));
10936 /* Optimize x*pow(x,c) as pow(x,c+1). */
10937 if (fcode1
== BUILT_IN_POW
10938 || fcode1
== BUILT_IN_POWF
10939 || fcode1
== BUILT_IN_POWL
)
10941 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10942 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10943 if (TREE_CODE (arg11
) == REAL_CST
10944 && !TREE_OVERFLOW (arg11
)
10945 && operand_equal_p (arg0
, arg10
, 0))
10947 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10951 c
= TREE_REAL_CST (arg11
);
10952 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10953 arg
= build_real (type
, c
);
10954 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10958 /* Optimize pow(x,c)*x as pow(x,c+1). */
10959 if (fcode0
== BUILT_IN_POW
10960 || fcode0
== BUILT_IN_POWF
10961 || fcode0
== BUILT_IN_POWL
)
10963 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10964 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10965 if (TREE_CODE (arg01
) == REAL_CST
10966 && !TREE_OVERFLOW (arg01
)
10967 && operand_equal_p (arg1
, arg00
, 0))
10969 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10973 c
= TREE_REAL_CST (arg01
);
10974 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10975 arg
= build_real (type
, c
);
10976 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
10980 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10981 if (!in_gimple_form
10983 && operand_equal_p (arg0
, arg1
, 0))
10985 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
10989 tree arg
= build_real (type
, dconst2
);
10990 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10999 if (integer_all_onesp (arg1
))
11000 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11001 if (integer_zerop (arg1
))
11002 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11003 if (operand_equal_p (arg0
, arg1
, 0))
11004 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11006 /* ~X | X is -1. */
11007 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11008 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11010 t1
= build_zero_cst (type
);
11011 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11012 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
11015 /* X | ~X is -1. */
11016 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
11017 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11019 t1
= build_zero_cst (type
);
11020 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11021 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
11024 /* Canonicalize (X & C1) | C2. */
11025 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11026 && TREE_CODE (arg1
) == INTEGER_CST
11027 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11029 double_int c1
, c2
, c3
, msk
;
11030 int width
= TYPE_PRECISION (type
), w
;
11031 c1
= tree_to_double_int (TREE_OPERAND (arg0
, 1));
11032 c2
= tree_to_double_int (arg1
);
11034 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11035 if (double_int_equal_p (double_int_and (c1
, c2
), c1
))
11036 return omit_one_operand_loc (loc
, type
, arg1
,
11037 TREE_OPERAND (arg0
, 0));
11039 msk
= double_int_mask (width
);
11041 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11042 if (double_int_zero_p (double_int_and_not (msk
,
11043 double_int_ior (c1
, c2
))))
11044 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
11045 TREE_OPERAND (arg0
, 0), arg1
);
11047 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11048 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11049 mode which allows further optimizations. */
11050 c1
= double_int_and (c1
, msk
);
11051 c2
= double_int_and (c2
, msk
);
11052 c3
= double_int_and_not (c1
, c2
);
11053 for (w
= BITS_PER_UNIT
;
11054 w
<= width
&& w
<= HOST_BITS_PER_WIDE_INT
;
11057 unsigned HOST_WIDE_INT mask
11058 = (unsigned HOST_WIDE_INT
) -1 >> (HOST_BITS_PER_WIDE_INT
- w
);
11059 if (((c1
.low
| c2
.low
) & mask
) == mask
11060 && (c1
.low
& ~mask
) == 0 && c1
.high
== 0)
11062 c3
= uhwi_to_double_int (mask
);
11066 if (!double_int_equal_p (c3
, c1
))
11067 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
11068 fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11069 TREE_OPERAND (arg0
, 0),
11070 double_int_to_tree (type
,
11075 /* (X & Y) | Y is (X, Y). */
11076 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11077 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11078 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 0));
11079 /* (X & Y) | X is (Y, X). */
11080 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11081 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11082 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11083 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 1));
11084 /* X | (X & Y) is (Y, X). */
11085 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11086 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
11087 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
11088 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 1));
11089 /* X | (Y & X) is (Y, X). */
11090 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11091 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11092 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11093 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 0));
11095 /* (X & ~Y) | (~X & Y) is X ^ Y */
11096 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11097 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
11099 tree a0
, a1
, l0
, l1
, n0
, n1
;
11101 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11102 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11104 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11105 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11107 n0
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l0
);
11108 n1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l1
);
11110 if ((operand_equal_p (n0
, a0
, 0)
11111 && operand_equal_p (n1
, a1
, 0))
11112 || (operand_equal_p (n0
, a1
, 0)
11113 && operand_equal_p (n1
, a0
, 0)))
11114 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, l0
, n1
);
11117 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11118 if (t1
!= NULL_TREE
)
11121 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11123 This results in more efficient code for machines without a NAND
11124 instruction. Combine will canonicalize to the first form
11125 which will allow use of NAND instructions provided by the
11126 backend if they exist. */
11127 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11128 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11131 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11132 build2 (BIT_AND_EXPR
, type
,
11133 fold_convert_loc (loc
, type
,
11134 TREE_OPERAND (arg0
, 0)),
11135 fold_convert_loc (loc
, type
,
11136 TREE_OPERAND (arg1
, 0))));
11139 /* See if this can be simplified into a rotate first. If that
11140 is unsuccessful continue in the association code. */
11144 if (integer_zerop (arg1
))
11145 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11146 if (integer_all_onesp (arg1
))
11147 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, op0
);
11148 if (operand_equal_p (arg0
, arg1
, 0))
11149 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11151 /* ~X ^ X is -1. */
11152 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11153 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11155 t1
= build_zero_cst (type
);
11156 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11157 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
11160 /* X ^ ~X is -1. */
11161 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
11162 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11164 t1
= build_zero_cst (type
);
11165 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11166 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
11169 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11170 with a constant, and the two constants have no bits in common,
11171 we should treat this as a BIT_IOR_EXPR since this may produce more
11172 simplifications. */
11173 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11174 && TREE_CODE (arg1
) == BIT_AND_EXPR
11175 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11176 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
11177 && integer_zerop (const_binop (BIT_AND_EXPR
,
11178 TREE_OPERAND (arg0
, 1),
11179 TREE_OPERAND (arg1
, 1))))
11181 code
= BIT_IOR_EXPR
;
11185 /* (X | Y) ^ X -> Y & ~ X*/
11186 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11187 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11189 tree t2
= TREE_OPERAND (arg0
, 1);
11190 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
11192 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11193 fold_convert_loc (loc
, type
, t2
),
11194 fold_convert_loc (loc
, type
, t1
));
11198 /* (Y | X) ^ X -> Y & ~ X*/
11199 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11200 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11202 tree t2
= TREE_OPERAND (arg0
, 0);
11203 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
11205 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11206 fold_convert_loc (loc
, type
, t2
),
11207 fold_convert_loc (loc
, type
, t1
));
11211 /* X ^ (X | Y) -> Y & ~ X*/
11212 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11213 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
11215 tree t2
= TREE_OPERAND (arg1
, 1);
11216 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11218 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11219 fold_convert_loc (loc
, type
, t2
),
11220 fold_convert_loc (loc
, type
, t1
));
11224 /* X ^ (Y | X) -> Y & ~ X*/
11225 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11226 && operand_equal_p (TREE_OPERAND (arg1
, 1), arg0
, 0))
11228 tree t2
= TREE_OPERAND (arg1
, 0);
11229 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11231 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11232 fold_convert_loc (loc
, type
, t2
),
11233 fold_convert_loc (loc
, type
, t1
));
11237 /* Convert ~X ^ ~Y to X ^ Y. */
11238 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11239 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11240 return fold_build2_loc (loc
, code
, type
,
11241 fold_convert_loc (loc
, type
,
11242 TREE_OPERAND (arg0
, 0)),
11243 fold_convert_loc (loc
, type
,
11244 TREE_OPERAND (arg1
, 0)));
11246 /* Convert ~X ^ C to X ^ ~C. */
11247 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11248 && TREE_CODE (arg1
) == INTEGER_CST
)
11249 return fold_build2_loc (loc
, code
, type
,
11250 fold_convert_loc (loc
, type
,
11251 TREE_OPERAND (arg0
, 0)),
11252 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, arg1
));
11254 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11255 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11256 && integer_onep (TREE_OPERAND (arg0
, 1))
11257 && integer_onep (arg1
))
11258 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
11259 build_int_cst (TREE_TYPE (arg0
), 0));
11261 /* Fold (X & Y) ^ Y as ~X & Y. */
11262 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11263 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11265 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11266 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11267 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11268 fold_convert_loc (loc
, type
, arg1
));
11270 /* Fold (X & Y) ^ X as ~Y & X. */
11271 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11272 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11273 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11275 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11276 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11277 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11278 fold_convert_loc (loc
, type
, arg1
));
11280 /* Fold X ^ (X & Y) as X & ~Y. */
11281 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11282 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11284 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11285 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11286 fold_convert_loc (loc
, type
, arg0
),
11287 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11289 /* Fold X ^ (Y & X) as ~Y & X. */
11290 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11291 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11292 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11294 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11295 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11296 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11297 fold_convert_loc (loc
, type
, arg0
));
11300 /* See if this can be simplified into a rotate first. If that
11301 is unsuccessful continue in the association code. */
11305 if (integer_all_onesp (arg1
))
11306 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11307 if (integer_zerop (arg1
))
11308 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11309 if (operand_equal_p (arg0
, arg1
, 0))
11310 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11312 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11313 if ((TREE_CODE (arg0
) == BIT_NOT_EXPR
11314 || TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11315 || (TREE_CODE (arg0
) == EQ_EXPR
11316 && integer_zerop (TREE_OPERAND (arg0
, 1))))
11317 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11318 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11320 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11321 if ((TREE_CODE (arg1
) == BIT_NOT_EXPR
11322 || TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11323 || (TREE_CODE (arg1
) == EQ_EXPR
11324 && integer_zerop (TREE_OPERAND (arg1
, 1))))
11325 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11326 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11328 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11329 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11330 && TREE_CODE (arg1
) == INTEGER_CST
11331 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11333 tree tmp1
= fold_convert_loc (loc
, type
, arg1
);
11334 tree tmp2
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11335 tree tmp3
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11336 tmp2
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp2
, tmp1
);
11337 tmp3
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp3
, tmp1
);
11339 fold_convert_loc (loc
, type
,
11340 fold_build2_loc (loc
, BIT_IOR_EXPR
,
11341 type
, tmp2
, tmp3
));
11344 /* (X | Y) & Y is (X, Y). */
11345 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11346 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11347 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 0));
11348 /* (X | Y) & X is (Y, X). */
11349 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11350 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11351 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11352 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 1));
11353 /* X & (X | Y) is (Y, X). */
11354 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11355 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
11356 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
11357 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 1));
11358 /* X & (Y | X) is (Y, X). */
11359 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11360 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11361 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11362 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 0));
11364 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11365 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11366 && integer_onep (TREE_OPERAND (arg0
, 1))
11367 && integer_onep (arg1
))
11369 tem
= TREE_OPERAND (arg0
, 0);
11370 return fold_build2_loc (loc
, EQ_EXPR
, type
,
11371 fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
), tem
,
11372 build_int_cst (TREE_TYPE (tem
), 1)),
11373 build_int_cst (TREE_TYPE (tem
), 0));
11375 /* Fold ~X & 1 as (X & 1) == 0. */
11376 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11377 && integer_onep (arg1
))
11379 tem
= TREE_OPERAND (arg0
, 0);
11380 return fold_build2_loc (loc
, EQ_EXPR
, type
,
11381 fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
), tem
,
11382 build_int_cst (TREE_TYPE (tem
), 1)),
11383 build_int_cst (TREE_TYPE (tem
), 0));
11385 /* Fold !X & 1 as X == 0. */
11386 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11387 && integer_onep (arg1
))
11389 tem
= TREE_OPERAND (arg0
, 0);
11390 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
11391 build_int_cst (TREE_TYPE (tem
), 0));
11394 /* Fold (X ^ Y) & Y as ~X & Y. */
11395 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11396 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11398 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11399 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11400 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11401 fold_convert_loc (loc
, type
, arg1
));
11403 /* Fold (X ^ Y) & X as ~Y & X. */
11404 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11405 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11406 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11408 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11409 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11410 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11411 fold_convert_loc (loc
, type
, arg1
));
11413 /* Fold X & (X ^ Y) as X & ~Y. */
11414 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11415 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11417 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11418 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11419 fold_convert_loc (loc
, type
, arg0
),
11420 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11422 /* Fold X & (Y ^ X) as ~Y & X. */
11423 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11424 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11425 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11427 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11428 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11429 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11430 fold_convert_loc (loc
, type
, arg0
));
11433 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11434 multiple of 1 << CST. */
11435 if (TREE_CODE (arg1
) == INTEGER_CST
)
11437 double_int cst1
= tree_to_double_int (arg1
);
11438 double_int ncst1
= double_int_ext (double_int_neg (cst1
),
11439 TYPE_PRECISION (TREE_TYPE (arg1
)),
11440 TYPE_UNSIGNED (TREE_TYPE (arg1
)));
11441 if (double_int_equal_p (double_int_and (cst1
, ncst1
), ncst1
)
11442 && multiple_of_p (type
, arg0
,
11443 double_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
11444 return fold_convert_loc (loc
, type
, arg0
);
      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
	 ((A & N) + B) & M -> (A + B) & M
	 Similarly if (N & M) == 0,
	 ((A | N) + B) & M -> (A + B) & M
	 and for - instead of + (or unary - instead of +)
	 and/or ^ instead of |.
	 If B is constant and (B & M) == 0, fold into A & M.  */
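      /* E.g. ((a & 0xff00ff) + b) & 0xff: here M is 0xff and
	 (0xff00ff & 0xff) == 0xff == M, so the inner masking cannot change
	 the low byte of the sum and this folds to (a + b) & 0xff.  */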
11454 if (host_integerp (arg1
, 1))
11456 unsigned HOST_WIDE_INT cst1
= tree_low_cst (arg1
, 1);
11457 if (~cst1
&& (cst1
& (cst1
+ 1)) == 0
11458 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11459 && (TREE_CODE (arg0
) == PLUS_EXPR
11460 || TREE_CODE (arg0
) == MINUS_EXPR
11461 || TREE_CODE (arg0
) == NEGATE_EXPR
)
11462 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
))
11463 || TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
))
11467 unsigned HOST_WIDE_INT cst0
;
11469 /* Now we know that arg0 is (C + D) or (C - D) or
11470 -C and arg1 (M) is == (1LL << cst) - 1.
11471 Store C into PMOP[0] and D into PMOP[1]. */
11472 pmop
[0] = TREE_OPERAND (arg0
, 0);
11474 if (TREE_CODE (arg0
) != NEGATE_EXPR
)
11476 pmop
[1] = TREE_OPERAND (arg0
, 1);
11480 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0
)), 1)
11481 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0
)), 1)
11485 for (; which
>= 0; which
--)
11486 switch (TREE_CODE (pmop
[which
]))
11491 if (TREE_CODE (TREE_OPERAND (pmop
[which
], 1))
11494 /* tree_low_cst not used, because we don't care about
11496 cst0
= TREE_INT_CST_LOW (TREE_OPERAND (pmop
[which
], 1));
11498 if (TREE_CODE (pmop
[which
]) == BIT_AND_EXPR
)
11503 else if (cst0
!= 0)
11505 /* If C or D is of the form (A & N) where
11506 (N & M) == M, or of the form (A | N) or
11507 (A ^ N) where (N & M) == 0, replace it with A. */
11508 pmop
[which
] = TREE_OPERAND (pmop
[which
], 0);
11511 /* If C or D is a N where (N & M) == 0, it can be
11512 omitted (assumed 0). */
11513 if ((TREE_CODE (arg0
) == PLUS_EXPR
11514 || (TREE_CODE (arg0
) == MINUS_EXPR
&& which
== 0))
11515 && (TREE_INT_CST_LOW (pmop
[which
]) & cst1
) == 0)
11516 pmop
[which
] = NULL
;
11522 /* Only build anything new if we optimized one or both arguments
11524 if (pmop
[0] != TREE_OPERAND (arg0
, 0)
11525 || (TREE_CODE (arg0
) != NEGATE_EXPR
11526 && pmop
[1] != TREE_OPERAND (arg0
, 1)))
11528 tree utype
= TREE_TYPE (arg0
);
11529 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
11531 /* Perform the operations in a type that has defined
11532 overflow behavior. */
11533 utype
= unsigned_type_for (TREE_TYPE (arg0
));
11534 if (pmop
[0] != NULL
)
11535 pmop
[0] = fold_convert_loc (loc
, utype
, pmop
[0]);
11536 if (pmop
[1] != NULL
)
11537 pmop
[1] = fold_convert_loc (loc
, utype
, pmop
[1]);
11540 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
11541 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[0]);
11542 else if (TREE_CODE (arg0
) == PLUS_EXPR
)
11544 if (pmop
[0] != NULL
&& pmop
[1] != NULL
)
11545 tem
= fold_build2_loc (loc
, PLUS_EXPR
, utype
,
11547 else if (pmop
[0] != NULL
)
11549 else if (pmop
[1] != NULL
)
11552 return build_int_cst (type
, 0);
11554 else if (pmop
[0] == NULL
)
11555 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[1]);
11557 tem
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
11559 /* TEM is now the new binary +, - or unary - replacement. */
11560 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, utype
, tem
,
11561 fold_convert_loc (loc
, utype
, arg1
));
11562 return fold_convert_loc (loc
, type
, tem
);
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return
	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	}
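      /* E.g. for unsigned char c, ((int) c & 0377) masks with 255, which
	 already covers every bit the widened value can carry, so the AND is
	 redundant and the whole expression folds to (int) c.  */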
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				  build2 (BIT_IOR_EXPR, type,
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg0, 0)),
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg1, 0))));
	}
      /* If arg0 is derived from the address of an object or function, we may
	 be able to fold this expression using the object or function's
	 alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
	{
	  unsigned HOST_WIDE_INT modulus, residue;
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
						     integer_onep (arg1));

	  /* This works because modulus is a power of 2.  If this weren't the
	     case, we'd have to replace it by its greatest power-of-2
	     divisor: modulus & -modulus.  */
	  if (low < modulus)
	    return build_int_cst (type, residue & low);
	}
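      /* For instance, if arg0 is the address of an object known to be
	 16-byte aligned, addr & 15 folds here to the constant 0, and no AND
	 is emitted at run time (illustrative example).  */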
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	 if the new mask might be further optimized.  */
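      /* E.g. for an 8-bit unsigned value, (x << 4) & 0xf3 can widen its mask
	 to 0xf3 | 0x0f == 0xff, since the low four bits are already zero
	 after the shift; an all-ones mask then disappears entirely.  */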
11622 if ((TREE_CODE (arg0
) == LSHIFT_EXPR
11623 || TREE_CODE (arg0
) == RSHIFT_EXPR
)
11624 && host_integerp (TREE_OPERAND (arg0
, 1), 1)
11625 && host_integerp (arg1
, TYPE_UNSIGNED (TREE_TYPE (arg1
)))
11626 && tree_low_cst (TREE_OPERAND (arg0
, 1), 1)
11627 < TYPE_PRECISION (TREE_TYPE (arg0
))
11628 && TYPE_PRECISION (TREE_TYPE (arg0
)) <= HOST_BITS_PER_WIDE_INT
11629 && tree_low_cst (TREE_OPERAND (arg0
, 1), 1) > 0)
11631 unsigned int shiftc
= tree_low_cst (TREE_OPERAND (arg0
, 1), 1);
11632 unsigned HOST_WIDE_INT mask
11633 = tree_low_cst (arg1
, TYPE_UNSIGNED (TREE_TYPE (arg1
)));
11634 unsigned HOST_WIDE_INT newmask
, zerobits
= 0;
11635 tree shift_type
= TREE_TYPE (arg0
);
11637 if (TREE_CODE (arg0
) == LSHIFT_EXPR
)
11638 zerobits
= ((((unsigned HOST_WIDE_INT
) 1) << shiftc
) - 1);
11639 else if (TREE_CODE (arg0
) == RSHIFT_EXPR
11640 && TYPE_PRECISION (TREE_TYPE (arg0
))
11641 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0
))))
11643 unsigned int prec
= TYPE_PRECISION (TREE_TYPE (arg0
));
11644 tree arg00
= TREE_OPERAND (arg0
, 0);
11645 /* See if more bits can be proven as zero because of
11647 if (TREE_CODE (arg00
) == NOP_EXPR
11648 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00
, 0))))
11650 tree inner_type
= TREE_TYPE (TREE_OPERAND (arg00
, 0));
11651 if (TYPE_PRECISION (inner_type
)
11652 == GET_MODE_BITSIZE (TYPE_MODE (inner_type
))
11653 && TYPE_PRECISION (inner_type
) < prec
)
11655 prec
= TYPE_PRECISION (inner_type
);
11656 /* See if we can shorten the right shift. */
11658 shift_type
= inner_type
;
11661 zerobits
= ~(unsigned HOST_WIDE_INT
) 0;
11662 zerobits
>>= HOST_BITS_PER_WIDE_INT
- shiftc
;
11663 zerobits
<<= prec
- shiftc
;
11664 /* For arithmetic shift if sign bit could be set, zerobits
11665 can contain actually sign bits, so no transformation is
11666 possible, unless MASK masks them all away. In that
11667 case the shift needs to be converted into logical shift. */
11668 if (!TYPE_UNSIGNED (TREE_TYPE (arg0
))
11669 && prec
== TYPE_PRECISION (TREE_TYPE (arg0
)))
11671 if ((mask
& zerobits
) == 0)
11672 shift_type
= unsigned_type_for (TREE_TYPE (arg0
));
11678 /* ((X << 16) & 0xff00) is (X, 0). */
11679 if ((mask
& zerobits
) == mask
)
11680 return omit_one_operand_loc (loc
, type
,
11681 build_int_cst (type
, 0), arg0
);
11683 newmask
= mask
| zerobits
;
11684 if (newmask
!= mask
&& (newmask
& (newmask
+ 1)) == 0)
11688 /* Only do the transformation if NEWMASK is some integer
11690 for (prec
= BITS_PER_UNIT
;
11691 prec
< HOST_BITS_PER_WIDE_INT
; prec
<<= 1)
11692 if (newmask
== (((unsigned HOST_WIDE_INT
) 1) << prec
) - 1)
11694 if (prec
< HOST_BITS_PER_WIDE_INT
11695 || newmask
== ~(unsigned HOST_WIDE_INT
) 0)
11699 if (shift_type
!= TREE_TYPE (arg0
))
11701 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), shift_type
,
11702 fold_convert_loc (loc
, shift_type
,
11703 TREE_OPERAND (arg0
, 0)),
11704 TREE_OPERAND (arg0
, 1));
11705 tem
= fold_convert_loc (loc
, type
, tem
);
11709 newmaskt
= build_int_cst_type (TREE_TYPE (op1
), newmask
);
11710 if (!tree_int_cst_equal (newmaskt
, arg1
))
11711 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
, newmaskt
);
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
	}
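      /* E.g. when NaNs and infinities are not honored (-ffinite-math-only),
	 x / x for a scalar float x folds to 1.0, with x still evaluated for
	 its side effects via omit_two_operands_loc.  */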
      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
	    }
	}
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				TREE_OPERAND (arg0, 0),
				negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				negate_expr (arg0),
				TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
						      negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -freciprocal-math.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (optimize
	  && (TREE_CODE (arg1) == REAL_CST
	      || (TREE_CODE (arg1) == COMPLEX_CST
		  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
	      || (TREE_CODE (arg1) == VECTOR_CST
		  && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
	{
	  if (flag_reciprocal_math
	      && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.
	     TODO: Complex reciprocal not implemented.  */
	  if (TREE_CODE (arg1) != COMPLEX_CST)
	    {
	      tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

	      if (inverse)
		return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
	    }
	}
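      /* E.g. with -freciprocal-math, x / 8.0 becomes x * 0.125; for a
	 power-of-two divisor the reciprocal is exact, so that particular
	 case loses no precision.  */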
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, MULT_EXPR, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_build2_loc (loc, RDIV_EXPR, type, arg0,
						 TREE_OPERAND (arg1, 0)),
				TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1));
	  if (tem)
	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg1, 0));
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_call_expr_loc (loc, tanfn, 1,
					    CALL_EXPR_ARG (arg0, 0));
	    }
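	  /* E.g. sin (a) / cos (a) becomes tan (a) here; the rewrite only
	     fires when both calls carry the same argument tree, as checked
	     by operand_equal_p.  */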
11845 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11846 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
11847 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
11848 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
11849 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11850 CALL_EXPR_ARG (arg1
, 0), 0))
11852 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11854 if (tanfn
!= NULL_TREE
)
11856 tree tmp
= build_call_expr_loc (loc
, tanfn
, 1,
11857 CALL_EXPR_ARG (arg0
, 0));
11858 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11859 build_real (type
, dconst1
), tmp
);
11863 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11864 NaNs or Infinities. */
11865 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
11866 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
11867 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
11869 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11870 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11872 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
11873 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
11874 && operand_equal_p (arg00
, arg01
, 0))
11876 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11878 if (cosfn
!= NULL_TREE
)
11879 return build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11883 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11884 NaNs or Infinities. */
11885 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
11886 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
11887 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
11889 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11890 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11892 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
11893 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
11894 && operand_equal_p (arg00
, arg01
, 0))
11896 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11898 if (cosfn
!= NULL_TREE
)
11900 tree tmp
= build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11901 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11902 build_real (type
, dconst1
),
	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		}
	    }
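	  /* E.g. pow (x, 3.0) / x folds to pow (x, 2.0): the constant
	     exponent is simply decremented by one.  */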
	  /* Optimize a/root(b/c) into a*root(c/b).  */
	  if (BUILTIN_ROOT_P (fcode1))
	    {
	      tree rootarg = CALL_EXPR_ARG (arg1, 0);

	      if (TREE_CODE (rootarg) == RDIV_EXPR)
		{
		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree b = TREE_OPERAND (rootarg, 0);
		  tree c = TREE_OPERAND (rootarg, 1);
		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
		}
	    }
	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr_loc (loc,
					  expfn, 1,
					  fold_convert_loc (loc, type, arg));
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert_loc (loc, type,
					     negate_expr (arg11));
	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
	 to X >> log2(A).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
	{
	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
				      arg1, TREE_OPERAND (arg0, 1));
	  if (sum && integer_zerop (sum)) {
	    unsigned long pow2;

	    if (TREE_INT_CST_LOW (arg1))
	      pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
	    else
	      pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
		     + HOST_BITS_PER_WIDE_INT;

	    return fold_build2_loc (loc, RSHIFT_EXPR, type,
				    TREE_OPERAND (arg0, 0),
				    build_int_cst (integer_type_node, pow2));
	  }
	}
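      /* E.g. (x & -8) / 8 for signed x: the AND clears the three low bits,
	 so the division is exact and folds to x >> 3.  */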
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      unsigned long pow2;

	      if (TREE_INT_CST_LOW (sval))
		pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
	      else
		pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
		       + HOST_BITS_PER_WIDE_INT;

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt,
					build_int_cst (TREE_TYPE (sh_cnt),
						       pow2));
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      sh_cnt);
	    }
	}

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_UNSIGNED (type)
	  && code == FLOOR_DIV_EXPR)
	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
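      /* E.g. for unsigned a, a / (4u << n) folds to a >> (n + 2), since
	 log2 (4) == 2 is simply added to the shift count.  */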
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert_loc (loc, type, negate_expr (arg0));
      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg0, 0)),
				  fold_convert_loc (loc, type,
						    negate_expr (arg1)));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    negate_expr (arg0)),
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg1, 0)));
	}
      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}
      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N) where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask
		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				   build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_convert_loc (loc, type, mask));
	    }
	}
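      /* E.g. unsigned x % 8 becomes x & 7, and x % (2 << n) becomes
	 x & ((2 << n) - 1), trading the modulus for a cheap mask.  */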
      if (integer_all_onesp (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	  && tree_expr_nonnegative_p (arg1))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ... */

      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
			       + TREE_INT_CST_LOW (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= TYPE_PRECISION (type))
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % TYPE_PRECISION (type);
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
					     TREE_OPERAND (arg0, 0));
	      else
		low = TYPE_PRECISION (type) - 1;
	    }

	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				  build_int_cst (type, low));
	}
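      /* E.g. (x << 3) << 4 folds to x << 7.  If the combined count reaches
	 the precision, an unsigned or left shift folds to 0, a rotate count
	 wraps modulo the precision, and an arithmetic right shift is clamped
	 to precision - 1, as handled above.  */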
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	      lshift = build_int_cst (type, -1);
	      lshift = int_const_binop (code, lshift, arg1);

	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
	    }
	}
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1),
				    TYPE_PRECISION (type));
	  tem = const_binop (MINUS_EXPR, tem, arg1);
	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	}
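      /* E.g. rotating a 32-bit value left by 8 is rewritten here as rotating
	 it right by 24, so later folds only have to consider one rotate
	 direction.  */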
12293 /* If we have a rotate of a bit operation with the rotate count and
12294 the second operand of the bit operation both constant,
12295 permute the two operations. */
12296 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12297 && (TREE_CODE (arg0
) == BIT_AND_EXPR
12298 || TREE_CODE (arg0
) == BIT_IOR_EXPR
12299 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12300 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12301 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
12302 fold_build2_loc (loc
, code
, type
,
12303 TREE_OPERAND (arg0
, 0), arg1
),
12304 fold_build2_loc (loc
, code
, type
,
12305 TREE_OPERAND (arg0
, 1), arg1
));
12307 /* Two consecutive rotates adding up to the precision of the
12308 type can be ignored. */
12309 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12310 && TREE_CODE (arg0
) == RROTATE_EXPR
12311 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12312 && TREE_INT_CST_HIGH (arg1
) == 0
12313 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == 0
12314 && ((TREE_INT_CST_LOW (arg1
)
12315 + TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)))
12316 == (unsigned int) TYPE_PRECISION (type
)))
12317 return TREE_OPERAND (arg0
, 0);
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2_loc (loc, code, type,
				       fold_convert_loc (loc, type,
							 TREE_OPERAND (arg0, 1)),
				       arg1);
	  tree shift = fold_build2_loc (loc, code, type,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)),
					arg1);
	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}
12343 if (operand_equal_p (arg0
, arg1
, 0))
12344 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12345 if (INTEGRAL_TYPE_P (type
)
12346 && operand_equal_p (arg1
, TYPE_MIN_VALUE (type
), OEP_ONLY_CONST
))
12347 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12348 tem
= fold_minmax (loc
, MIN_EXPR
, type
, arg0
, arg1
);
12354 if (operand_equal_p (arg0
, arg1
, 0))
12355 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12356 if (INTEGRAL_TYPE_P (type
)
12357 && TYPE_MAX_VALUE (type
)
12358 && operand_equal_p (arg1
, TYPE_MAX_VALUE (type
), OEP_ONLY_CONST
))
12359 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12360 tem
= fold_minmax (loc
, MAX_EXPR
, type
, arg0
, arg1
);
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;
12420 case TRUTH_ORIF_EXPR
:
12421 /* Note that the operands of this must be ints
12422 and their values must be 0 or true.
12423 ("true" is a fixed value perhaps depending on the language.) */
12424 /* If first arg is constant true, return it. */
12425 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12426 return fold_convert_loc (loc
, type
, arg0
);
12427 case TRUTH_OR_EXPR
:
12428 /* If either arg is constant zero, drop it. */
12429 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
12430 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12431 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
12432 /* Preserve sequence points. */
12433 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12434 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12435 /* If second arg is constant true, result is true, but we must
12436 evaluate first arg. */
12437 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
12438 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12439 /* Likewise for first arg, but note this only occurs here for
12441 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12442 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12444 /* !X || X is always true. */
12445 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12446 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12447 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12448 /* X || !X is always true. */
12449 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12450 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12451 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;
:
12482 /* If the second arg is constant zero, drop it. */
12483 if (integer_zerop (arg1
))
12484 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12485 /* If the second arg is constant true, this is a logical inversion. */
12486 if (integer_onep (arg1
))
12488 /* Only call invert_truthvalue if operand is a truth value. */
12489 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
12490 tem
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, TREE_TYPE (arg0
), arg0
);
12492 tem
= invert_truthvalue_loc (loc
, arg0
);
12493 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
12495 /* Identical arguments cancel to zero. */
12496 if (operand_equal_p (arg0
, arg1
, 0))
12497 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12499 /* !X ^ X is always true. */
12500 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12501 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12502 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12504 /* X ^ !X is always true. */
12505 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12506 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12507 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12516 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12517 if (tem
!= NULL_TREE
)
12520 /* bool_var != 0 becomes bool_var. */
12521 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12522 && code
== NE_EXPR
)
12523 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12525 /* bool_var == 1 becomes bool_var. */
12526 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12527 && code
== EQ_EXPR
)
12528 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12530 /* bool_var != 1 becomes !bool_var. */
12531 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12532 && code
== NE_EXPR
)
12533 return fold_convert_loc (loc
, type
,
12534 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12535 TREE_TYPE (arg0
), arg0
));
12537 /* bool_var == 0 becomes !bool_var. */
12538 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12539 && code
== EQ_EXPR
)
12540 return fold_convert_loc (loc
, type
,
12541 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12542 TREE_TYPE (arg0
), arg0
));
12544 /* !exp != 0 becomes !exp */
12545 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
12546 && code
== NE_EXPR
)
12547 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12549 /* If this is an equality comparison of the address of two non-weak,
12550 unaliased symbols neither of which are extern (since we do not
12551 have access to attributes for externs), then we know the result. */
12552 if (TREE_CODE (arg0
) == ADDR_EXPR
12553 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0
, 0))
12554 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
12555 && ! lookup_attribute ("alias",
12556 DECL_ATTRIBUTES (TREE_OPERAND (arg0
, 0)))
12557 && ! DECL_EXTERNAL (TREE_OPERAND (arg0
, 0))
12558 && TREE_CODE (arg1
) == ADDR_EXPR
12559 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1
, 0))
12560 && ! DECL_WEAK (TREE_OPERAND (arg1
, 0))
12561 && ! lookup_attribute ("alias",
12562 DECL_ATTRIBUTES (TREE_OPERAND (arg1
, 0)))
12563 && ! DECL_EXTERNAL (TREE_OPERAND (arg1
, 0)))
12565 /* We know that we're looking at the address of two
12566 non-weak, unaliased, static _DECL nodes.
12568 It is both wasteful and incorrect to call operand_equal_p
12569 to compare the two ADDR_EXPR nodes. It is wasteful in that
12570 all we need to do is test pointer equality for the arguments
12571 to the two ADDR_EXPR nodes. It is incorrect to use
12572 operand_equal_p as that function is NOT equivalent to a
12573 C equality test. It can in fact return false for two
12574 objects which would test as equal using the C equality
12576 bool equal
= TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0);
12577 return constant_boolean_node (equal
12578 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
12582 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12583 a MINUS_EXPR of a constant, we can convert it into a comparison with
12584 a revised constant as long as no overflow occurs. */
12585 if (TREE_CODE (arg1
) == INTEGER_CST
12586 && (TREE_CODE (arg0
) == PLUS_EXPR
12587 || TREE_CODE (arg0
) == MINUS_EXPR
)
12588 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12589 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
12590 ? MINUS_EXPR
: PLUS_EXPR
,
12591 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12593 TREE_OPERAND (arg0
, 1)))
12594 && !TREE_OVERFLOW (tem
))
12595 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
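      /* E.g. x + 3 == 7 becomes x == 4, provided folding 7 - 3 in the
	 operand's type does not overflow.  */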
      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg0),
						 fold_convert_loc (loc,
								   TREE_TYPE (arg0),
								   arg1),
						 TREE_OPERAND (arg0, 1)));
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									0)),
			      arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
					fold_build2_loc (loc, code, type,
							 val,
							 build_int_cst (TREE_TYPE (val),
									0)),
					TREE_OPERAND (arg0, 0), arg1);
	}
      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									1)),
			      arg1, 0)
	  && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
	return omit_two_operands_loc (loc, type,
				      code == NE_EXPR
				      ? boolean_true_node : boolean_false_node,
				      TREE_OPERAND (arg0, 1), arg1);
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					  arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1), tem),
				      arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					  arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1), tem),
				      arg1);
	    }
	}
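      /* E.g. ((1 << n) & flags) == 0 becomes ((flags >> n) & 1) == 0, which
	 avoids materializing the shifted constant as a first operand on
	 two-operand targets.  */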
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
				  fold_convert_loc (loc, newtype, arg1));
	}
12718 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12719 C1 is a valid shift constant, and C2 is a power of two, i.e.
12721 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12722 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
12723 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
12725 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12726 && integer_zerop (arg1
))
12728 tree itype
= TREE_TYPE (arg0
);
12729 unsigned HOST_WIDE_INT prec
= TYPE_PRECISION (itype
);
12730 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
12732 /* Check for a valid shift count. */
12733 if (TREE_INT_CST_HIGH (arg001
) == 0
12734 && TREE_INT_CST_LOW (arg001
) < prec
)
12736 tree arg01
= TREE_OPERAND (arg0
, 1);
12737 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12738 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
12739 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12740 can be rewritten as (X & (C2 << C1)) != 0. */
12741 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
12743 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
12744 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
12745 return fold_build2_loc (loc
, code
, type
, tem
,
12746 fold_convert_loc (loc
, itype
, arg1
));
12748 /* Otherwise, for signed (arithmetic) shifts,
12749 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12750 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12751 else if (!TYPE_UNSIGNED (itype
))
12752 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
12753 arg000
, build_int_cst (itype
, 0));
12754 /* Otherwise, of unsigned (logical) shifts,
12755 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12756 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12758 return omit_one_operand_loc (loc
, type
,
12759 code
== EQ_EXPR
? integer_one_node
12760 : integer_zero_node
,
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							integer_zero_node));
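      /* E.g. (flags & 8) == 8 becomes (flags & 8) != 0: since 8 is a single
	 bit, equality to 8 and being nonzero coincide for the masked
	 value.  */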
      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;
12780 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12781 Similarly for NE_EXPR. */
12782 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12783 && TREE_CODE (arg1
) == INTEGER_CST
12784 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12786 tree notc
= fold_build1_loc (loc
, BIT_NOT_EXPR
,
12787 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
12788 TREE_OPERAND (arg0
, 1));
12790 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12791 fold_convert_loc (loc
, TREE_TYPE (arg0
), arg1
),
12793 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12794 if (integer_nonzerop (dandnotc
))
12795 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12798 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12799 Similarly for NE_EXPR. */
12800 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
12801 && TREE_CODE (arg1
) == INTEGER_CST
12802 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12804 tree notd
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
);
12806 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12807 TREE_OPERAND (arg0
, 1),
12808 fold_convert_loc (loc
, TREE_TYPE (arg0
), notd
));
12809 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12810 if (integer_nonzerop (candnotd
))
12811 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
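      /* E.g. strlen (s) != 0 becomes *s != 0 here, replacing the library
	 call with a single character load when only emptiness is tested.  */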
12850 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12851 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12852 if (TREE_CODE (arg0
) == RSHIFT_EXPR
12853 && integer_zerop (arg1
)
12854 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12856 tree arg00
= TREE_OPERAND (arg0
, 0);
12857 tree arg01
= TREE_OPERAND (arg0
, 1);
12858 tree itype
= TREE_TYPE (arg00
);
12859 if (TREE_INT_CST_HIGH (arg01
) == 0
12860 && TREE_INT_CST_LOW (arg01
)
12861 == (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (itype
) - 1))
12863 if (TYPE_UNSIGNED (itype
))
12865 itype
= signed_type_for (itype
);
12866 arg00
= fold_convert_loc (loc
, itype
, arg00
);
12868 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
12869 type
, arg00
, build_int_cst (itype
, 0));
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				build_int_cst (TREE_TYPE (arg0), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				build_int_cst (TREE_TYPE (arg0), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg1),
						 TREE_OPERAND (arg0, 1), arg1));
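      /* E.g. (x ^ 5) == 3 becomes x == 6, because 5 ^ 3 == 6 and XOR by a
	 constant is an invertible mapping.  */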
12899 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12900 (X & C) == 0 when C is a single bit. */
12901 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12902 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
12903 && integer_zerop (arg1
)
12904 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12906 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12907 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
12908 TREE_OPERAND (arg0
, 1));
12909 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
12911 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12915 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12916 constant C is a power of two, i.e. a single bit. */
12917 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12918 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
12919 && integer_zerop (arg1
)
12920 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12921 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12922 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12924 tree arg00
= TREE_OPERAND (arg0
, 0);
12925 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12926 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
12929 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12930 when is C is a power of two, i.e. a single bit. */
12931 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12932 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
12933 && integer_zerop (arg1
)
12934 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12935 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12936 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12938 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12939 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
12940 arg000
, TREE_OPERAND (arg0
, 1));
12941 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12942 tem
, build_int_cst (TREE_TYPE (tem
), 0));
12945 if (integer_zerop (arg1
)
12946 && tree_expr_nonzero_p (arg0
))
12948 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
12949 return omit_one_operand_loc (loc
, type
, res
, arg0
);
12952 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12953 if (TREE_CODE (arg0
) == NEGATE_EXPR
12954 && TREE_CODE (arg1
) == NEGATE_EXPR
)
12955 return fold_build2_loc (loc
, code
, type
,
12956 TREE_OPERAND (arg0
, 0),
12957 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12958 TREE_OPERAND (arg1
, 0)));
12960 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12961 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12962 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
12964 tree arg00
= TREE_OPERAND (arg0
, 0);
12965 tree arg01
= TREE_OPERAND (arg0
, 1);
12966 tree arg10
= TREE_OPERAND (arg1
, 0);
12967 tree arg11
= TREE_OPERAND (arg1
, 1);
12968 tree itype
= TREE_TYPE (arg0
);
12970 if (operand_equal_p (arg01
, arg11
, 0))
12971 return fold_build2_loc (loc
, code
, type
,
12972 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12973 fold_build2_loc (loc
,
12974 BIT_XOR_EXPR
, itype
,
12977 build_int_cst (itype
, 0));
12979 if (operand_equal_p (arg01
, arg10
, 0))
12980 return fold_build2_loc (loc
, code
, type
,
12981 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12982 fold_build2_loc (loc
,
12983 BIT_XOR_EXPR
, itype
,
12986 build_int_cst (itype
, 0));
12988 if (operand_equal_p (arg00
, arg11
, 0))
12989 return fold_build2_loc (loc
, code
, type
,
12990 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12991 fold_build2_loc (loc
,
12992 BIT_XOR_EXPR
, itype
,
12995 build_int_cst (itype
, 0));
12997 if (operand_equal_p (arg00
, arg10
, 0))
12998 return fold_build2_loc (loc
, code
, type
,
12999 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
13000 fold_build2_loc (loc
,
13001 BIT_XOR_EXPR
, itype
,
13004 build_int_cst (itype
, 0));
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == BIT_XOR_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
             operand_equal_p guarantees no side-effects so we don't need
             to use omit_one_operand on Z.  */
          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg10));
          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg11));
          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg10));
          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg11));

          /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
          if (TREE_CODE (arg01) == INTEGER_CST
              && TREE_CODE (arg11) == INTEGER_CST)
            {
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
                                     fold_convert_loc (loc, itype, arg11));
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
              return fold_build2_loc (loc, code, type, tem,
                                      fold_convert_loc (loc, itype, arg10));
            }
        }
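
      /* Illustrative instance of the constant case above (example added
         for exposition, not part of the original sources):
         (x ^ 3) == (y ^ 5) folds to (x ^ (3 ^ 5)) == y, i.e. (x ^ 6) == y.  */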
      /* Attempt to simplify equality/inequality comparisons of complex
         values.  Only lower the comparison if the result is known or
         can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
           || TREE_CODE (arg0) == COMPLEX_CST)
          && (TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree real0, imag0, real1, imag1;
          tree rcond, icond;

          if (TREE_CODE (arg0) == COMPLEX_EXPR)
            {
              real0 = TREE_OPERAND (arg0, 0);
              imag0 = TREE_OPERAND (arg0, 1);
            }
          else
            {
              real0 = TREE_REALPART (arg0);
              imag0 = TREE_IMAGPART (arg0);
            }

          if (TREE_CODE (arg1) == COMPLEX_EXPR)
            {
              real1 = TREE_OPERAND (arg1, 0);
              imag1 = TREE_OPERAND (arg1, 1);
            }
          else
            {
              real1 = TREE_REALPART (arg1);
              imag1 = TREE_IMAGPART (arg1);
            }

          rcond = fold_binary_loc (loc, code, type, real0, real1);
          if (rcond && TREE_CODE (rcond) == INTEGER_CST)
            {
              if (integer_zerop (rcond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_false_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_true_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
                }
            }

          icond = fold_binary_loc (loc, code, type, imag0, imag1);
          if (icond && TREE_CODE (icond) == INTEGER_CST)
            {
              if (integer_zerop (icond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_false_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_true_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
                }
            }
        }
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
               && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
              || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          enum tree_code code0 = TREE_CODE (arg0);
          int is_positive;

          if (TREE_CODE (arg01) == REAL_CST)
            is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
          else
            is_positive = tree_int_cst_sgn (arg01);
          /* (X - c) > X becomes false.  */
          if (code == GT_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that (X - c) > X "
                                        "is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }
          /* Likewise (X + c) < X becomes false.  */
          if (code == LT_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) < X is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }
          /* Convert (X - c) <= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == LE_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X - c) <= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }
          /* Convert (X + c) >= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == GE_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) >= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }
          if (TREE_CODE (arg01) == INTEGER_CST)
            {
              /* Convert X + c > X and X - c < X to true for integers.  */
              if (code == GT_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) > X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              if (code == LT_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) < X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              /* Convert X + c <= X and X - c >= X to false for integers.  */
              if (code == LE_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) <= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }

              if (code == GE_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) >= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }
            }
        }
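
      /* Illustrative instance of the integer cases above (example added
         for exposition, not part of the original sources): with signed
         overflow treated as undefined, x + 1 > x folds to 1 and
         x + 1 <= x folds to 0.  */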
13259 /* Comparisons with the highest or lowest possible integer of
13260 the specified precision will have known values. */
13262 tree arg1_type
= TREE_TYPE (arg1
);
13263 unsigned int width
= TYPE_PRECISION (arg1_type
);
13265 if (TREE_CODE (arg1
) == INTEGER_CST
13266 && width
<= 2 * HOST_BITS_PER_WIDE_INT
13267 && (INTEGRAL_TYPE_P (arg1_type
) || POINTER_TYPE_P (arg1_type
)))
13269 HOST_WIDE_INT signed_max_hi
;
13270 unsigned HOST_WIDE_INT signed_max_lo
;
13271 unsigned HOST_WIDE_INT max_hi
, max_lo
, min_hi
, min_lo
;
13273 if (width
<= HOST_BITS_PER_WIDE_INT
)
13275 signed_max_lo
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
13280 if (TYPE_UNSIGNED (arg1_type
))
13282 max_lo
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
13288 max_lo
= signed_max_lo
;
13289 min_lo
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
13295 width
-= HOST_BITS_PER_WIDE_INT
;
13296 signed_max_lo
= -1;
13297 signed_max_hi
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
13302 if (TYPE_UNSIGNED (arg1_type
))
13304 max_hi
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
13309 max_hi
= signed_max_hi
;
13310 min_hi
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
13314 if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
) == max_hi
13315 && TREE_INT_CST_LOW (arg1
) == max_lo
)
13319 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13322 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
13325 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13328 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
13330 /* The GE_EXPR and LT_EXPR cases above are not normally
13331 reached because of previous transformations. */
13336 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
13338 && TREE_INT_CST_LOW (arg1
) == max_lo
- 1)
13342 arg1
= const_binop (PLUS_EXPR
, arg1
,
13343 build_int_cst (TREE_TYPE (arg1
), 1));
13344 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13345 fold_convert_loc (loc
,
13346 TREE_TYPE (arg1
), arg0
),
13349 arg1
= const_binop (PLUS_EXPR
, arg1
,
13350 build_int_cst (TREE_TYPE (arg1
), 1));
13351 return fold_build2_loc (loc
, NE_EXPR
, type
,
13352 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13358 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
13360 && TREE_INT_CST_LOW (arg1
) == min_lo
)
13364 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13367 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
13370 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13373 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
13378 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
13380 && TREE_INT_CST_LOW (arg1
) == min_lo
+ 1)
13384 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
);
13385 return fold_build2_loc (loc
, NE_EXPR
, type
,
13386 fold_convert_loc (loc
,
13387 TREE_TYPE (arg1
), arg0
),
13390 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
);
13391 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13392 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13399 else if (TREE_INT_CST_HIGH (arg1
) == signed_max_hi
13400 && TREE_INT_CST_LOW (arg1
) == signed_max_lo
13401 && TYPE_UNSIGNED (arg1_type
)
13402 /* We will flip the signedness of the comparison operator
13403 associated with the mode of arg1, so the sign bit is
13404 specified by this mode. Check that arg1 is the signed
13405 max associated with this sign bit. */
13406 && width
== GET_MODE_BITSIZE (TYPE_MODE (arg1_type
))
13407 /* signed_type does not work on pointer types. */
13408 && INTEGRAL_TYPE_P (arg1_type
))
13410 /* The following case also applies to X < signed_max+1
13411 and X >= signed_max+1 because previous transformations. */
13412 if (code
== LE_EXPR
|| code
== GT_EXPR
)
13415 st
= signed_type_for (TREE_TYPE (arg1
));
13416 return fold_build2_loc (loc
,
13417 code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
13418 type
, fold_convert_loc (loc
, st
, arg0
),
13419 build_int_cst (st
, 0));
      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      if (code == LE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == ABS_EXPR
          && ! TREE_SIDE_EFFECTS (arg0)
          && (0 != (tem = negate_expr (arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                build2 (GE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), tem),
                                build2 (LE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), arg1));
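
      /* Illustrative instance of the range form above (example added for
         exposition, not part of the original sources):
         ABS (x) <= 5 becomes x >= -5 && x <= 5.  */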
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
          && (integer_zerop (arg1)
              || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
                  && real_zerop (arg1)))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type, integer_one_node, arg0);
        }
      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
        }
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                           build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                   TREE_OPERAND (arg1, 1)),
                           build_int_cst (TREE_TYPE (arg0), 0));

      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && CONVERT_EXPR_P (arg1)
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        {
          tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                        TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
          return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                             fold_convert_loc (loc, TREE_TYPE (arg0), tem),
                             build_int_cst (TREE_TYPE (arg0), 0));
        }
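
      /* Illustrative instance of the transformations above (example added
         for exposition, not part of the original sources): for unsigned x,
         x < (1 << y) becomes (x >> y) == 0 and x >= (1 << y) becomes
         (x >> y) != 0.  */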
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
        {
          t1 = fold_relational_const (code, type, arg0, arg1);
          if (t1 != NULL_TREE)
            return t1;
        }

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
          && !flag_trapping_math
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (0, type);
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
        tree targ0 = strip_float_extensions (arg0);
        tree targ1 = strip_float_extensions (arg1);
        tree newtype = TREE_TYPE (targ0);

        if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
          newtype = TREE_TYPE (targ1);

        if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, newtype, targ0),
                                  fold_convert_loc (loc, newtype, targ1));
      }
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
        return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
                                 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
           && TREE_CODE (arg1) == REAL_CST)
          || (TREE_CODE (arg0) == INTEGER_CST
              && TREE_CODE (arg1) == INTEGER_CST))
        return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
          && TREE_CODE (arg1) == IMAGPART_EXPR
          && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
          && operand_equal_p (TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();
13592 case VEC_PACK_TRUNC_EXPR
:
13593 case VEC_PACK_FIX_TRUNC_EXPR
:
13595 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
13598 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
/ 2
13599 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
/ 2);
13600 if (TREE_CODE (arg0
) != VECTOR_CST
|| TREE_CODE (arg1
) != VECTOR_CST
)
13603 elts
= XALLOCAVEC (tree
, nelts
);
13604 if (!vec_cst_ctor_to_array (arg0
, elts
)
13605 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
/ 2))
13608 for (i
= 0; i
< nelts
; i
++)
13610 elts
[i
] = fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
13611 ? NOP_EXPR
: FIX_TRUNC_EXPR
,
13612 TREE_TYPE (type
), elts
[i
]);
13613 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
13617 return build_vector (type
, elts
);
13620 case VEC_WIDEN_MULT_LO_EXPR
:
13621 case VEC_WIDEN_MULT_HI_EXPR
:
13623 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
13626 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2
13627 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
* 2);
13628 if (TREE_CODE (arg0
) != VECTOR_CST
|| TREE_CODE (arg1
) != VECTOR_CST
)
13631 elts
= XALLOCAVEC (tree
, nelts
* 4);
13632 if (!vec_cst_ctor_to_array (arg0
, elts
)
13633 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
* 2))
13636 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_WIDEN_MULT_LO_EXPR
))
13639 for (i
= 0; i
< nelts
; i
++)
13641 elts
[i
] = fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[i
]);
13642 elts
[i
+ nelts
* 2]
13643 = fold_convert_const (NOP_EXPR
, TREE_TYPE (type
),
13644 elts
[i
+ nelts
* 2]);
13645 if (elts
[i
] == NULL_TREE
|| elts
[i
+ nelts
* 2] == NULL_TREE
)
13647 elts
[i
] = const_binop (MULT_EXPR
, elts
[i
], elts
[i
+ nelts
* 2]);
13648 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
13652 return build_vector (type
, elts
);
13657 } /* switch (code) */
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ... */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
13692 /* Fold a ternary expression of code CODE and type TYPE with operands
13693 OP0, OP1, and OP2. Return the folded expression if folding is
13694 successful. Otherwise, return NULL_TREE. */
13697 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
13698 tree op0
, tree op1
, tree op2
)
13701 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
13702 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13704 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
13705 && TREE_CODE_LENGTH (code
) == 3);
13707 /* Strip any conversions that don't change the mode. This is safe
13708 for every expression, except for a comparison expression because
13709 its signedness is derived from its operands. So, in the latter
13710 case, only strip conversions that don't change the signedness.
13712 Note that this is done as an internal manipulation within the
13713 constant folder, in order to find the simplest representation of
13714 the arguments so that their form can be studied. In any cases,
13715 the appropriate type conversions should be put back in the tree
13716 that will get out of the constant folder. */
13737 case COMPONENT_REF
:
13738 if (TREE_CODE (arg0
) == CONSTRUCTOR
13739 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
13741 unsigned HOST_WIDE_INT idx
;
13743 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
13750 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13751 so all simple results must be passed through pedantic_non_lvalue. */
13752 if (TREE_CODE (arg0
) == INTEGER_CST
)
13754 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
13755 tem
= integer_zerop (arg0
) ? op2
: op1
;
13756 /* Only optimize constant conditions when the selected branch
13757 has the same type as the COND_EXPR. This avoids optimizing
13758 away "c ? x : throw", where the throw has a void type.
13759 Avoid throwing away that operand which contains label. */
13760 if ((!TREE_SIDE_EFFECTS (unused_op
)
13761 || !contains_label_p (unused_op
))
13762 && (! VOID_TYPE_P (TREE_TYPE (tem
))
13763 || VOID_TYPE_P (type
)))
13764 return pedantic_non_lvalue_loc (loc
, tem
);
13767 if (operand_equal_p (arg1
, op2
, 0))
13768 return pedantic_omit_one_operand_loc (loc
, type
, arg1
, arg0
);
13770 /* If we have A op B ? A : C, we may be able to convert this to a
13771 simpler expression, depending on the operation and the values
13772 of B and C. Signed zeros prevent all of these transformations,
13773 for reasons given above each one.
13775 Also try swapping the arguments and inverting the conditional. */
13776 if (COMPARISON_CLASS_P (arg0
)
13777 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13778 arg1
, TREE_OPERAND (arg0
, 1))
13779 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1
))))
13781 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
13786 if (COMPARISON_CLASS_P (arg0
)
13787 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13789 TREE_OPERAND (arg0
, 1))
13790 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2
))))
13792 location_t loc0
= expr_location_or (arg0
, loc
);
13793 tem
= fold_truth_not_expr (loc0
, arg0
);
13794 if (tem
&& COMPARISON_CLASS_P (tem
))
13796 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
13802 /* If the second operand is simpler than the third, swap them
13803 since that produces better jump optimization results. */
13804 if (truth_value_p (TREE_CODE (arg0
))
13805 && tree_swap_operands_p (op1
, op2
, false))
13807 location_t loc0
= expr_location_or (arg0
, loc
);
13808 /* See if this can be inverted. If it can't, possibly because
13809 it was a floating-point inequality comparison, don't do
13811 tem
= fold_truth_not_expr (loc0
, arg0
);
13813 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
13816 /* Convert A ? 1 : 0 to simply A. */
13817 if (integer_onep (op1
)
13818 && integer_zerop (op2
)
13819 /* If we try to convert OP0 to our type, the
13820 call to fold will try to move the conversion inside
13821 a COND, which will recurse. In that case, the COND_EXPR
13822 is probably the best choice, so leave it alone. */
13823 && type
== TREE_TYPE (arg0
))
13824 return pedantic_non_lvalue_loc (loc
, arg0
);
13826 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13827 over COND_EXPR in cases such as floating point comparisons. */
13828 if (integer_zerop (op1
)
13829 && integer_onep (op2
)
13830 && truth_value_p (TREE_CODE (arg0
)))
13831 return pedantic_non_lvalue_loc (loc
,
13832 fold_convert_loc (loc
, type
,
13833 invert_truthvalue_loc (loc
,
13836 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13837 if (TREE_CODE (arg0
) == LT_EXPR
13838 && integer_zerop (TREE_OPERAND (arg0
, 1))
13839 && integer_zerop (op2
)
13840 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
13842 /* sign_bit_p only checks ARG1 bits within A's precision.
13843 If <sign bit of A> has wider type than A, bits outside
13844 of A's precision in <sign bit of A> need to be checked.
13845 If they are all 0, this optimization needs to be done
13846 in unsigned A's type, if they are all 1 in signed A's type,
13847 otherwise this can't be done. */
13848 if (TYPE_PRECISION (TREE_TYPE (tem
))
13849 < TYPE_PRECISION (TREE_TYPE (arg1
))
13850 && TYPE_PRECISION (TREE_TYPE (tem
))
13851 < TYPE_PRECISION (type
))
13853 unsigned HOST_WIDE_INT mask_lo
;
13854 HOST_WIDE_INT mask_hi
;
13855 int inner_width
, outer_width
;
13858 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
13859 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
13860 if (outer_width
> TYPE_PRECISION (type
))
13861 outer_width
= TYPE_PRECISION (type
);
13863 if (outer_width
> HOST_BITS_PER_WIDE_INT
)
13865 mask_hi
= ((unsigned HOST_WIDE_INT
) -1
13866 >> (2 * HOST_BITS_PER_WIDE_INT
- outer_width
));
13872 mask_lo
= ((unsigned HOST_WIDE_INT
) -1
13873 >> (HOST_BITS_PER_WIDE_INT
- outer_width
));
13875 if (inner_width
> HOST_BITS_PER_WIDE_INT
)
13877 mask_hi
&= ~((unsigned HOST_WIDE_INT
) -1
13878 >> (HOST_BITS_PER_WIDE_INT
- inner_width
));
13882 mask_lo
&= ~((unsigned HOST_WIDE_INT
) -1
13883 >> (HOST_BITS_PER_WIDE_INT
- inner_width
));
13885 if ((TREE_INT_CST_HIGH (arg1
) & mask_hi
) == mask_hi
13886 && (TREE_INT_CST_LOW (arg1
) & mask_lo
) == mask_lo
)
13888 tem_type
= signed_type_for (TREE_TYPE (tem
));
13889 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13891 else if ((TREE_INT_CST_HIGH (arg1
) & mask_hi
) == 0
13892 && (TREE_INT_CST_LOW (arg1
) & mask_lo
) == 0)
13894 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
13895 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13903 fold_convert_loc (loc
, type
,
13904 fold_build2_loc (loc
, BIT_AND_EXPR
,
13905 TREE_TYPE (tem
), tem
,
13906 fold_convert_loc (loc
,
13911 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13912 already handled above. */
13913 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13914 && integer_onep (TREE_OPERAND (arg0
, 1))
13915 && integer_zerop (op2
)
13916 && integer_pow2p (arg1
))
13918 tree tem
= TREE_OPERAND (arg0
, 0);
13920 if (TREE_CODE (tem
) == RSHIFT_EXPR
13921 && TREE_CODE (TREE_OPERAND (tem
, 1)) == INTEGER_CST
13922 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
13923 TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1)))
13924 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
13925 TREE_OPERAND (tem
, 0), arg1
);
13928 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13929 is probably obsolete because the first operand should be a
13930 truth value (that's why we have the two cases above), but let's
13931 leave it in until we can confirm this for all front-ends. */
13932 if (integer_zerop (op2
)
13933 && TREE_CODE (arg0
) == NE_EXPR
13934 && integer_zerop (TREE_OPERAND (arg0
, 1))
13935 && integer_pow2p (arg1
)
13936 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
13937 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
13938 arg1
, OEP_ONLY_CONST
))
13939 return pedantic_non_lvalue_loc (loc
,
13940 fold_convert_loc (loc
, type
,
13941 TREE_OPERAND (arg0
, 0)));
13943 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13944 if (integer_zerop (op2
)
13945 && truth_value_p (TREE_CODE (arg0
))
13946 && truth_value_p (TREE_CODE (arg1
)))
13947 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
13948 fold_convert_loc (loc
, type
, arg0
),
13951 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13952 if (integer_onep (op2
)
13953 && truth_value_p (TREE_CODE (arg0
))
13954 && truth_value_p (TREE_CODE (arg1
)))
13956 location_t loc0
= expr_location_or (arg0
, loc
);
13957 /* Only perform transformation if ARG0 is easily inverted. */
13958 tem
= fold_truth_not_expr (loc0
, arg0
);
13960 return fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
13961 fold_convert_loc (loc
, type
, tem
),
13965 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13966 if (integer_zerop (arg1
)
13967 && truth_value_p (TREE_CODE (arg0
))
13968 && truth_value_p (TREE_CODE (op2
)))
13970 location_t loc0
= expr_location_or (arg0
, loc
);
13971 /* Only perform transformation if ARG0 is easily inverted. */
13972 tem
= fold_truth_not_expr (loc0
, arg0
);
13974 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
13975 fold_convert_loc (loc
, type
, tem
),
13979 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13980 if (integer_onep (arg1
)
13981 && truth_value_p (TREE_CODE (arg0
))
13982 && truth_value_p (TREE_CODE (op2
)))
13983 return fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
13984 fold_convert_loc (loc
, type
, arg0
),
13990 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13991 of fold_ternary on them. */
13992 gcc_unreachable ();
13994 case BIT_FIELD_REF
:
13995 if ((TREE_CODE (arg0
) == VECTOR_CST
13996 || TREE_CODE (arg0
) == CONSTRUCTOR
)
13997 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
13998 || (TREE_CODE (type
) == VECTOR_TYPE
13999 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
)))))
14001 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
14002 unsigned HOST_WIDE_INT width
= tree_low_cst (TYPE_SIZE (eltype
), 1);
14003 unsigned HOST_WIDE_INT n
= tree_low_cst (arg1
, 1);
14004 unsigned HOST_WIDE_INT idx
= tree_low_cst (op2
, 1);
14007 && (idx
% width
) == 0
14008 && (n
% width
) == 0
14009 && ((idx
+ n
) / width
) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
14013 if (TREE_CODE (type
) == VECTOR_TYPE
)
14015 if (TREE_CODE (arg0
) == VECTOR_CST
)
14017 tree
*vals
= XALLOCAVEC (tree
, n
);
14019 for (i
= 0; i
< n
; ++i
)
14020 vals
[i
] = VECTOR_CST_ELT (arg0
, idx
+ i
);
14021 return build_vector (type
, vals
);
14025 VEC(constructor_elt
, gc
) *vals
;
14027 if (CONSTRUCTOR_NELTS (arg0
) == 0)
14028 return build_constructor (type
, NULL
);
14029 vals
= VEC_alloc (constructor_elt
, gc
, n
);
14030 for (i
= 0; i
< n
&& idx
+ i
< CONSTRUCTOR_NELTS (arg0
);
14032 CONSTRUCTOR_APPEND_ELT (vals
, NULL_TREE
,
14034 (arg0
, idx
+ i
)->value
);
14035 return build_constructor (type
, vals
);
14040 if (TREE_CODE (arg0
) == VECTOR_CST
)
14041 return VECTOR_CST_ELT (arg0
, idx
);
14042 else if (idx
< CONSTRUCTOR_NELTS (arg0
))
14043 return CONSTRUCTOR_ELT (arg0
, idx
)->value
;
14044 return build_zero_cst (type
);
14049 /* A bit-field-ref that referenced the full argument can be stripped. */
14050 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
14051 && TYPE_PRECISION (TREE_TYPE (arg0
)) == tree_low_cst (arg1
, 1)
14052 && integer_zerop (op2
))
14053 return fold_convert_loc (loc
, type
, arg0
);
14055 /* On constants we can use native encode/interpret to constant
14056 fold (nearly) all BIT_FIELD_REFs. */
14057 if (CONSTANT_CLASS_P (arg0
)
14058 && can_native_interpret_type_p (type
)
14059 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)), 1)
14060 /* This limitation should not be necessary, we just need to
14061 round this up to mode size. */
14062 && tree_low_cst (op1
, 1) % BITS_PER_UNIT
== 0
14063 /* Need bit-shifting of the buffer to relax the following. */
14064 && tree_low_cst (op2
, 1) % BITS_PER_UNIT
== 0)
14066 unsigned HOST_WIDE_INT bitpos
= tree_low_cst (op2
, 1);
14067 unsigned HOST_WIDE_INT bitsize
= tree_low_cst (op1
, 1);
14068 unsigned HOST_WIDE_INT clen
;
14069 clen
= tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)), 1);
14070 /* ??? We cannot tell native_encode_expr to start at
14071 some random byte only. So limit us to a reasonable amount
14075 unsigned char *b
= XALLOCAVEC (unsigned char, clen
);
14076 unsigned HOST_WIDE_INT len
= native_encode_expr (arg0
, b
, clen
);
14078 && len
* BITS_PER_UNIT
>= bitpos
+ bitsize
)
14080 tree v
= native_interpret_expr (type
,
14081 b
+ bitpos
/ BITS_PER_UNIT
,
14082 bitsize
/ BITS_PER_UNIT
);
14092 /* For integers we can decompose the FMA if possible. */
14093 if (TREE_CODE (arg0
) == INTEGER_CST
14094 && TREE_CODE (arg1
) == INTEGER_CST
)
14095 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
14096 const_binop (MULT_EXPR
, arg0
, arg1
), arg2
);
14097 if (integer_zerop (arg2
))
14098 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
14100 return fold_fma (loc
, type
, arg0
, arg1
, arg2
);
14102 case VEC_PERM_EXPR
:
14103 if (TREE_CODE (arg2
) == VECTOR_CST
)
14105 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
14106 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
14108 bool need_mask_canon
= false;
14110 gcc_assert (nelts
== VECTOR_CST_NELTS (arg2
));
14111 for (i
= 0; i
< nelts
; i
++)
14113 tree val
= VECTOR_CST_ELT (arg2
, i
);
14114 if (TREE_CODE (val
) != INTEGER_CST
)
14117 sel
[i
] = TREE_INT_CST_LOW (val
) & (2 * nelts
- 1);
14118 if (TREE_INT_CST_HIGH (val
)
14119 || ((unsigned HOST_WIDE_INT
)
14120 TREE_INT_CST_LOW (val
) != sel
[i
]))
14121 need_mask_canon
= true;
14124 if ((TREE_CODE (arg0
) == VECTOR_CST
14125 || TREE_CODE (arg0
) == CONSTRUCTOR
)
14126 && (TREE_CODE (arg1
) == VECTOR_CST
14127 || TREE_CODE (arg1
) == CONSTRUCTOR
))
14129 t
= fold_vec_perm (type
, arg0
, arg1
, sel
);
14130 if (t
!= NULL_TREE
)
14134 if (need_mask_canon
&& arg2
== op2
)
14136 tree
*tsel
= XALLOCAVEC (tree
, nelts
);
14137 tree eltype
= TREE_TYPE (TREE_TYPE (arg2
));
14138 for (i
= 0; i
< nelts
; i
++)
14139 tsel
[i
] = build_int_cst (eltype
, sel
[nelts
- i
- 1]);
14140 t
= build_vector (TREE_TYPE (arg2
), tsel
);
14141 return build3_loc (loc
, VEC_PERM_EXPR
, type
, op0
, op1
, t
);
14148 } /* switch (code) */
14151 /* Perform constant folding and related simplification of EXPR.
14152 The related simplifications include x*1 => x, x*0 => 0, etc.,
14153 and application of the associative law.
14154 NOP_EXPR conversions may be removed freely (as long as we
14155 are careful not to change the type of the overall expression).
14156 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14157 but we can constant-fold them if they have constant operands. */
14159 #ifdef ENABLE_FOLD_CHECKING
14160 # define fold(x) fold_1 (x)
14161 static tree
fold_1 (tree
);
14167 const tree t
= expr
;
14168 enum tree_code code
= TREE_CODE (t
);
14169 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
14171 location_t loc
= EXPR_LOCATION (expr
);
14173 /* Return right away if a constant. */
14174 if (kind
== tcc_constant
)
14177 /* CALL_EXPR-like objects with variable numbers of operands are
14178 treated specially. */
14179 if (kind
== tcc_vl_exp
)
14181 if (code
== CALL_EXPR
)
14183 tem
= fold_call_expr (loc
, expr
, false);
14184 return tem
? tem
: expr
;
14189 if (IS_EXPR_CODE_CLASS (kind
))
14191 tree type
= TREE_TYPE (t
);
14192 tree op0
, op1
, op2
;
14194 switch (TREE_CODE_LENGTH (code
))
14197 op0
= TREE_OPERAND (t
, 0);
14198 tem
= fold_unary_loc (loc
, code
, type
, op0
);
14199 return tem
? tem
: expr
;
14201 op0
= TREE_OPERAND (t
, 0);
14202 op1
= TREE_OPERAND (t
, 1);
14203 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
14204 return tem
? tem
: expr
;
14206 op0
= TREE_OPERAND (t
, 0);
14207 op1
= TREE_OPERAND (t
, 1);
14208 op2
= TREE_OPERAND (t
, 2);
14209 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
14210 return tem
? tem
: expr
;
14220 tree op0
= TREE_OPERAND (t
, 0);
14221 tree op1
= TREE_OPERAND (t
, 1);
14223 if (TREE_CODE (op1
) == INTEGER_CST
14224 && TREE_CODE (op0
) == CONSTRUCTOR
14225 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
14227 VEC(constructor_elt
,gc
) *elts
= CONSTRUCTOR_ELTS (op0
);
14228 unsigned HOST_WIDE_INT end
= VEC_length (constructor_elt
, elts
);
14229 unsigned HOST_WIDE_INT begin
= 0;
14231 /* Find a matching index by means of a binary search. */
14232 while (begin
!= end
)
14234 unsigned HOST_WIDE_INT middle
= (begin
+ end
) / 2;
14235 tree index
= VEC_index (constructor_elt
, elts
, middle
)->index
;
14237 if (TREE_CODE (index
) == INTEGER_CST
14238 && tree_int_cst_lt (index
, op1
))
14239 begin
= middle
+ 1;
14240 else if (TREE_CODE (index
) == INTEGER_CST
14241 && tree_int_cst_lt (op1
, index
))
14243 else if (TREE_CODE (index
) == RANGE_EXPR
14244 && tree_int_cst_lt (TREE_OPERAND (index
, 1), op1
))
14245 begin
= middle
+ 1;
14246 else if (TREE_CODE (index
) == RANGE_EXPR
14247 && tree_int_cst_lt (op1
, TREE_OPERAND (index
, 0)))
14250 return VEC_index (constructor_elt
, elts
, middle
)->value
;
14258 return fold (DECL_INITIAL (t
));
14262 } /* switch (code) */
14265 #ifdef ENABLE_FOLD_CHECKING
14268 static void fold_checksum_tree (const_tree
, struct md5_ctx
*, htab_t
);
14269 static void fold_check_failed (const_tree
, const_tree
);
14270 void print_fold_checksum (const_tree
);
14272 /* When --enable-checking=fold, compute a digest of expr before
14273 and after actual fold call to see if fold did not accidentally
14274 change original expr. */
14280 struct md5_ctx ctx
;
14281 unsigned char checksum_before
[16], checksum_after
[16];
14284 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14285 md5_init_ctx (&ctx
);
14286 fold_checksum_tree (expr
, &ctx
, ht
);
14287 md5_finish_ctx (&ctx
, checksum_before
);
14290 ret
= fold_1 (expr
);
14292 md5_init_ctx (&ctx
);
14293 fold_checksum_tree (expr
, &ctx
, ht
);
14294 md5_finish_ctx (&ctx
, checksum_after
);
14297 if (memcmp (checksum_before
, checksum_after
, 16))
14298 fold_check_failed (expr
, ret
);
14304 print_fold_checksum (const_tree expr
)
14306 struct md5_ctx ctx
;
14307 unsigned char checksum
[16], cnt
;
14310 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14311 md5_init_ctx (&ctx
);
14312 fold_checksum_tree (expr
, &ctx
, ht
);
14313 md5_finish_ctx (&ctx
, checksum
);
14315 for (cnt
= 0; cnt
< 16; ++cnt
)
14316 fprintf (stderr
, "%02x", checksum
[cnt
]);
14317 putc ('\n', stderr
);
14321 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
14323 internal_error ("fold check: original tree changed by fold");
14327 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
, htab_t ht
)
14330 enum tree_code code
;
14331 union tree_node buf
;
14337 slot
= (void **) htab_find_slot (ht
, expr
, INSERT
);
14340 *slot
= CONST_CAST_TREE (expr
);
14341 code
= TREE_CODE (expr
);
14342 if (TREE_CODE_CLASS (code
) == tcc_declaration
14343 && DECL_ASSEMBLER_NAME_SET_P (expr
))
14345 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14346 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14347 SET_DECL_ASSEMBLER_NAME ((tree
)&buf
, NULL
);
14348 expr
= (tree
) &buf
;
14350 else if (TREE_CODE_CLASS (code
) == tcc_type
14351 && (TYPE_POINTER_TO (expr
)
14352 || TYPE_REFERENCE_TO (expr
)
14353 || TYPE_CACHED_VALUES_P (expr
)
14354 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
14355 || TYPE_NEXT_VARIANT (expr
)))
14357 /* Allow these fields to be modified. */
14359 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14360 expr
= tmp
= (tree
) &buf
;
14361 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
14362 TYPE_POINTER_TO (tmp
) = NULL
;
14363 TYPE_REFERENCE_TO (tmp
) = NULL
;
14364 TYPE_NEXT_VARIANT (tmp
) = NULL
;
14365 if (TYPE_CACHED_VALUES_P (tmp
))
14367 TYPE_CACHED_VALUES_P (tmp
) = 0;
14368 TYPE_CACHED_VALUES (tmp
) = NULL
;
14371 md5_process_bytes (expr
, tree_size (expr
), ctx
);
14372 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
14373 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
14374 if (TREE_CODE_CLASS (code
) != tcc_type
14375 && TREE_CODE_CLASS (code
) != tcc_declaration
14376 && code
!= TREE_LIST
14377 && code
!= SSA_NAME
14378 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
14379 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
14380 switch (TREE_CODE_CLASS (code
))
14386 md5_process_bytes (TREE_STRING_POINTER (expr
),
14387 TREE_STRING_LENGTH (expr
), ctx
);
14390 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
14391 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
14394 for (i
= 0; i
< (int) VECTOR_CST_NELTS (expr
); ++i
)
14395 fold_checksum_tree (VECTOR_CST_ELT (expr
, i
), ctx
, ht
);
14401 case tcc_exceptional
:
14405 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
14406 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
14407 expr
= TREE_CHAIN (expr
);
14408 goto recursive_label
;
14411 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
14412 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
14418 case tcc_expression
:
14419 case tcc_reference
:
14420 case tcc_comparison
:
14423 case tcc_statement
:
14425 len
= TREE_OPERAND_LENGTH (expr
);
14426 for (i
= 0; i
< len
; ++i
)
14427 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
14429 case tcc_declaration
:
14430 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
14431 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
14432 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
14434 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
14435 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
14436 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
14437 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
14438 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
14440 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_WITH_VIS
))
14441 fold_checksum_tree (DECL_SECTION_NAME (expr
), ctx
, ht
);
14443 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
14445 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
14446 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
14447 fold_checksum_tree (DECL_ARGUMENT_FLD (expr
), ctx
, ht
);
14451 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
14452 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
14453 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
14454 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
14455 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
14456 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
14457 if (INTEGRAL_TYPE_P (expr
)
14458 || SCALAR_FLOAT_TYPE_P (expr
))
14460 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
14461 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
14463 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
14464 if (TREE_CODE (expr
) == RECORD_TYPE
14465 || TREE_CODE (expr
) == UNION_TYPE
14466 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
14467 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
14468 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
14475 /* Helper function for outputting the checksum of a tree T. When
14476 debugging with gdb, you can "define mynext" to be "next" followed
14477 by "call debug_fold_checksum (op0)", then just trace down till the
14480 DEBUG_FUNCTION
void
14481 debug_fold_checksum (const_tree t
)
14484 unsigned char checksum
[16];
14485 struct md5_ctx ctx
;
14486 htab_t ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14488 md5_init_ctx (&ctx
);
14489 fold_checksum_tree (t
, &ctx
, ht
);
14490 md5_finish_ctx (&ctx
, checksum
);
14493 for (i
= 0; i
< 16; i
++)
14494 fprintf (stderr
, "%d ", checksum
[i
]);
14496 fprintf (stderr
, "\n");
14501 /* Fold a unary tree expression with code CODE of type TYPE with an
14502 operand OP0. LOC is the location of the resulting expression.
14503 Return a folded expression if successful. Otherwise, return a tree
14504 expression with code CODE of type TYPE with an operand OP0. */
14507 fold_build1_stat_loc (location_t loc
,
14508 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
14511 #ifdef ENABLE_FOLD_CHECKING
14512 unsigned char checksum_before
[16], checksum_after
[16];
14513 struct md5_ctx ctx
;
14516 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14517 md5_init_ctx (&ctx
);
14518 fold_checksum_tree (op0
, &ctx
, ht
);
14519 md5_finish_ctx (&ctx
, checksum_before
);
14523 tem
= fold_unary_loc (loc
, code
, type
, op0
);
14525 tem
= build1_stat_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
14527 #ifdef ENABLE_FOLD_CHECKING
14528 md5_init_ctx (&ctx
);
14529 fold_checksum_tree (op0
, &ctx
, ht
);
14530 md5_finish_ctx (&ctx
, checksum_after
);
14533 if (memcmp (checksum_before
, checksum_after
, 16))
14534 fold_check_failed (op0
, tem
);
14539 /* Fold a binary tree expression with code CODE of type TYPE with
14540 operands OP0 and OP1. LOC is the location of the resulting
14541 expression. Return a folded expression if successful. Otherwise,
14542 return a tree expression with code CODE of type TYPE with operands
14546 fold_build2_stat_loc (location_t loc
,
14547 enum tree_code code
, tree type
, tree op0
, tree op1
14551 #ifdef ENABLE_FOLD_CHECKING
14552 unsigned char checksum_before_op0
[16],
14553 checksum_before_op1
[16],
14554 checksum_after_op0
[16],
14555 checksum_after_op1
[16];
14556 struct md5_ctx ctx
;
14559 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14560 md5_init_ctx (&ctx
);
14561 fold_checksum_tree (op0
, &ctx
, ht
);
14562 md5_finish_ctx (&ctx
, checksum_before_op0
);
14565 md5_init_ctx (&ctx
);
14566 fold_checksum_tree (op1
, &ctx
, ht
);
14567 md5_finish_ctx (&ctx
, checksum_before_op1
);
14571 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
14573 tem
= build2_stat_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
14575 #ifdef ENABLE_FOLD_CHECKING
14576 md5_init_ctx (&ctx
);
14577 fold_checksum_tree (op0
, &ctx
, ht
);
14578 md5_finish_ctx (&ctx
, checksum_after_op0
);
14581 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14582 fold_check_failed (op0
, tem
);
14584 md5_init_ctx (&ctx
);
14585 fold_checksum_tree (op1
, &ctx
, ht
);
14586 md5_finish_ctx (&ctx
, checksum_after_op1
);
14589 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14590 fold_check_failed (op1
, tem
);
14595 /* Fold a ternary tree expression with code CODE of type TYPE with
14596 operands OP0, OP1, and OP2. Return a folded expression if
14597 successful. Otherwise, return a tree expression with code CODE of
14598 type TYPE with operands OP0, OP1, and OP2. */
14601 fold_build3_stat_loc (location_t loc
, enum tree_code code
, tree type
,
14602 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
14605 #ifdef ENABLE_FOLD_CHECKING
14606 unsigned char checksum_before_op0
[16],
14607 checksum_before_op1
[16],
14608 checksum_before_op2
[16],
14609 checksum_after_op0
[16],
14610 checksum_after_op1
[16],
14611 checksum_after_op2
[16];
14612 struct md5_ctx ctx
;
14615 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14616 md5_init_ctx (&ctx
);
14617 fold_checksum_tree (op0
, &ctx
, ht
);
14618 md5_finish_ctx (&ctx
, checksum_before_op0
);
14621 md5_init_ctx (&ctx
);
14622 fold_checksum_tree (op1
, &ctx
, ht
);
14623 md5_finish_ctx (&ctx
, checksum_before_op1
);
14626 md5_init_ctx (&ctx
);
14627 fold_checksum_tree (op2
, &ctx
, ht
);
14628 md5_finish_ctx (&ctx
, checksum_before_op2
);
14632 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
14633 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
14635 tem
= build3_stat_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
14637 #ifdef ENABLE_FOLD_CHECKING
14638 md5_init_ctx (&ctx
);
14639 fold_checksum_tree (op0
, &ctx
, ht
);
14640 md5_finish_ctx (&ctx
, checksum_after_op0
);
14643 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14644 fold_check_failed (op0
, tem
);
14646 md5_init_ctx (&ctx
);
14647 fold_checksum_tree (op1
, &ctx
, ht
);
14648 md5_finish_ctx (&ctx
, checksum_after_op1
);
14651 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14652 fold_check_failed (op1
, tem
);
14654 md5_init_ctx (&ctx
);
14655 fold_checksum_tree (op2
, &ctx
, ht
);
14656 md5_finish_ctx (&ctx
, checksum_after_op2
);
14659 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
14660 fold_check_failed (op2
, tem
);
14665 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14666 arguments in ARGARRAY, and a null static chain.
14667 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14668 of type TYPE from the given operands as constructed by build_call_array. */
14671 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
14672 int nargs
, tree
*argarray
)
14675 #ifdef ENABLE_FOLD_CHECKING
14676 unsigned char checksum_before_fn
[16],
14677 checksum_before_arglist
[16],
14678 checksum_after_fn
[16],
14679 checksum_after_arglist
[16];
14680 struct md5_ctx ctx
;
14684 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14685 md5_init_ctx (&ctx
);
14686 fold_checksum_tree (fn
, &ctx
, ht
);
14687 md5_finish_ctx (&ctx
, checksum_before_fn
);
14690 md5_init_ctx (&ctx
);
14691 for (i
= 0; i
< nargs
; i
++)
14692 fold_checksum_tree (argarray
[i
], &ctx
, ht
);
14693 md5_finish_ctx (&ctx
, checksum_before_arglist
);
14697 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
14699 #ifdef ENABLE_FOLD_CHECKING
14700 md5_init_ctx (&ctx
);
14701 fold_checksum_tree (fn
, &ctx
, ht
);
14702 md5_finish_ctx (&ctx
, checksum_after_fn
);
14705 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
14706 fold_check_failed (fn
, tem
);
14708 md5_init_ctx (&ctx
);
14709 for (i
= 0; i
< nargs
; i
++)
14710 fold_checksum_tree (argarray
[i
], &ctx
, ht
);
14711 md5_finish_ctx (&ctx
, checksum_after_arglist
);
14714 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
14715 fold_check_failed (NULL_TREE
, tem
);
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
14744 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
14745 tree type
, tree op
)
14750 result
= fold_build1_loc (loc
, code
, type
, op
);
14757 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
14758 tree type
, tree op0
, tree op1
)
14763 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
14770 fold_build3_initializer_loc (location_t loc
, enum tree_code code
,
14771 tree type
, tree op0
, tree op1
, tree op2
)
14776 result
= fold_build3_loc (loc
, code
, type
, op0
, op1
, op2
);
14783 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
14784 int nargs
, tree
*argarray
)
14789 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
14795 #undef START_FOLD_INIT
14796 #undef END_FOLD_INIT
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
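
/* Illustrative use of multiple_of_p (example added for exposition, not
   part of the original sources): for an integer type, an expression of
   the form (J * 8) + 16 is recognized as a multiple of 8, since both
   addends are themselves multiples of 8.  */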
14839 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
14841 if (operand_equal_p (top
, bottom
, 0))
14844 if (TREE_CODE (type
) != INTEGER_TYPE
)
14847 switch (TREE_CODE (top
))
14850 /* Bitwise and provides a power of two multiple. If the mask is
14851 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14852 if (!integer_pow2p (bottom
))
14857 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14858 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
14862 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14863 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
14866 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
14870 op1
= TREE_OPERAND (top
, 1);
14871 /* const_binop may not detect overflow correctly,
14872 so check for it explicitly here. */
14873 if (TYPE_PRECISION (TREE_TYPE (size_one_node
))
14874 > TREE_INT_CST_LOW (op1
)
14875 && TREE_INT_CST_HIGH (op1
) == 0
14876 && 0 != (t1
= fold_convert (type
,
14877 const_binop (LSHIFT_EXPR
,
14880 && !TREE_OVERFLOW (t1
))
14881 return multiple_of_p (type
, t1
, bottom
);
14886 /* Can't handle conversions from non-integral or wider integral type. */
14887 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
14888 || (TYPE_PRECISION (type
)
14889 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
14892 /* .. fall through ... */
14895 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
14898 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
14899 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
14902 if (TREE_CODE (bottom
) != INTEGER_CST
14903 || integer_zerop (bottom
)
14904 || (TYPE_UNSIGNED (type
)
14905 && (tree_int_cst_sgn (top
) < 0
14906 || tree_int_cst_sgn (bottom
) < 0)))
14908 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR
,
14916 /* Return true if CODE or TYPE is known to be non-negative. */
14919 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
14921 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
14922 && truth_value_p (code
))
14923 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14924 have a signed:1 type (where the value is -1 and 0). */
14929 /* Return true if (CODE OP0) is known to be non-negative. If the return
14930 value is based on the assumption that signed overflow is undefined,
14931 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14932 *STRICT_OVERFLOW_P. */
14935 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14936 bool *strict_overflow_p
)
14938 if (TYPE_UNSIGNED (type
))
14944 /* We can't return 1 if flag_wrapv is set because
14945 ABS_EXPR<INT_MIN> = INT_MIN. */
14946 if (!INTEGRAL_TYPE_P (type
))
14948 if (TYPE_OVERFLOW_UNDEFINED (type
))
14950 *strict_overflow_p
= true;
14955 case NON_LVALUE_EXPR
:
14957 case FIX_TRUNC_EXPR
:
14958 return tree_expr_nonnegative_warnv_p (op0
,
14959 strict_overflow_p
);
14963 tree inner_type
= TREE_TYPE (op0
);
14964 tree outer_type
= type
;
14966 if (TREE_CODE (outer_type
) == REAL_TYPE
)
14968 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14969 return tree_expr_nonnegative_warnv_p (op0
,
14970 strict_overflow_p
);
14971 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
14973 if (TYPE_UNSIGNED (inner_type
))
14975 return tree_expr_nonnegative_warnv_p (op0
,
14976 strict_overflow_p
);
14979 else if (TREE_CODE (outer_type
) == INTEGER_TYPE
)
14981 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14982 return tree_expr_nonnegative_warnv_p (op0
,
14983 strict_overflow_p
);
14984 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
14985 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
14986 && TYPE_UNSIGNED (inner_type
);
14992 return tree_simple_nonnegative_warnv_p (code
, type
);
14995 /* We don't know sign of `t', so be conservative and return false. */
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
        return (tree_expr_nonnegative_warnv_p (op0,
                                               strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (op1,
                                                  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (type);
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (op0, op1, 0))
            return true;
          return (tree_expr_nonnegative_warnv_p (op0,
                                                 strict_overflow_p)
                  && tree_expr_nonnegative_warnv_p (op1,
                                                    strict_overflow_p));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
          && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
        {
          tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op0, 0))
            : TREE_TYPE (op0);
          tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op1, 0))
            : TREE_TYPE (op1);

          bool unsigned0 = TYPE_UNSIGNED (inner0);
          bool unsigned1 = TYPE_UNSIGNED (inner1);

          if (TREE_CODE (op0) == INTEGER_CST)
            unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

          if (TREE_CODE (op1) == INTEGER_CST)
            unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

          if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
              && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
            {
              unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
                ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
                : TYPE_PRECISION (inner0);

              unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
                ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
                : TYPE_PRECISION (inner1);

              return precision0 + precision1 < TYPE_PRECISION (type);
            }
        }
      break;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              || tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
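/* Worked example for the zero-extension rules above (illustrative, not taken
   from the original sources): with a 32-bit signed result type,
   (int) (unsigned short) x + (int) (unsigned short) y needs at most
   MAX (16, 16) + 1 == 17 bits, and 17 < 32, so the PLUS_EXPR is known
   non-negative.  For MULT_EXPR the precisions add instead:
   (int) (unsigned char) x * (int) (unsigned char) y needs at most
   8 + 8 == 16 bits, and 16 < 32, so the product cannot reach the sign bit
   either.  */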
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
                               tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
        CASE_FLT_FN (BUILT_IN_ACOS):
        CASE_FLT_FN (BUILT_IN_ACOSH):
        CASE_FLT_FN (BUILT_IN_CABS):
        CASE_FLT_FN (BUILT_IN_COSH):
        CASE_FLT_FN (BUILT_IN_ERFC):
        CASE_FLT_FN (BUILT_IN_EXP):
        CASE_FLT_FN (BUILT_IN_EXP10):
        CASE_FLT_FN (BUILT_IN_EXP2):
        CASE_FLT_FN (BUILT_IN_FABS):
        CASE_FLT_FN (BUILT_IN_FDIM):
        CASE_FLT_FN (BUILT_IN_HYPOT):
        CASE_FLT_FN (BUILT_IN_POW10):
        CASE_INT_FN (BUILT_IN_FFS):
        CASE_INT_FN (BUILT_IN_PARITY):
        CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
        /* Always true.  */
        return true;

        CASE_FLT_FN (BUILT_IN_SQRT):
        /* sqrt(-0.0) is -0.0.  */
        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_ASINH):
        CASE_FLT_FN (BUILT_IN_ATAN):
        CASE_FLT_FN (BUILT_IN_ATANH):
        CASE_FLT_FN (BUILT_IN_CBRT):
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_ERF):
        CASE_FLT_FN (BUILT_IN_EXPM1):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_FMOD):
        CASE_FLT_FN (BUILT_IN_FREXP):
        CASE_FLT_FN (BUILT_IN_ICEIL):
        CASE_FLT_FN (BUILT_IN_IFLOOR):
        CASE_FLT_FN (BUILT_IN_IRINT):
        CASE_FLT_FN (BUILT_IN_IROUND):
        CASE_FLT_FN (BUILT_IN_LCEIL):
        CASE_FLT_FN (BUILT_IN_LDEXP):
        CASE_FLT_FN (BUILT_IN_LFLOOR):
        CASE_FLT_FN (BUILT_IN_LLCEIL):
        CASE_FLT_FN (BUILT_IN_LLFLOOR):
        CASE_FLT_FN (BUILT_IN_LLRINT):
        CASE_FLT_FN (BUILT_IN_LLROUND):
        CASE_FLT_FN (BUILT_IN_LRINT):
        CASE_FLT_FN (BUILT_IN_LROUND):
        CASE_FLT_FN (BUILT_IN_MODF):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_SCALB):
        CASE_FLT_FN (BUILT_IN_SCALBLN):
        CASE_FLT_FN (BUILT_IN_SCALBN):
        CASE_FLT_FN (BUILT_IN_SIGNBIT):
        CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
        CASE_FLT_FN (BUILT_IN_SINH):
        CASE_FLT_FN (BUILT_IN_TANH):
        CASE_FLT_FN (BUILT_IN_TRUNC):
        /* True if the 1st argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_FMAX):
        /* True if the 1st OR 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                || (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_FMIN):
        /* True if the 1st AND 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                && (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_COPYSIGN):
        /* True if the 2nd argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg1,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POWI):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer.  */
        if (TREE_CODE (arg1) == INTEGER_CST
            && (TREE_INT_CST_LOW (arg1) & 1) == 0)
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POW):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer valued real.  */
        if (TREE_CODE (arg1) == REAL_CST)
          {
            REAL_VALUE_TYPE c;
            HOST_WIDE_INT n;

            c = TREE_REAL_CST (arg1);
            n = real_to_integer (&c);
            if ((n & 1) == 0)
              {
                REAL_VALUE_TYPE cint;
                real_from_integer (&cint, VOIDmode, n,
                                   n < 0 ? -1 : 0, 0);
                if (real_identical (&c, &cint))
                  return true;
              }
          }
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

      default:
        break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
                                          type);
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0,
                                              arg1,
                                              strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

static bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    CASE_CONVERT:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0,
                                              strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

static bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;
    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (t, 0);
        if (!DECL_P (base))
          base = get_base_address (base);

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  Other things may also be NULL
           so protect with -fdelete-null-pointer-checks; but not variables
           allocated on the stack.  */
        if (DECL_P (base)
            && (flag_delete_null_pointer_checks
                || (DECL_CONTEXT (base)
                    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
                    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype,
                                                       low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL_TREE;
}
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);
        int overflow = neg_double (val.low, val.high, &val.low, &val.high);

        t = force_fit_type_double (type, val, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);

        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (TYPE_UNSIGNED (type)
            || !double_int_negative_p (val))
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            int overflow;

            overflow = neg_double (val.low, val.high, &val.low, &val.high);
            t = force_fit_type_double (type, val, -1,
                                       overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
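/* Worked example for the INTEGER_CST case above (illustrative, not taken
   from the original sources): for a 32-bit signed -5 the value is negative,
   so it is negated to 5 and refitted without overflow.  For INT_MIN the
   negation wraps back to INT_MIN; neg_double reports the overflow and
   force_fit_type_double records it via TREE_OVERFLOW on the result,
   mirroring the ABS_EXPR<INT_MIN> caveat noted earlier in this file.  */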
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = double_int_not (tree_to_double_int (arg0));
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
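/* Worked example for the canonicalization above (illustrative, not taken
   from the original sources): to fold 7 > 3 the operands are swapped and the
   code becomes 3 < 7, handled directly by INT_CST_LT; to fold 7 >= 3 the
   code is first inverted to 7 < 3 (false) and the result is then flipped
   back to true via `invert'.  Only the EQ and LT kernels are ever evaluated
   directly.  */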
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check the expression inside the return
     and the right hand side of the modify expression inside the return.  If
     either has no side effects, we don't need to wrap the expression in a
     cleanup point expression.  Note we don't check the left hand side of the
     modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
                                  index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_low_cst (op01, 0);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
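/* Worked examples (illustrative, not taken from the original sources): given
   `double _Complex c', folding *(double *) &c yields __real__ c, and
   *((double *) &c + 1), whose byte offset equals TYPE_SIZE_UNIT (double),
   yields __imag__ c.  Given `int a[4]', *(int *) &a becomes a[0], and a
   byte offset of 8 becomes a[2] after the EXACT_DIV_EXPR by the element
   size.  */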
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
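/* Worked example for the power-of-two path above (illustrative, not taken
   from the original sources): rounding VALUE up to a multiple of 8 computes

     (VALUE + 7) & -8        e.g. 13 -> 16, and 16 stays 16

   while the generic path reaches the same result with CEIL_DIV_EXPR
   followed by MULT_EXPR, e.g. ceil (13 / 12) * 12 == 24 for a divisor
   of 12.  */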
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
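/* Worked example (illustrative, not taken from the original sources): for
   E1 = &a[3] and E2 = &a[1] with 4-byte elements, both addresses split to
   the same core `a' with bit positions 96 and 32 and no variable offset, so
   *DIFF becomes (96 - 32) / BITS_PER_UNIT == 8.  For &a[3] and &b[0] the
   cores differ and the function returns false.  */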
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0,
                                arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}