/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
53 #include "fold-const.h"
54 #include "stor-layout.h"
56 #include "tree-iterator.h"
58 #include "insn-config.h"
68 #include "diagnostic-core.h"
70 #include "langhooks.h"
72 #include "internal-fn.h"
78 #include "generic-match.h"
79 #include "optabs-query.h"
80 #include "gimple-fold.h"
82 #include "tree-ssa-operands.h"
83 #include "tree-into-ssa.h"
/* Provide a default for targets that do not define LOAD_EXTEND_OP.  */
#ifndef LOAD_EXTEND_OP
#define LOAD_EXTEND_OP(M) UNKNOWN
#endif

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
115 static bool negate_expr_p (tree
);
116 static tree
negate_expr (tree
);
117 static tree
split_tree (tree
, enum tree_code
, tree
*, tree
*, tree
*, int);
118 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
119 static enum comparison_code
comparison_to_compcode (enum tree_code
);
120 static enum tree_code
compcode_to_comparison (enum comparison_code
);
121 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
122 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
123 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
124 static tree
make_bit_field_ref (location_t
, tree
, tree
,
125 HOST_WIDE_INT
, HOST_WIDE_INT
, int);
126 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
128 static tree
decode_field_reference (location_t
, tree
, HOST_WIDE_INT
*,
130 machine_mode
*, int *, int *,
132 static int simple_operand_p (const_tree
);
133 static bool simple_operand_p_2 (tree
);
134 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
135 static tree
range_predecessor (tree
);
136 static tree
range_successor (tree
);
137 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
138 static tree
fold_cond_expr_with_comparison (location_t
, tree
, tree
, tree
, tree
);
139 static tree
unextend (tree
, int, int, tree
);
140 static tree
optimize_minmax_comparison (location_t
, enum tree_code
,
142 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
143 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
144 static tree
fold_binary_op_with_conditional_arg (location_t
,
145 enum tree_code
, tree
,
148 static tree
fold_div_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
149 static bool reorder_operands_p (const_tree
, const_tree
);
150 static tree
fold_negate_const (tree
, tree
);
151 static tree
fold_not_const (const_tree
, tree
);
152 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
153 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
154 static tree
fold_view_convert_expr (tree
, tree
);
155 static bool vec_cst_ctor_to_array (tree
, tree
*);
158 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
159 Otherwise, return LOC. */
162 expr_location_or (tree t
, location_t loc
)
164 location_t tloc
= EXPR_LOCATION (t
);
165 return tloc
== UNKNOWN_LOCATION
? loc
: tloc
;
168 /* Similar to protected_set_expr_location, but never modify x in place,
169 if location can and needs to be set, unshare it. */
172 protected_set_expr_location_unshare (tree x
, location_t loc
)
174 if (CAN_HAVE_LOCATION_P (x
)
175 && EXPR_LOCATION (x
) != loc
176 && !(TREE_CODE (x
) == SAVE_EXPR
177 || TREE_CODE (x
) == TARGET_EXPR
178 || TREE_CODE (x
) == BIND_EXPR
))
181 SET_EXPR_LOCATION (x
, loc
);
186 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
187 division and returns the quotient. Otherwise returns
191 div_if_zero_remainder (const_tree arg1
, const_tree arg2
)
195 if (wi::multiple_of_p (wi::to_widest (arg1
), wi::to_widest (arg2
),
197 return wide_int_to_tree (TREE_TYPE (arg1
), quo
);
202 /* This is nonzero if we should defer warnings about undefined
203 overflow. This facility exists because these warnings are a
204 special case. The code to estimate loop iterations does not want
205 to issue any warnings, since it works with expressions which do not
206 occur in user code. Various bits of cleanup code call fold(), but
207 only use the result if it has certain characteristics (e.g., is a
208 constant); that code only wants to issue a warning if the result is
211 static int fold_deferring_overflow_warnings
;
213 /* If a warning about undefined overflow is deferred, this is the
214 warning. Note that this may cause us to turn two warnings into
215 one, but that is fine since it is sufficient to only give one
216 warning per expression. */
218 static const char* fold_deferred_overflow_warning
;
220 /* If a warning about undefined overflow is deferred, this is the
221 level at which the warning should be emitted. */
223 static enum warn_strict_overflow_code fold_deferred_overflow_code
;
225 /* Start deferring overflow warnings. We could use a stack here to
226 permit nested calls, but at present it is not necessary. */
229 fold_defer_overflow_warnings (void)
231 ++fold_deferring_overflow_warnings
;
234 /* Stop deferring overflow warnings. If there is a pending warning,
235 and ISSUE is true, then issue the warning if appropriate. STMT is
236 the statement with which the warning should be associated (used for
237 location information); STMT may be NULL. CODE is the level of the
238 warning--a warn_strict_overflow_code value. This function will use
239 the smaller of CODE and the deferred code when deciding whether to
240 issue the warning. CODE may be zero to mean to always use the
244 fold_undefer_overflow_warnings (bool issue
, const gimple
*stmt
, int code
)
249 gcc_assert (fold_deferring_overflow_warnings
> 0);
250 --fold_deferring_overflow_warnings
;
251 if (fold_deferring_overflow_warnings
> 0)
253 if (fold_deferred_overflow_warning
!= NULL
255 && code
< (int) fold_deferred_overflow_code
)
256 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
260 warnmsg
= fold_deferred_overflow_warning
;
261 fold_deferred_overflow_warning
= NULL
;
263 if (!issue
|| warnmsg
== NULL
)
266 if (gimple_no_warning_p (stmt
))
269 /* Use the smallest code level when deciding to issue the
271 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
272 code
= fold_deferred_overflow_code
;
274 if (!issue_strict_overflow_warning (code
))
278 locus
= input_location
;
280 locus
= gimple_location (stmt
);
281 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
284 /* Stop deferring overflow warnings, ignoring any deferred
288 fold_undefer_and_ignore_overflow_warnings (void)
290 fold_undefer_overflow_warnings (false, NULL
, 0);
293 /* Whether we are deferring overflow warnings. */
296 fold_deferring_overflow_warnings_p (void)
298 return fold_deferring_overflow_warnings
> 0;
301 /* This is called when we fold something based on the fact that signed
302 overflow is undefined. */
305 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
307 if (fold_deferring_overflow_warnings
> 0)
309 if (fold_deferred_overflow_warning
== NULL
310 || wc
< fold_deferred_overflow_code
)
312 fold_deferred_overflow_warning
= gmsgid
;
313 fold_deferred_overflow_code
= wc
;
316 else if (issue_strict_overflow_warning (wc
))
317 warning (OPT_Wstrict_overflow
, gmsgid
);
320 /* Return true if the built-in mathematical function specified by CODE
321 is odd, i.e. -f(x) == f(-x). */
324 negate_mathfn_p (enum built_in_function code
)
328 CASE_FLT_FN (BUILT_IN_ASIN
):
329 CASE_FLT_FN (BUILT_IN_ASINH
):
330 CASE_FLT_FN (BUILT_IN_ATAN
):
331 CASE_FLT_FN (BUILT_IN_ATANH
):
332 CASE_FLT_FN (BUILT_IN_CASIN
):
333 CASE_FLT_FN (BUILT_IN_CASINH
):
334 CASE_FLT_FN (BUILT_IN_CATAN
):
335 CASE_FLT_FN (BUILT_IN_CATANH
):
336 CASE_FLT_FN (BUILT_IN_CBRT
):
337 CASE_FLT_FN (BUILT_IN_CPROJ
):
338 CASE_FLT_FN (BUILT_IN_CSIN
):
339 CASE_FLT_FN (BUILT_IN_CSINH
):
340 CASE_FLT_FN (BUILT_IN_CTAN
):
341 CASE_FLT_FN (BUILT_IN_CTANH
):
342 CASE_FLT_FN (BUILT_IN_ERF
):
343 CASE_FLT_FN (BUILT_IN_LLROUND
):
344 CASE_FLT_FN (BUILT_IN_LROUND
):
345 CASE_FLT_FN (BUILT_IN_ROUND
):
346 CASE_FLT_FN (BUILT_IN_SIN
):
347 CASE_FLT_FN (BUILT_IN_SINH
):
348 CASE_FLT_FN (BUILT_IN_TAN
):
349 CASE_FLT_FN (BUILT_IN_TANH
):
350 CASE_FLT_FN (BUILT_IN_TRUNC
):
353 CASE_FLT_FN (BUILT_IN_LLRINT
):
354 CASE_FLT_FN (BUILT_IN_LRINT
):
355 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
356 CASE_FLT_FN (BUILT_IN_RINT
):
357 return !flag_rounding_math
;
365 /* Check whether we may negate an integer constant T without causing
369 may_negate_without_overflow_p (const_tree t
)
373 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
375 type
= TREE_TYPE (t
);
376 if (TYPE_UNSIGNED (type
))
379 return !wi::only_sign_bit_p (t
);
382 /* Determine whether an expression T can be cheaply negated using
383 the function negate_expr without introducing undefined overflow. */
386 negate_expr_p (tree t
)
393 type
= TREE_TYPE (t
);
396 switch (TREE_CODE (t
))
399 if (INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_WRAPS (type
))
402 /* Check that -CST will not overflow type. */
403 return may_negate_without_overflow_p (t
);
405 return (INTEGRAL_TYPE_P (type
)
406 && TYPE_OVERFLOW_WRAPS (type
));
412 return !TYPE_OVERFLOW_SANITIZED (type
);
415 /* We want to canonicalize to positive real constants. Pretend
416 that only negative ones can be easily negated. */
417 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
420 return negate_expr_p (TREE_REALPART (t
))
421 && negate_expr_p (TREE_IMAGPART (t
));
425 if (FLOAT_TYPE_P (TREE_TYPE (type
)) || TYPE_OVERFLOW_WRAPS (type
))
428 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
430 for (i
= 0; i
< count
; i
++)
431 if (!negate_expr_p (VECTOR_CST_ELT (t
, i
)))
438 return negate_expr_p (TREE_OPERAND (t
, 0))
439 && negate_expr_p (TREE_OPERAND (t
, 1));
442 return negate_expr_p (TREE_OPERAND (t
, 0));
445 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
446 || HONOR_SIGNED_ZEROS (element_mode (type
)))
448 /* -(A + B) -> (-B) - A. */
449 if (negate_expr_p (TREE_OPERAND (t
, 1))
450 && reorder_operands_p (TREE_OPERAND (t
, 0),
451 TREE_OPERAND (t
, 1)))
453 /* -(A + B) -> (-A) - B. */
454 return negate_expr_p (TREE_OPERAND (t
, 0));
457 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
458 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
459 && !HONOR_SIGNED_ZEROS (element_mode (type
))
460 && reorder_operands_p (TREE_OPERAND (t
, 0),
461 TREE_OPERAND (t
, 1));
464 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
470 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t
))))
471 return negate_expr_p (TREE_OPERAND (t
, 1))
472 || negate_expr_p (TREE_OPERAND (t
, 0));
478 /* In general we can't negate A / B, because if A is INT_MIN and
479 B is 1, we may turn this into INT_MIN / -1 which is undefined
480 and actually traps on some architectures. But if overflow is
481 undefined, we can negate, because - (INT_MIN / 1) is an
483 if (INTEGRAL_TYPE_P (TREE_TYPE (t
)))
485 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
)))
487 /* If overflow is undefined then we have to be careful because
488 we ask whether it's ok to associate the negate with the
489 division which is not ok for example for
490 -((a - b) / c) where (-(a - b)) / c may invoke undefined
491 overflow because of negating INT_MIN. So do not use
492 negate_expr_p here but open-code the two important cases. */
493 if (TREE_CODE (TREE_OPERAND (t
, 0)) == NEGATE_EXPR
494 || (TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
495 && may_negate_without_overflow_p (TREE_OPERAND (t
, 0))))
498 else if (negate_expr_p (TREE_OPERAND (t
, 0)))
500 return negate_expr_p (TREE_OPERAND (t
, 1));
503 /* Negate -((double)float) as (double)(-float). */
504 if (TREE_CODE (type
) == REAL_TYPE
)
506 tree tem
= strip_float_extensions (t
);
508 return negate_expr_p (tem
);
513 /* Negate -f(x) as f(-x). */
514 if (negate_mathfn_p (builtin_mathfn_code (t
)))
515 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
519 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
520 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
522 tree op1
= TREE_OPERAND (t
, 1);
523 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
534 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
535 simplification is possible.
536 If negate_expr_p would return true for T, NULL_TREE will never be
540 fold_negate_expr (location_t loc
, tree t
)
542 tree type
= TREE_TYPE (t
);
545 switch (TREE_CODE (t
))
547 /* Convert - (~A) to A + 1. */
549 if (INTEGRAL_TYPE_P (type
))
550 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
551 build_one_cst (type
));
555 tem
= fold_negate_const (t
, type
);
556 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
557 || (ANY_INTEGRAL_TYPE_P (type
)
558 && !TYPE_OVERFLOW_TRAPS (type
)
559 && TYPE_OVERFLOW_WRAPS (type
))
560 || (flag_sanitize
& SANITIZE_SI_OVERFLOW
) == 0)
565 tem
= fold_negate_const (t
, type
);
569 tem
= fold_negate_const (t
, type
);
574 tree rpart
= fold_negate_expr (loc
, TREE_REALPART (t
));
575 tree ipart
= fold_negate_expr (loc
, TREE_IMAGPART (t
));
577 return build_complex (type
, rpart
, ipart
);
583 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
584 tree
*elts
= XALLOCAVEC (tree
, count
);
586 for (i
= 0; i
< count
; i
++)
588 elts
[i
] = fold_negate_expr (loc
, VECTOR_CST_ELT (t
, i
));
589 if (elts
[i
] == NULL_TREE
)
593 return build_vector (type
, elts
);
597 if (negate_expr_p (t
))
598 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
599 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
600 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
604 if (negate_expr_p (t
))
605 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
606 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
610 if (!TYPE_OVERFLOW_SANITIZED (type
))
611 return TREE_OPERAND (t
, 0);
615 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
616 && !HONOR_SIGNED_ZEROS (element_mode (type
)))
618 /* -(A + B) -> (-B) - A. */
619 if (negate_expr_p (TREE_OPERAND (t
, 1))
620 && reorder_operands_p (TREE_OPERAND (t
, 0),
621 TREE_OPERAND (t
, 1)))
623 tem
= negate_expr (TREE_OPERAND (t
, 1));
624 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
625 tem
, TREE_OPERAND (t
, 0));
628 /* -(A + B) -> (-A) - B. */
629 if (negate_expr_p (TREE_OPERAND (t
, 0)))
631 tem
= negate_expr (TREE_OPERAND (t
, 0));
632 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
633 tem
, TREE_OPERAND (t
, 1));
639 /* - (A - B) -> B - A */
640 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
641 && !HONOR_SIGNED_ZEROS (element_mode (type
))
642 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
643 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
644 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
648 if (TYPE_UNSIGNED (type
))
654 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
)))
656 tem
= TREE_OPERAND (t
, 1);
657 if (negate_expr_p (tem
))
658 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
659 TREE_OPERAND (t
, 0), negate_expr (tem
));
660 tem
= TREE_OPERAND (t
, 0);
661 if (negate_expr_p (tem
))
662 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
663 negate_expr (tem
), TREE_OPERAND (t
, 1));
670 /* In general we can't negate A / B, because if A is INT_MIN and
671 B is 1, we may turn this into INT_MIN / -1 which is undefined
672 and actually traps on some architectures. But if overflow is
673 undefined, we can negate, because - (INT_MIN / 1) is an
675 if (!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
677 const char * const warnmsg
= G_("assuming signed overflow does not "
678 "occur when negating a division");
679 tem
= TREE_OPERAND (t
, 1);
680 if (negate_expr_p (tem
))
682 if (INTEGRAL_TYPE_P (type
)
683 && (TREE_CODE (tem
) != INTEGER_CST
684 || integer_onep (tem
)))
685 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
686 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
687 TREE_OPERAND (t
, 0), negate_expr (tem
));
689 /* If overflow is undefined then we have to be careful because
690 we ask whether it's ok to associate the negate with the
691 division which is not ok for example for
692 -((a - b) / c) where (-(a - b)) / c may invoke undefined
693 overflow because of negating INT_MIN. So do not use
694 negate_expr_p here but open-code the two important cases. */
695 tem
= TREE_OPERAND (t
, 0);
696 if ((INTEGRAL_TYPE_P (type
)
697 && (TREE_CODE (tem
) == NEGATE_EXPR
698 || (TREE_CODE (tem
) == INTEGER_CST
699 && may_negate_without_overflow_p (tem
))))
700 || !INTEGRAL_TYPE_P (type
))
701 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
702 negate_expr (tem
), TREE_OPERAND (t
, 1));
707 /* Convert -((double)float) into (double)(-float). */
708 if (TREE_CODE (type
) == REAL_TYPE
)
710 tem
= strip_float_extensions (t
);
711 if (tem
!= t
&& negate_expr_p (tem
))
712 return fold_convert_loc (loc
, type
, negate_expr (tem
));
717 /* Negate -f(x) as f(-x). */
718 if (negate_mathfn_p (builtin_mathfn_code (t
))
719 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
723 fndecl
= get_callee_fndecl (t
);
724 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
725 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
730 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
731 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
733 tree op1
= TREE_OPERAND (t
, 1);
734 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
736 tree ntype
= TYPE_UNSIGNED (type
)
737 ? signed_type_for (type
)
738 : unsigned_type_for (type
);
739 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
740 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
741 return fold_convert_loc (loc
, type
, temp
);
753 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
754 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
766 loc
= EXPR_LOCATION (t
);
767 type
= TREE_TYPE (t
);
770 tem
= fold_negate_expr (loc
, t
);
772 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
773 return fold_convert_loc (loc
, type
, tem
);
776 /* Split a tree IN into a constant, literal and variable parts that could be
777 combined with CODE to make IN. "constant" means an expression with
778 TREE_CONSTANT but that isn't an actual constant. CODE must be a
779 commutative arithmetic operation. Store the constant part into *CONP,
780 the literal in *LITP and return the variable part. If a part isn't
781 present, set it to null. If the tree does not decompose in this way,
782 return the entire tree as the variable part and the other parts as null.
784 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
785 case, we negate an operand that was subtracted. Except if it is a
786 literal for which we use *MINUS_LITP instead.
788 If NEGATE_P is true, we are negating all of IN, again except a literal
789 for which we use *MINUS_LITP instead.
791 If IN is itself a literal or constant, return it as appropriate.
793 Note that we do not guarantee that any of the three values will be the
794 same type as IN, but they will have the same signedness and mode. */
797 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
798 tree
*minus_litp
, int negate_p
)
806 /* Strip any conversions that don't change the machine mode or signedness. */
807 STRIP_SIGN_NOPS (in
);
809 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
810 || TREE_CODE (in
) == FIXED_CST
)
812 else if (TREE_CODE (in
) == code
813 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
814 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
815 /* We can associate addition and subtraction together (even
816 though the C standard doesn't say so) for integers because
817 the value is not affected. For reals, the value might be
818 affected, so we can't. */
819 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
820 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
822 tree op0
= TREE_OPERAND (in
, 0);
823 tree op1
= TREE_OPERAND (in
, 1);
824 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
825 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
827 /* First see if either of the operands is a literal, then a constant. */
828 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
829 || TREE_CODE (op0
) == FIXED_CST
)
830 *litp
= op0
, op0
= 0;
831 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
832 || TREE_CODE (op1
) == FIXED_CST
)
833 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
835 if (op0
!= 0 && TREE_CONSTANT (op0
))
836 *conp
= op0
, op0
= 0;
837 else if (op1
!= 0 && TREE_CONSTANT (op1
))
838 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
840 /* If we haven't dealt with either operand, this is not a case we can
841 decompose. Otherwise, VAR is either of the ones remaining, if any. */
842 if (op0
!= 0 && op1
!= 0)
847 var
= op1
, neg_var_p
= neg1_p
;
849 /* Now do any needed negations. */
851 *minus_litp
= *litp
, *litp
= 0;
853 *conp
= negate_expr (*conp
);
855 var
= negate_expr (var
);
857 else if (TREE_CODE (in
) == BIT_NOT_EXPR
858 && code
== PLUS_EXPR
)
860 /* -X - 1 is folded to ~X, undo that here. */
861 *minus_litp
= build_one_cst (TREE_TYPE (in
));
862 var
= negate_expr (TREE_OPERAND (in
, 0));
864 else if (TREE_CONSTANT (in
))
872 *minus_litp
= *litp
, *litp
= 0;
873 else if (*minus_litp
)
874 *litp
= *minus_litp
, *minus_litp
= 0;
875 *conp
= negate_expr (*conp
);
876 var
= negate_expr (var
);
882 /* Re-associate trees split by the above function. T1 and T2 are
883 either expressions to associate or null. Return the new
884 expression, if any. LOC is the location of the new expression. If
885 we build an operation, do it in TYPE and with CODE. */
888 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
895 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
896 try to fold this since we will have infinite recursion. But do
897 deal with any NEGATE_EXPRs. */
898 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
899 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
901 if (code
== PLUS_EXPR
)
903 if (TREE_CODE (t1
) == NEGATE_EXPR
)
904 return build2_loc (loc
, MINUS_EXPR
, type
,
905 fold_convert_loc (loc
, type
, t2
),
906 fold_convert_loc (loc
, type
,
907 TREE_OPERAND (t1
, 0)));
908 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
909 return build2_loc (loc
, MINUS_EXPR
, type
,
910 fold_convert_loc (loc
, type
, t1
),
911 fold_convert_loc (loc
, type
,
912 TREE_OPERAND (t2
, 0)));
913 else if (integer_zerop (t2
))
914 return fold_convert_loc (loc
, type
, t1
);
916 else if (code
== MINUS_EXPR
)
918 if (integer_zerop (t2
))
919 return fold_convert_loc (loc
, type
, t1
);
922 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
923 fold_convert_loc (loc
, type
, t2
));
926 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
927 fold_convert_loc (loc
, type
, t2
));
930 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
931 for use in int_const_binop, size_binop and size_diffop. */
934 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
936 if (!INTEGRAL_TYPE_P (type1
) && !POINTER_TYPE_P (type1
))
938 if (!INTEGRAL_TYPE_P (type2
) && !POINTER_TYPE_P (type2
))
953 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
954 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
955 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
959 /* Combine two integer constants ARG1 and ARG2 under operation CODE
960 to produce a new constant. Return NULL_TREE if we don't know how
961 to evaluate CODE at compile-time. */
964 int_const_binop_1 (enum tree_code code
, const_tree arg1
, const_tree parg2
,
969 tree type
= TREE_TYPE (arg1
);
970 signop sign
= TYPE_SIGN (type
);
971 bool overflow
= false;
973 wide_int arg2
= wide_int::from (parg2
, TYPE_PRECISION (type
),
974 TYPE_SIGN (TREE_TYPE (parg2
)));
979 res
= wi::bit_or (arg1
, arg2
);
983 res
= wi::bit_xor (arg1
, arg2
);
987 res
= wi::bit_and (arg1
, arg2
);
992 if (wi::neg_p (arg2
))
995 if (code
== RSHIFT_EXPR
)
1001 if (code
== RSHIFT_EXPR
)
1002 /* It's unclear from the C standard whether shifts can overflow.
1003 The following code ignores overflow; perhaps a C standard
1004 interpretation ruling is needed. */
1005 res
= wi::rshift (arg1
, arg2
, sign
);
1007 res
= wi::lshift (arg1
, arg2
);
1012 if (wi::neg_p (arg2
))
1015 if (code
== RROTATE_EXPR
)
1016 code
= LROTATE_EXPR
;
1018 code
= RROTATE_EXPR
;
1021 if (code
== RROTATE_EXPR
)
1022 res
= wi::rrotate (arg1
, arg2
);
1024 res
= wi::lrotate (arg1
, arg2
);
1028 res
= wi::add (arg1
, arg2
, sign
, &overflow
);
1032 res
= wi::sub (arg1
, arg2
, sign
, &overflow
);
1036 res
= wi::mul (arg1
, arg2
, sign
, &overflow
);
1039 case MULT_HIGHPART_EXPR
:
1040 res
= wi::mul_high (arg1
, arg2
, sign
);
1043 case TRUNC_DIV_EXPR
:
1044 case EXACT_DIV_EXPR
:
1047 res
= wi::div_trunc (arg1
, arg2
, sign
, &overflow
);
1050 case FLOOR_DIV_EXPR
:
1053 res
= wi::div_floor (arg1
, arg2
, sign
, &overflow
);
1059 res
= wi::div_ceil (arg1
, arg2
, sign
, &overflow
);
1062 case ROUND_DIV_EXPR
:
1065 res
= wi::div_round (arg1
, arg2
, sign
, &overflow
);
1068 case TRUNC_MOD_EXPR
:
1071 res
= wi::mod_trunc (arg1
, arg2
, sign
, &overflow
);
1074 case FLOOR_MOD_EXPR
:
1077 res
= wi::mod_floor (arg1
, arg2
, sign
, &overflow
);
1083 res
= wi::mod_ceil (arg1
, arg2
, sign
, &overflow
);
1086 case ROUND_MOD_EXPR
:
1089 res
= wi::mod_round (arg1
, arg2
, sign
, &overflow
);
1093 res
= wi::min (arg1
, arg2
, sign
);
1097 res
= wi::max (arg1
, arg2
, sign
);
1104 t
= force_fit_type (type
, res
, overflowable
,
1105 (((sign
== SIGNED
|| overflowable
== -1)
1107 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (parg2
)));
1113 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
)
1115 return int_const_binop_1 (code
, arg1
, arg2
, 1);
1118 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1119 constant. We assume ARG1 and ARG2 have the same data type, or at least
1120 are the same kind of constant and the same machine mode. Return zero if
1121 combining the constants is not allowed in the current operating mode. */
1124 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1126 /* Sanity check for the recursive cases. */
1133 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg2
) == INTEGER_CST
)
1135 if (code
== POINTER_PLUS_EXPR
)
1136 return int_const_binop (PLUS_EXPR
,
1137 arg1
, fold_convert (TREE_TYPE (arg1
), arg2
));
1139 return int_const_binop (code
, arg1
, arg2
);
1142 if (TREE_CODE (arg1
) == REAL_CST
&& TREE_CODE (arg2
) == REAL_CST
)
1147 REAL_VALUE_TYPE value
;
1148 REAL_VALUE_TYPE result
;
1152 /* The following codes are handled by real_arithmetic. */
1167 d1
= TREE_REAL_CST (arg1
);
1168 d2
= TREE_REAL_CST (arg2
);
1170 type
= TREE_TYPE (arg1
);
1171 mode
= TYPE_MODE (type
);
1173 /* Don't perform operation if we honor signaling NaNs and
1174 either operand is a NaN. */
1175 if (HONOR_SNANS (mode
)
1176 && (REAL_VALUE_ISNAN (d1
) || REAL_VALUE_ISNAN (d2
)))
1179 /* Don't perform operation if it would raise a division
1180 by zero exception. */
1181 if (code
== RDIV_EXPR
1182 && real_equal (&d2
, &dconst0
)
1183 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1186 /* If either operand is a NaN, just return it. Otherwise, set up
1187 for floating-point trap; we return an overflow. */
1188 if (REAL_VALUE_ISNAN (d1
))
1190 else if (REAL_VALUE_ISNAN (d2
))
1193 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1194 real_convert (&result
, mode
, &value
);
1196 /* Don't constant fold this floating point operation if
1197 the result has overflowed and flag_trapping_math. */
1198 if (flag_trapping_math
1199 && MODE_HAS_INFINITIES (mode
)
1200 && REAL_VALUE_ISINF (result
)
1201 && !REAL_VALUE_ISINF (d1
)
1202 && !REAL_VALUE_ISINF (d2
))
1205 /* Don't constant fold this floating point operation if the
1206 result may dependent upon the run-time rounding mode and
1207 flag_rounding_math is set, or if GCC's software emulation
1208 is unable to accurately represent the result. */
1209 if ((flag_rounding_math
1210 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1211 && (inexact
|| !real_identical (&result
, &value
)))
1214 t
= build_real (type
, result
);
1216 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1220 if (TREE_CODE (arg1
) == FIXED_CST
)
1222 FIXED_VALUE_TYPE f1
;
1223 FIXED_VALUE_TYPE f2
;
1224 FIXED_VALUE_TYPE result
;
1229 /* The following codes are handled by fixed_arithmetic. */
1235 case TRUNC_DIV_EXPR
:
1236 if (TREE_CODE (arg2
) != FIXED_CST
)
1238 f2
= TREE_FIXED_CST (arg2
);
1244 if (TREE_CODE (arg2
) != INTEGER_CST
)
1247 f2
.data
.high
= w2
.elt (1);
1248 f2
.data
.low
= w2
.elt (0);
1257 f1
= TREE_FIXED_CST (arg1
);
1258 type
= TREE_TYPE (arg1
);
1259 sat_p
= TYPE_SATURATING (type
);
1260 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1261 t
= build_fixed (type
, result
);
1262 /* Propagate overflow flags. */
1263 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1264 TREE_OVERFLOW (t
) = 1;
1268 if (TREE_CODE (arg1
) == COMPLEX_CST
&& TREE_CODE (arg2
) == COMPLEX_CST
)
1270 tree type
= TREE_TYPE (arg1
);
1271 tree r1
= TREE_REALPART (arg1
);
1272 tree i1
= TREE_IMAGPART (arg1
);
1273 tree r2
= TREE_REALPART (arg2
);
1274 tree i2
= TREE_IMAGPART (arg2
);
1281 real
= const_binop (code
, r1
, r2
);
1282 imag
= const_binop (code
, i1
, i2
);
1286 if (COMPLEX_FLOAT_TYPE_P (type
))
1287 return do_mpc_arg2 (arg1
, arg2
, type
,
1288 /* do_nonfinite= */ folding_initializer
,
1291 real
= const_binop (MINUS_EXPR
,
1292 const_binop (MULT_EXPR
, r1
, r2
),
1293 const_binop (MULT_EXPR
, i1
, i2
));
1294 imag
= const_binop (PLUS_EXPR
,
1295 const_binop (MULT_EXPR
, r1
, i2
),
1296 const_binop (MULT_EXPR
, i1
, r2
));
1300 if (COMPLEX_FLOAT_TYPE_P (type
))
1301 return do_mpc_arg2 (arg1
, arg2
, type
,
1302 /* do_nonfinite= */ folding_initializer
,
1305 case TRUNC_DIV_EXPR
:
1307 case FLOOR_DIV_EXPR
:
1308 case ROUND_DIV_EXPR
:
1309 if (flag_complex_method
== 0)
1311 /* Keep this algorithm in sync with
1312 tree-complex.c:expand_complex_div_straight().
1314 Expand complex division to scalars, straightforward algorithm.
1315 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1319 = const_binop (PLUS_EXPR
,
1320 const_binop (MULT_EXPR
, r2
, r2
),
1321 const_binop (MULT_EXPR
, i2
, i2
));
1323 = const_binop (PLUS_EXPR
,
1324 const_binop (MULT_EXPR
, r1
, r2
),
1325 const_binop (MULT_EXPR
, i1
, i2
));
1327 = const_binop (MINUS_EXPR
,
1328 const_binop (MULT_EXPR
, i1
, r2
),
1329 const_binop (MULT_EXPR
, r1
, i2
));
1331 real
= const_binop (code
, t1
, magsquared
);
1332 imag
= const_binop (code
, t2
, magsquared
);
1336 /* Keep this algorithm in sync with
1337 tree-complex.c:expand_complex_div_wide().
1339 Expand complex division to scalars, modified algorithm to minimize
1340 overflow with wide input ranges. */
1341 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1342 fold_abs_const (r2
, TREE_TYPE (type
)),
1343 fold_abs_const (i2
, TREE_TYPE (type
)));
1345 if (integer_nonzerop (compare
))
1347 /* In the TRUE branch, we compute
1349 div = (br * ratio) + bi;
1350 tr = (ar * ratio) + ai;
1351 ti = (ai * ratio) - ar;
1354 tree ratio
= const_binop (code
, r2
, i2
);
1355 tree div
= const_binop (PLUS_EXPR
, i2
,
1356 const_binop (MULT_EXPR
, r2
, ratio
));
1357 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1358 real
= const_binop (PLUS_EXPR
, real
, i1
);
1359 real
= const_binop (code
, real
, div
);
1361 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1362 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1363 imag
= const_binop (code
, imag
, div
);
1367 /* In the FALSE branch, we compute
1369 divisor = (d * ratio) + c;
1370 tr = (b * ratio) + a;
1371 ti = b - (a * ratio);
1374 tree ratio
= const_binop (code
, i2
, r2
);
1375 tree div
= const_binop (PLUS_EXPR
, r2
,
1376 const_binop (MULT_EXPR
, i2
, ratio
));
1378 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1379 real
= const_binop (PLUS_EXPR
, real
, r1
);
1380 real
= const_binop (code
, real
, div
);
1382 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1383 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1384 imag
= const_binop (code
, imag
, div
);
1394 return build_complex (type
, real
, imag
);
1397 if (TREE_CODE (arg1
) == VECTOR_CST
1398 && TREE_CODE (arg2
) == VECTOR_CST
)
1400 tree type
= TREE_TYPE (arg1
);
1401 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1402 tree
*elts
= XALLOCAVEC (tree
, count
);
1404 for (i
= 0; i
< count
; i
++)
1406 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1407 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1409 elts
[i
] = const_binop (code
, elem1
, elem2
);
1411 /* It is possible that const_binop cannot handle the given
1412 code and return NULL_TREE */
1413 if (elts
[i
] == NULL_TREE
)
1417 return build_vector (type
, elts
);
1420 /* Shifts allow a scalar offset for a vector. */
1421 if (TREE_CODE (arg1
) == VECTOR_CST
1422 && TREE_CODE (arg2
) == INTEGER_CST
)
1424 tree type
= TREE_TYPE (arg1
);
1425 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1426 tree
*elts
= XALLOCAVEC (tree
, count
);
1428 for (i
= 0; i
< count
; i
++)
1430 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1432 elts
[i
] = const_binop (code
, elem1
, arg2
);
1434 /* It is possible that const_binop cannot handle the given
1435 code and return NULL_TREE. */
1436 if (elts
[i
] == NULL_TREE
)
1440 return build_vector (type
, elts
);
1445 /* Overload that adds a TYPE parameter to be able to dispatch
1446 to fold_relational_const. */
1449 const_binop (enum tree_code code
, tree type
, tree arg1
, tree arg2
)
1451 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1452 return fold_relational_const (code
, type
, arg1
, arg2
);
1454 /* ??? Until we make the const_binop worker take the type of the
1455 result as argument put those cases that need it here. */
1459 if ((TREE_CODE (arg1
) == REAL_CST
1460 && TREE_CODE (arg2
) == REAL_CST
)
1461 || (TREE_CODE (arg1
) == INTEGER_CST
1462 && TREE_CODE (arg2
) == INTEGER_CST
))
1463 return build_complex (type
, arg1
, arg2
);
1466 case VEC_PACK_TRUNC_EXPR
:
1467 case VEC_PACK_FIX_TRUNC_EXPR
:
1469 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
1472 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
/ 2
1473 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
)) == nelts
/ 2);
1474 if (TREE_CODE (arg1
) != VECTOR_CST
1475 || TREE_CODE (arg2
) != VECTOR_CST
)
1478 elts
= XALLOCAVEC (tree
, nelts
);
1479 if (!vec_cst_ctor_to_array (arg1
, elts
)
1480 || !vec_cst_ctor_to_array (arg2
, elts
+ nelts
/ 2))
1483 for (i
= 0; i
< nelts
; i
++)
1485 elts
[i
] = fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
1486 ? NOP_EXPR
: FIX_TRUNC_EXPR
,
1487 TREE_TYPE (type
), elts
[i
]);
1488 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
1492 return build_vector (type
, elts
);
1495 case VEC_WIDEN_MULT_LO_EXPR
:
1496 case VEC_WIDEN_MULT_HI_EXPR
:
1497 case VEC_WIDEN_MULT_EVEN_EXPR
:
1498 case VEC_WIDEN_MULT_ODD_EXPR
:
1500 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
);
1501 unsigned int out
, ofs
, scale
;
1504 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
* 2
1505 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
)) == nelts
* 2);
1506 if (TREE_CODE (arg1
) != VECTOR_CST
|| TREE_CODE (arg2
) != VECTOR_CST
)
1509 elts
= XALLOCAVEC (tree
, nelts
* 4);
1510 if (!vec_cst_ctor_to_array (arg1
, elts
)
1511 || !vec_cst_ctor_to_array (arg2
, elts
+ nelts
* 2))
1514 if (code
== VEC_WIDEN_MULT_LO_EXPR
)
1515 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? nelts
: 0;
1516 else if (code
== VEC_WIDEN_MULT_HI_EXPR
)
1517 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? 0 : nelts
;
1518 else if (code
== VEC_WIDEN_MULT_EVEN_EXPR
)
1520 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1523 for (out
= 0; out
< nelts
; out
++)
1525 unsigned int in1
= (out
<< scale
) + ofs
;
1526 unsigned int in2
= in1
+ nelts
* 2;
1529 t1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in1
]);
1530 t2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in2
]);
1532 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
1534 elts
[out
] = const_binop (MULT_EXPR
, t1
, t2
);
1535 if (elts
[out
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[out
]))
1539 return build_vector (type
, elts
);
1545 if (TREE_CODE_CLASS (code
) != tcc_binary
)
1548 /* Make sure type and arg0 have the same saturating flag. */
1549 gcc_checking_assert (TYPE_SATURATING (type
)
1550 == TYPE_SATURATING (TREE_TYPE (arg1
)));
1552 return const_binop (code
, arg1
, arg2
);
1555 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1556 Return zero if computing the constants is not possible. */
1559 const_unop (enum tree_code code
, tree type
, tree arg0
)
1565 case FIX_TRUNC_EXPR
:
1566 case FIXED_CONVERT_EXPR
:
1567 return fold_convert_const (code
, type
, arg0
);
1569 case ADDR_SPACE_CONVERT_EXPR
:
1570 if (integer_zerop (arg0
))
1571 return fold_convert_const (code
, type
, arg0
);
1574 case VIEW_CONVERT_EXPR
:
1575 return fold_view_convert_expr (type
, arg0
);
1579 /* Can't call fold_negate_const directly here as that doesn't
1580 handle all cases and we might not be able to negate some
1582 tree tem
= fold_negate_expr (UNKNOWN_LOCATION
, arg0
);
1583 if (tem
&& CONSTANT_CLASS_P (tem
))
1589 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
1590 return fold_abs_const (arg0
, type
);
1594 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1596 tree ipart
= fold_negate_const (TREE_IMAGPART (arg0
),
1598 return build_complex (type
, TREE_REALPART (arg0
), ipart
);
1603 if (TREE_CODE (arg0
) == INTEGER_CST
)
1604 return fold_not_const (arg0
, type
);
1605 /* Perform BIT_NOT_EXPR on each element individually. */
1606 else if (TREE_CODE (arg0
) == VECTOR_CST
)
1610 unsigned count
= VECTOR_CST_NELTS (arg0
), i
;
1612 elements
= XALLOCAVEC (tree
, count
);
1613 for (i
= 0; i
< count
; i
++)
1615 elem
= VECTOR_CST_ELT (arg0
, i
);
1616 elem
= const_unop (BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
1617 if (elem
== NULL_TREE
)
1622 return build_vector (type
, elements
);
1626 case TRUTH_NOT_EXPR
:
1627 if (TREE_CODE (arg0
) == INTEGER_CST
)
1628 return constant_boolean_node (integer_zerop (arg0
), type
);
1632 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1633 return fold_convert (type
, TREE_REALPART (arg0
));
1637 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1638 return fold_convert (type
, TREE_IMAGPART (arg0
));
1641 case VEC_UNPACK_LO_EXPR
:
1642 case VEC_UNPACK_HI_EXPR
:
1643 case VEC_UNPACK_FLOAT_LO_EXPR
:
1644 case VEC_UNPACK_FLOAT_HI_EXPR
:
1646 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
1648 enum tree_code subcode
;
1650 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2);
1651 if (TREE_CODE (arg0
) != VECTOR_CST
)
1654 elts
= XALLOCAVEC (tree
, nelts
* 2);
1655 if (!vec_cst_ctor_to_array (arg0
, elts
))
1658 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
1659 || code
== VEC_UNPACK_FLOAT_LO_EXPR
))
1662 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
1665 subcode
= FLOAT_EXPR
;
1667 for (i
= 0; i
< nelts
; i
++)
1669 elts
[i
] = fold_convert_const (subcode
, TREE_TYPE (type
), elts
[i
]);
1670 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
1674 return build_vector (type
, elts
);
1677 case REDUC_MIN_EXPR
:
1678 case REDUC_MAX_EXPR
:
1679 case REDUC_PLUS_EXPR
:
1681 unsigned int nelts
, i
;
1683 enum tree_code subcode
;
1685 if (TREE_CODE (arg0
) != VECTOR_CST
)
1687 nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
));
1689 elts
= XALLOCAVEC (tree
, nelts
);
1690 if (!vec_cst_ctor_to_array (arg0
, elts
))
1695 case REDUC_MIN_EXPR
: subcode
= MIN_EXPR
; break;
1696 case REDUC_MAX_EXPR
: subcode
= MAX_EXPR
; break;
1697 case REDUC_PLUS_EXPR
: subcode
= PLUS_EXPR
; break;
1698 default: gcc_unreachable ();
1701 for (i
= 1; i
< nelts
; i
++)
1703 elts
[0] = const_binop (subcode
, elts
[0], elts
[i
]);
1704 if (elts
[0] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[0]))
1718 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1719 indicates which particular sizetype to create. */
1722 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1724 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1727 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1728 is a tree code. The type of the result is taken from the operands.
1729 Both must be equivalent integer types, ala int_binop_types_match_p.
1730 If the operands are constant, so is the result. */
1733 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1735 tree type
= TREE_TYPE (arg0
);
1737 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1738 return error_mark_node
;
1740 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1743 /* Handle the special case of two integer constants faster. */
1744 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1746 /* And some specific cases even faster than that. */
1747 if (code
== PLUS_EXPR
)
1749 if (integer_zerop (arg0
) && !TREE_OVERFLOW (arg0
))
1751 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1754 else if (code
== MINUS_EXPR
)
1756 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1759 else if (code
== MULT_EXPR
)
1761 if (integer_onep (arg0
) && !TREE_OVERFLOW (arg0
))
1765 /* Handle general case of two integer constants. For sizetype
1766 constant calculations we always want to know about overflow,
1767 even in the unsigned case. */
1768 return int_const_binop_1 (code
, arg0
, arg1
, -1);
1771 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
1774 /* Given two values, either both of sizetype or both of bitsizetype,
1775 compute the difference between the two values. Return the value
1776 in signed type corresponding to the type of the operands. */
1779 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1781 tree type
= TREE_TYPE (arg0
);
1784 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1787 /* If the type is already signed, just do the simple thing. */
1788 if (!TYPE_UNSIGNED (type
))
1789 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
1791 if (type
== sizetype
)
1793 else if (type
== bitsizetype
)
1794 ctype
= sbitsizetype
;
1796 ctype
= signed_type_for (type
);
1798 /* If either operand is not a constant, do the conversions to the signed
1799 type and subtract. The hardware will do the right thing with any
1800 overflow in the subtraction. */
1801 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1802 return size_binop_loc (loc
, MINUS_EXPR
,
1803 fold_convert_loc (loc
, ctype
, arg0
),
1804 fold_convert_loc (loc
, ctype
, arg1
));
1806 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1807 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1808 overflow) and negate (which can't either). Special-case a result
1809 of zero while we're here. */
1810 if (tree_int_cst_equal (arg0
, arg1
))
1811 return build_int_cst (ctype
, 0);
1812 else if (tree_int_cst_lt (arg1
, arg0
))
1813 return fold_convert_loc (loc
, ctype
,
1814 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
1816 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
1817 fold_convert_loc (loc
, ctype
,
1818 size_binop_loc (loc
,
1823 /* A subroutine of fold_convert_const handling conversions of an
1824 INTEGER_CST to another integer type. */
1827 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
1829 /* Given an integer constant, make new constant with new type,
1830 appropriately sign-extended or truncated. Use widest_int
1831 so that any extension is done according ARG1's type. */
1832 return force_fit_type (type
, wi::to_widest (arg1
),
1833 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1834 TREE_OVERFLOW (arg1
));
1837 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1838 to an integer type. */
1841 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
1843 bool overflow
= false;
1846 /* The following code implements the floating point to integer
1847 conversion rules required by the Java Language Specification,
1848 that IEEE NaNs are mapped to zero and values that overflow
1849 the target precision saturate, i.e. values greater than
1850 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1851 are mapped to INT_MIN. These semantics are allowed by the
1852 C and C++ standards that simply state that the behavior of
1853 FP-to-integer conversion is unspecified upon overflow. */
1857 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1861 case FIX_TRUNC_EXPR
:
1862 real_trunc (&r
, VOIDmode
, &x
);
1869 /* If R is NaN, return zero and show we have an overflow. */
1870 if (REAL_VALUE_ISNAN (r
))
1873 val
= wi::zero (TYPE_PRECISION (type
));
1876 /* See if R is less than the lower bound or greater than the
1881 tree lt
= TYPE_MIN_VALUE (type
);
1882 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1883 if (real_less (&r
, &l
))
1892 tree ut
= TYPE_MAX_VALUE (type
);
1895 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1896 if (real_less (&u
, &r
))
1905 val
= real_to_integer (&r
, &overflow
, TYPE_PRECISION (type
));
1907 t
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
1911 /* A subroutine of fold_convert_const handling conversions of a
1912 FIXED_CST to an integer type. */
1915 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
1918 double_int temp
, temp_trunc
;
1921 /* Right shift FIXED_CST to temp by fbit. */
1922 temp
= TREE_FIXED_CST (arg1
).data
;
1923 mode
= TREE_FIXED_CST (arg1
).mode
;
1924 if (GET_MODE_FBIT (mode
) < HOST_BITS_PER_DOUBLE_INT
)
1926 temp
= temp
.rshift (GET_MODE_FBIT (mode
),
1927 HOST_BITS_PER_DOUBLE_INT
,
1928 SIGNED_FIXED_POINT_MODE_P (mode
));
1930 /* Left shift temp to temp_trunc by fbit. */
1931 temp_trunc
= temp
.lshift (GET_MODE_FBIT (mode
),
1932 HOST_BITS_PER_DOUBLE_INT
,
1933 SIGNED_FIXED_POINT_MODE_P (mode
));
1937 temp
= double_int_zero
;
1938 temp_trunc
= double_int_zero
;
1941 /* If FIXED_CST is negative, we need to round the value toward 0.
1942 By checking if the fractional bits are not zero to add 1 to temp. */
1943 if (SIGNED_FIXED_POINT_MODE_P (mode
)
1944 && temp_trunc
.is_negative ()
1945 && TREE_FIXED_CST (arg1
).data
!= temp_trunc
)
1946 temp
+= double_int_one
;
1948 /* Given a fixed-point constant, make new constant with new type,
1949 appropriately sign-extended or truncated. */
1950 t
= force_fit_type (type
, temp
, -1,
1951 (temp
.is_negative ()
1952 && (TYPE_UNSIGNED (type
)
1953 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1954 | TREE_OVERFLOW (arg1
));
1959 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1960 to another floating point type. */
1963 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
1965 REAL_VALUE_TYPE value
;
1968 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
1969 t
= build_real (type
, value
);
1971 /* If converting an infinity or NAN to a representation that doesn't
1972 have one, set the overflow bit so that we can produce some kind of
1973 error message at the appropriate point if necessary. It's not the
1974 most user-friendly message, but it's better than nothing. */
1975 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
1976 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
1977 TREE_OVERFLOW (t
) = 1;
1978 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
1979 && !MODE_HAS_NANS (TYPE_MODE (type
)))
1980 TREE_OVERFLOW (t
) = 1;
1981 /* Regular overflow, conversion produced an infinity in a mode that
1982 can't represent them. */
1983 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
1984 && REAL_VALUE_ISINF (value
)
1985 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
1986 TREE_OVERFLOW (t
) = 1;
1988 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1992 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1993 to a floating point type. */
1996 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
1998 REAL_VALUE_TYPE value
;
2001 real_convert_from_fixed (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
));
2002 t
= build_real (type
, value
);
2004 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2008 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2009 to another fixed-point type. */
2012 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
2014 FIXED_VALUE_TYPE value
;
2018 overflow_p
= fixed_convert (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
),
2019 TYPE_SATURATING (type
));
2020 t
= build_fixed (type
, value
);
2022 /* Propagate overflow flags. */
2023 if (overflow_p
| TREE_OVERFLOW (arg1
))
2024 TREE_OVERFLOW (t
) = 1;
2028 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2029 to a fixed-point type. */
2032 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
2034 FIXED_VALUE_TYPE value
;
2039 gcc_assert (TREE_INT_CST_NUNITS (arg1
) <= 2);
2041 di
.low
= TREE_INT_CST_ELT (arg1
, 0);
2042 if (TREE_INT_CST_NUNITS (arg1
) == 1)
2043 di
.high
= (HOST_WIDE_INT
) di
.low
< 0 ? (HOST_WIDE_INT
) -1 : 0;
2045 di
.high
= TREE_INT_CST_ELT (arg1
, 1);
2047 overflow_p
= fixed_convert_from_int (&value
, TYPE_MODE (type
), di
,
2048 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
2049 TYPE_SATURATING (type
));
2050 t
= build_fixed (type
, value
);
2052 /* Propagate overflow flags. */
2053 if (overflow_p
| TREE_OVERFLOW (arg1
))
2054 TREE_OVERFLOW (t
) = 1;
2058 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2059 to a fixed-point type. */
2062 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
2064 FIXED_VALUE_TYPE value
;
2068 overflow_p
= fixed_convert_from_real (&value
, TYPE_MODE (type
),
2069 &TREE_REAL_CST (arg1
),
2070 TYPE_SATURATING (type
));
2071 t
= build_fixed (type
, value
);
2073 /* Propagate overflow flags. */
2074 if (overflow_p
| TREE_OVERFLOW (arg1
))
2075 TREE_OVERFLOW (t
) = 1;
2079 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2080 type TYPE. If no simplification can be done return NULL_TREE. */
2083 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
2085 if (TREE_TYPE (arg1
) == type
)
2088 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
2089 || TREE_CODE (type
) == OFFSET_TYPE
)
2091 if (TREE_CODE (arg1
) == INTEGER_CST
)
2092 return fold_convert_const_int_from_int (type
, arg1
);
2093 else if (TREE_CODE (arg1
) == REAL_CST
)
2094 return fold_convert_const_int_from_real (code
, type
, arg1
);
2095 else if (TREE_CODE (arg1
) == FIXED_CST
)
2096 return fold_convert_const_int_from_fixed (type
, arg1
);
2098 else if (TREE_CODE (type
) == REAL_TYPE
)
2100 if (TREE_CODE (arg1
) == INTEGER_CST
)
2101 return build_real_from_int_cst (type
, arg1
);
2102 else if (TREE_CODE (arg1
) == REAL_CST
)
2103 return fold_convert_const_real_from_real (type
, arg1
);
2104 else if (TREE_CODE (arg1
) == FIXED_CST
)
2105 return fold_convert_const_real_from_fixed (type
, arg1
);
2107 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
2109 if (TREE_CODE (arg1
) == FIXED_CST
)
2110 return fold_convert_const_fixed_from_fixed (type
, arg1
);
2111 else if (TREE_CODE (arg1
) == INTEGER_CST
)
2112 return fold_convert_const_fixed_from_int (type
, arg1
);
2113 else if (TREE_CODE (arg1
) == REAL_CST
)
2114 return fold_convert_const_fixed_from_real (type
, arg1
);
2119 /* Construct a vector of zero elements of vector type TYPE. */
2122 build_zero_vector (tree type
)
2126 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
2127 return build_vector_from_val (type
, t
);
2130 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2133 fold_convertible_p (const_tree type
, const_tree arg
)
2135 tree orig
= TREE_TYPE (arg
);
2140 if (TREE_CODE (arg
) == ERROR_MARK
2141 || TREE_CODE (type
) == ERROR_MARK
2142 || TREE_CODE (orig
) == ERROR_MARK
)
2145 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2148 switch (TREE_CODE (type
))
2150 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2151 case POINTER_TYPE
: case REFERENCE_TYPE
:
2153 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2154 || TREE_CODE (orig
) == OFFSET_TYPE
)
2156 return (TREE_CODE (orig
) == VECTOR_TYPE
2157 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2160 case FIXED_POINT_TYPE
:
2164 return TREE_CODE (type
) == TREE_CODE (orig
);
2171 /* Convert expression ARG to type TYPE. Used by the middle-end for
2172 simple conversions in preference to calling the front-end's convert. */
2175 fold_convert_loc (location_t loc
, tree type
, tree arg
)
2177 tree orig
= TREE_TYPE (arg
);
2183 if (TREE_CODE (arg
) == ERROR_MARK
2184 || TREE_CODE (type
) == ERROR_MARK
2185 || TREE_CODE (orig
) == ERROR_MARK
)
2186 return error_mark_node
;
2188 switch (TREE_CODE (type
))
2191 case REFERENCE_TYPE
:
2192 /* Handle conversions between pointers to different address spaces. */
2193 if (POINTER_TYPE_P (orig
)
2194 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
2195 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
2196 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
2199 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2201 if (TREE_CODE (arg
) == INTEGER_CST
)
2203 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2204 if (tem
!= NULL_TREE
)
2207 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2208 || TREE_CODE (orig
) == OFFSET_TYPE
)
2209 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2210 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
2211 return fold_convert_loc (loc
, type
,
2212 fold_build1_loc (loc
, REALPART_EXPR
,
2213 TREE_TYPE (orig
), arg
));
2214 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
2215 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2216 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2219 if (TREE_CODE (arg
) == INTEGER_CST
)
2221 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
2222 if (tem
!= NULL_TREE
)
2225 else if (TREE_CODE (arg
) == REAL_CST
)
2227 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2228 if (tem
!= NULL_TREE
)
2231 else if (TREE_CODE (arg
) == FIXED_CST
)
2233 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2234 if (tem
!= NULL_TREE
)
2238 switch (TREE_CODE (orig
))
2241 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2242 case POINTER_TYPE
: case REFERENCE_TYPE
:
2243 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
2246 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2248 case FIXED_POINT_TYPE
:
2249 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2252 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2253 return fold_convert_loc (loc
, type
, tem
);
2259 case FIXED_POINT_TYPE
:
2260 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
2261 || TREE_CODE (arg
) == REAL_CST
)
2263 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2264 if (tem
!= NULL_TREE
)
2265 goto fold_convert_exit
;
2268 switch (TREE_CODE (orig
))
2270 case FIXED_POINT_TYPE
:
2275 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2278 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2279 return fold_convert_loc (loc
, type
, tem
);
2286 switch (TREE_CODE (orig
))
2289 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2290 case POINTER_TYPE
: case REFERENCE_TYPE
:
2292 case FIXED_POINT_TYPE
:
2293 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
2294 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
2295 fold_convert_loc (loc
, TREE_TYPE (type
),
2296 integer_zero_node
));
2301 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2303 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2304 TREE_OPERAND (arg
, 0));
2305 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2306 TREE_OPERAND (arg
, 1));
2307 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2310 arg
= save_expr (arg
);
2311 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2312 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2313 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
2314 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
2315 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2323 if (integer_zerop (arg
))
2324 return build_zero_vector (type
);
2325 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2326 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2327 || TREE_CODE (orig
) == VECTOR_TYPE
);
2328 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2331 tem
= fold_ignored_result (arg
);
2332 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2335 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2336 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2340 protected_set_expr_location_unshare (tem
, loc
);
2344 /* Return false if expr can be assumed not to be an lvalue, true
2348 maybe_lvalue_p (const_tree x
)
2350 /* We only need to wrap lvalue tree codes. */
2351 switch (TREE_CODE (x
))
2364 case ARRAY_RANGE_REF
:
2370 case PREINCREMENT_EXPR
:
2371 case PREDECREMENT_EXPR
:
2373 case TRY_CATCH_EXPR
:
2374 case WITH_CLEANUP_EXPR
:
2383 /* Assume the worst for front-end tree codes. */
2384 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2392 /* Return an expr equal to X but certainly not valid as an lvalue. */
2395 non_lvalue_loc (location_t loc
, tree x
)
2397 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2402 if (! maybe_lvalue_p (x
))
2404 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2407 /* When pedantic, return an expr equal to X but certainly not valid as a
2408 pedantic lvalue. Otherwise, return X. */
2411 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2413 return protected_set_expr_location_unshare (x
, loc
);
2416 /* Given a tree comparison code, return the code that is the logical inverse.
2417 It is generally not safe to do this for floating-point comparisons, except
2418 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2419 ERROR_MARK in this case. */
2422 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2424 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2425 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2435 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2437 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2439 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2441 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2455 return UNORDERED_EXPR
;
2456 case UNORDERED_EXPR
:
2457 return ORDERED_EXPR
;
2463 /* Similar, but return the comparison that results if the operands are
2464 swapped. This is safe for floating-point. */
2467 swap_tree_comparison (enum tree_code code
)
2474 case UNORDERED_EXPR
:
2500 /* Convert a comparison tree code from an enum tree_code representation
2501 into a compcode bit-based encoding. This function is the inverse of
2502 compcode_to_comparison. */
2504 static enum comparison_code
2505 comparison_to_compcode (enum tree_code code
)
2522 return COMPCODE_ORD
;
2523 case UNORDERED_EXPR
:
2524 return COMPCODE_UNORD
;
2526 return COMPCODE_UNLT
;
2528 return COMPCODE_UNEQ
;
2530 return COMPCODE_UNLE
;
2532 return COMPCODE_UNGT
;
2534 return COMPCODE_LTGT
;
2536 return COMPCODE_UNGE
;
2542 /* Convert a compcode bit-based encoding of a comparison operator back
2543 to GCC's enum tree_code representation. This function is the
2544 inverse of comparison_to_compcode. */
2546 static enum tree_code
2547 compcode_to_comparison (enum comparison_code code
)
2564 return ORDERED_EXPR
;
2565 case COMPCODE_UNORD
:
2566 return UNORDERED_EXPR
;
2584 /* Return a tree for the comparison which is the combination of
2585 doing the AND or OR (depending on CODE) of the two operations LCODE
2586 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2587 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2588 if this makes the transformation invalid. */
2591 combine_comparisons (location_t loc
,
2592 enum tree_code code
, enum tree_code lcode
,
2593 enum tree_code rcode
, tree truth_type
,
2594 tree ll_arg
, tree lr_arg
)
2596 bool honor_nans
= HONOR_NANS (ll_arg
);
2597 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2598 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2603 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2604 compcode
= lcompcode
& rcompcode
;
2607 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2608 compcode
= lcompcode
| rcompcode
;
2617 /* Eliminate unordered comparisons, as well as LTGT and ORD
2618 which are not used unless the mode has NaNs. */
2619 compcode
&= ~COMPCODE_UNORD
;
2620 if (compcode
== COMPCODE_LTGT
)
2621 compcode
= COMPCODE_NE
;
2622 else if (compcode
== COMPCODE_ORD
)
2623 compcode
= COMPCODE_TRUE
;
2625 else if (flag_trapping_math
)
2627 /* Check that the original operation and the optimized ones will trap
2628 under the same condition. */
2629 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2630 && (lcompcode
!= COMPCODE_EQ
)
2631 && (lcompcode
!= COMPCODE_ORD
);
2632 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2633 && (rcompcode
!= COMPCODE_EQ
)
2634 && (rcompcode
!= COMPCODE_ORD
);
2635 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2636 && (compcode
!= COMPCODE_EQ
)
2637 && (compcode
!= COMPCODE_ORD
);
2639 /* In a short-circuited boolean expression the LHS might be
2640 such that the RHS, if evaluated, will never trap. For
2641 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2642 if neither x nor y is NaN. (This is a mixed blessing: for
2643 example, the expression above will never trap, hence
2644 optimizing it to x < y would be invalid). */
2645 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2646 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2649 /* If the comparison was short-circuited, and only the RHS
2650 trapped, we may now generate a spurious trap. */
2652 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2655 /* If we changed the conditions that cause a trap, we lose. */
2656 if ((ltrap
|| rtrap
) != trap
)
2660 if (compcode
== COMPCODE_TRUE
)
2661 return constant_boolean_node (true, truth_type
);
2662 else if (compcode
== COMPCODE_FALSE
)
2663 return constant_boolean_node (false, truth_type
);
2666 enum tree_code tcode
;
2668 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2669 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
2673 /* Return nonzero if two operands (typically of the same tree node)
2674 are necessarily equal. If either argument has side-effects this
2675 function returns zero. FLAGS modifies behavior as follows:
2677 If OEP_ONLY_CONST is set, only return nonzero for constants.
2678 This function tests whether the operands are indistinguishable;
2679 it does not test whether they are equal using C's == operation.
2680 The distinction is important for IEEE floating point, because
2681 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2682 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2684 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2685 even though it may hold multiple values during a function.
2686 This is because a GCC tree node guarantees that nothing else is
2687 executed between the evaluation of its "operands" (which may often
2688 be evaluated in arbitrary order). Hence if the operands themselves
2689 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2690 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2691 unset means assuming isochronic (or instantaneous) tree equivalence.
2692 Unless comparing arbitrary expression trees, such as from different
2693 statements, this flag can usually be left unset.
2695 If OEP_PURE_SAME is set, then pure functions with identical arguments
2696 are considered the same. It is used when the caller has other ways
2697 to ensure that global memory is unchanged in between.
2699 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2700 not values of expressions. OEP_CONSTANT_ADDRESS_OF in addition to
2701 OEP_ADDRESS_OF is used for ADDR_EXPR with TREE_CONSTANT flag set and we
2702 further ignore any side effects on SAVE_EXPRs then. */
2705 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
2707 /* If either is ERROR_MARK, they aren't equal. */
2708 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2709 || TREE_TYPE (arg0
) == error_mark_node
2710 || TREE_TYPE (arg1
) == error_mark_node
)
2713 /* Similar, if either does not have a type (like a released SSA name),
2714 they aren't equal. */
2715 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2718 /* Check equality of integer constants before bailing out due to
2719 precision differences. */
2720 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2722 /* Address of INTEGER_CST is not defined; check that we did not forget
2723 to drop the OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2724 gcc_checking_assert (!(flags
2725 & (OEP_ADDRESS_OF
| OEP_CONSTANT_ADDRESS_OF
)));
2726 return tree_int_cst_equal (arg0
, arg1
);
2729 if (!(flags
& OEP_ADDRESS_OF
))
2731 /* If both types don't have the same signedness, then we can't consider
2732 them equal. We must check this before the STRIP_NOPS calls
2733 because they may change the signedness of the arguments. As pointers
2734 strictly don't have a signedness, require either two pointers or
2735 two non-pointers as well. */
2736 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2737 || POINTER_TYPE_P (TREE_TYPE (arg0
))
2738 != POINTER_TYPE_P (TREE_TYPE (arg1
)))
2741 /* We cannot consider pointers to different address space equal. */
2742 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
2743 && POINTER_TYPE_P (TREE_TYPE (arg1
))
2744 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2745 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2748 /* If both types don't have the same precision, then it is not safe
2750 if (element_precision (TREE_TYPE (arg0
))
2751 != element_precision (TREE_TYPE (arg1
)))
2758 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2759 sanity check once the issue is solved. */
2761 /* Addresses of conversions and SSA_NAMEs (and many other things)
2762 are not defined. Check that we did not forget to drop the
2763 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2764 gcc_checking_assert (!CONVERT_EXPR_P (arg0
) && !CONVERT_EXPR_P (arg1
)
2765 && TREE_CODE (arg0
) != SSA_NAME
);
2768 /* In case both args are comparisons but with different comparison
2769 code, try to swap the comparison operands of one arg to produce
2770 a match and compare that variant. */
2771 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2772 && COMPARISON_CLASS_P (arg0
)
2773 && COMPARISON_CLASS_P (arg1
))
2775 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2777 if (TREE_CODE (arg0
) == swap_code
)
2778 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2779 TREE_OPERAND (arg1
, 1), flags
)
2780 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2781 TREE_OPERAND (arg1
, 0), flags
);
2784 if (TREE_CODE (arg0
) != TREE_CODE (arg1
))
2786 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2787 if (CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
))
2789 else if (flags
& OEP_ADDRESS_OF
)
2791 /* If we are interested in comparing addresses ignore
2792 MEM_REF wrappings of the base that can appear just for
2794 if (TREE_CODE (arg0
) == MEM_REF
2796 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ADDR_EXPR
2797 && TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0) == arg1
2798 && integer_zerop (TREE_OPERAND (arg0
, 1)))
2800 else if (TREE_CODE (arg1
) == MEM_REF
2802 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ADDR_EXPR
2803 && TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0) == arg0
2804 && integer_zerop (TREE_OPERAND (arg1
, 1)))
2812 /* This is needed for conversions and for COMPONENT_REF.
2813 Might as well play it safe and always test this. */
2814 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2815 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2816 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2819 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2820 We don't care about side effects in that case because the SAVE_EXPR
2821 takes care of that for us. In all other cases, two expressions are
2822 equal if they have no side effects. If we have two identical
2823 expressions with side effects that should be treated the same due
2824 to the only side effects being identical SAVE_EXPR's, that will
2825 be detected in the recursive calls below.
2826 If we are taking an invariant address of two identical objects
2827 they are necessarily equal as well. */
2828 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2829 && (TREE_CODE (arg0
) == SAVE_EXPR
2830 || (flags
& OEP_CONSTANT_ADDRESS_OF
)
2831 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2834 /* Next handle constant cases, those for which we can return 1 even
2835 if ONLY_CONST is set. */
2836 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2837 switch (TREE_CODE (arg0
))
2840 return tree_int_cst_equal (arg0
, arg1
);
2843 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2844 TREE_FIXED_CST (arg1
));
2847 if (real_identical (&TREE_REAL_CST (arg0
), &TREE_REAL_CST (arg1
)))
2851 if (!HONOR_SIGNED_ZEROS (arg0
))
2853 /* If we do not distinguish between signed and unsigned zero,
2854 consider them equal. */
2855 if (real_zerop (arg0
) && real_zerop (arg1
))
2864 if (VECTOR_CST_NELTS (arg0
) != VECTOR_CST_NELTS (arg1
))
2867 for (i
= 0; i
< VECTOR_CST_NELTS (arg0
); ++i
)
2869 if (!operand_equal_p (VECTOR_CST_ELT (arg0
, i
),
2870 VECTOR_CST_ELT (arg1
, i
), flags
))
2877 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2879 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2883 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2884 && ! memcmp (TREE_STRING_POINTER (arg0
),
2885 TREE_STRING_POINTER (arg1
),
2886 TREE_STRING_LENGTH (arg0
)));
2889 gcc_checking_assert (!(flags
2890 & (OEP_ADDRESS_OF
| OEP_CONSTANT_ADDRESS_OF
)));
2891 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2892 flags
| OEP_ADDRESS_OF
2893 | OEP_CONSTANT_ADDRESS_OF
);
2895 /* In GIMPLE empty constructors are allowed in initializers of
2897 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0
))
2898 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1
)));
2903 if (flags
& OEP_ONLY_CONST
)
2906 /* Define macros to test an operand from arg0 and arg1 for equality and a
2907 variant that allows null and views null as being different from any
2908 non-null value. In the latter case, if either is null, the both
2909 must be; otherwise, do the normal comparison. */
2910 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2911 TREE_OPERAND (arg1, N), flags)
2913 #define OP_SAME_WITH_NULL(N) \
2914 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2915 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2917 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2920 /* Two conversions are equal only if signedness and modes match. */
2921 switch (TREE_CODE (arg0
))
2924 case FIX_TRUNC_EXPR
:
2925 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2926 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2936 case tcc_comparison
:
2938 if (OP_SAME (0) && OP_SAME (1))
2941 /* For commutative ops, allow the other order. */
2942 return (commutative_tree_code (TREE_CODE (arg0
))
2943 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2944 TREE_OPERAND (arg1
, 1), flags
)
2945 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2946 TREE_OPERAND (arg1
, 0), flags
));
2949 /* If either of the pointer (or reference) expressions we are
2950 dereferencing contain a side effect, these cannot be equal,
2951 but their addresses can be. */
2952 if ((flags
& OEP_CONSTANT_ADDRESS_OF
) == 0
2953 && (TREE_SIDE_EFFECTS (arg0
)
2954 || TREE_SIDE_EFFECTS (arg1
)))
2957 switch (TREE_CODE (arg0
))
2960 if (!(flags
& (OEP_ADDRESS_OF
| OEP_CONSTANT_ADDRESS_OF
))
2961 && (TYPE_ALIGN (TREE_TYPE (arg0
))
2962 != TYPE_ALIGN (TREE_TYPE (arg1
))))
2964 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2971 case TARGET_MEM_REF
:
2973 if (!(flags
& (OEP_ADDRESS_OF
| OEP_CONSTANT_ADDRESS_OF
)))
2975 /* Require equal access sizes */
2976 if (TYPE_SIZE (TREE_TYPE (arg0
)) != TYPE_SIZE (TREE_TYPE (arg1
))
2977 && (!TYPE_SIZE (TREE_TYPE (arg0
))
2978 || !TYPE_SIZE (TREE_TYPE (arg1
))
2979 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
2980 TYPE_SIZE (TREE_TYPE (arg1
)),
2983 /* Verify that access happens in similar types. */
2984 if (!types_compatible_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
)))
2986 /* Verify that accesses are TBAA compatible. */
2987 if (flag_strict_aliasing
2988 && (!alias_ptr_types_compatible_p
2989 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
2990 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
2991 || (MR_DEPENDENCE_CLIQUE (arg0
)
2992 != MR_DEPENDENCE_CLIQUE (arg1
))
2993 || (MR_DEPENDENCE_BASE (arg0
)
2994 != MR_DEPENDENCE_BASE (arg1
))))
2996 /* Verify that alignment is compatible. */
2997 if (TYPE_ALIGN (TREE_TYPE (arg0
))
2998 != TYPE_ALIGN (TREE_TYPE (arg1
)))
3001 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
3002 return (OP_SAME (0) && OP_SAME (1)
3003 /* TARGET_MEM_REF require equal extra operands. */
3004 && (TREE_CODE (arg0
) != TARGET_MEM_REF
3005 || (OP_SAME_WITH_NULL (2)
3006 && OP_SAME_WITH_NULL (3)
3007 && OP_SAME_WITH_NULL (4))));
3010 case ARRAY_RANGE_REF
:
3011 /* Operands 2 and 3 may be null.
3012 Compare the array index by value if it is constant first as we
3013 may have different types but same value here. */
3016 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
3017 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
3018 TREE_OPERAND (arg1
, 1))
3020 && OP_SAME_WITH_NULL (2)
3021 && OP_SAME_WITH_NULL (3));
3024 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3025 may be NULL when we're called to compare MEM_EXPRs. */
3026 if (!OP_SAME_WITH_NULL (0)
3029 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
3030 return OP_SAME_WITH_NULL (2);
3035 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
3036 return OP_SAME (1) && OP_SAME (2);
3042 case tcc_expression
:
3043 switch (TREE_CODE (arg0
))
3046 /* Be sure we pass right ADDRESS_OF flag. */
3047 gcc_checking_assert (!(flags
3049 | OEP_CONSTANT_ADDRESS_OF
)));
3050 return operand_equal_p (TREE_OPERAND (arg0
, 0),
3051 TREE_OPERAND (arg1
, 0),
3052 flags
| OEP_ADDRESS_OF
);
3054 case TRUTH_NOT_EXPR
:
3057 case TRUTH_ANDIF_EXPR
:
3058 case TRUTH_ORIF_EXPR
:
3059 return OP_SAME (0) && OP_SAME (1);
3062 case WIDEN_MULT_PLUS_EXPR
:
3063 case WIDEN_MULT_MINUS_EXPR
:
3066 /* The multiplcation operands are commutative. */
3069 case TRUTH_AND_EXPR
:
3071 case TRUTH_XOR_EXPR
:
3072 if (OP_SAME (0) && OP_SAME (1))
3075 /* Otherwise take into account this is a commutative operation. */
3076 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
3077 TREE_OPERAND (arg1
, 1), flags
)
3078 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3079 TREE_OPERAND (arg1
, 0), flags
));
3084 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3091 switch (TREE_CODE (arg0
))
3094 if ((CALL_EXPR_FN (arg0
) == NULL_TREE
)
3095 != (CALL_EXPR_FN (arg1
) == NULL_TREE
))
3096 /* If not both CALL_EXPRs are either internal or normal function
3097 functions, then they are not equal. */
3099 else if (CALL_EXPR_FN (arg0
) == NULL_TREE
)
3101 /* If the CALL_EXPRs call different internal functions, then they
3103 if (CALL_EXPR_IFN (arg0
) != CALL_EXPR_IFN (arg1
))
3108 /* If the CALL_EXPRs call different functions, then they are not
3110 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
3116 unsigned int cef
= call_expr_flags (arg0
);
3117 if (flags
& OEP_PURE_SAME
)
3118 cef
&= ECF_CONST
| ECF_PURE
;
3125 /* Now see if all the arguments are the same. */
3127 const_call_expr_arg_iterator iter0
, iter1
;
3129 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
3130 a1
= first_const_call_expr_arg (arg1
, &iter1
);
3132 a0
= next_const_call_expr_arg (&iter0
),
3133 a1
= next_const_call_expr_arg (&iter1
))
3134 if (! operand_equal_p (a0
, a1
, flags
))
3137 /* If we get here and both argument lists are exhausted
3138 then the CALL_EXPRs are equal. */
3139 return ! (a0
|| a1
);
3145 case tcc_declaration
:
3146 /* Consider __builtin_sqrt equal to sqrt. */
3147 return (TREE_CODE (arg0
) == FUNCTION_DECL
3148 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
3149 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
3150 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
3157 #undef OP_SAME_WITH_NULL
3160 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3161 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3163 When in doubt, return 0. */
3166 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
3168 int unsignedp1
, unsignedpo
;
3169 tree primarg0
, primarg1
, primother
;
3170 unsigned int correct_width
;
3172 if (operand_equal_p (arg0
, arg1
, 0))
3175 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
3176 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
3179 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3180 and see if the inner values are the same. This removes any
3181 signedness comparison, which doesn't matter here. */
3182 primarg0
= arg0
, primarg1
= arg1
;
3183 STRIP_NOPS (primarg0
);
3184 STRIP_NOPS (primarg1
);
3185 if (operand_equal_p (primarg0
, primarg1
, 0))
3188 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3189 actual comparison operand, ARG0.
3191 First throw away any conversions to wider types
3192 already present in the operands. */
3194 primarg1
= get_narrower (arg1
, &unsignedp1
);
3195 primother
= get_narrower (other
, &unsignedpo
);
3197 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
3198 if (unsignedp1
== unsignedpo
3199 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
3200 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
3202 tree type
= TREE_TYPE (arg0
);
3204 /* Make sure shorter operand is extended the right way
3205 to match the longer operand. */
3206 primarg1
= fold_convert (signed_or_unsigned_type_for
3207 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
3209 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
3216 /* See if ARG is an expression that is either a comparison or is performing
3217 arithmetic on comparisons. The comparisons must only be comparing
3218 two different values, which will be stored in *CVAL1 and *CVAL2; if
3219 they are nonzero it means that some operands have already been found.
3220 No variables may be used anywhere else in the expression except in the
3221 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3222 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3224 If this is true, return 1. Otherwise, return zero. */
3227 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
3229 enum tree_code code
= TREE_CODE (arg
);
3230 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3232 /* We can handle some of the tcc_expression cases here. */
3233 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3235 else if (tclass
== tcc_expression
3236 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
3237 || code
== COMPOUND_EXPR
))
3238 tclass
= tcc_binary
;
3240 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
3241 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
3243 /* If we've already found a CVAL1 or CVAL2, this expression is
3244 two complex to handle. */
3245 if (*cval1
|| *cval2
)
3255 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
3258 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
3259 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3260 cval1
, cval2
, save_p
));
3265 case tcc_expression
:
3266 if (code
== COND_EXPR
)
3267 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
3268 cval1
, cval2
, save_p
)
3269 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3270 cval1
, cval2
, save_p
)
3271 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
3272 cval1
, cval2
, save_p
));
3275 case tcc_comparison
:
3276 /* First see if we can handle the first operand, then the second. For
3277 the second operand, we know *CVAL1 can't be zero. It must be that
3278 one side of the comparison is each of the values; test for the
3279 case where this isn't true by failing if the two operands
3282 if (operand_equal_p (TREE_OPERAND (arg
, 0),
3283 TREE_OPERAND (arg
, 1), 0))
3287 *cval1
= TREE_OPERAND (arg
, 0);
3288 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
3290 else if (*cval2
== 0)
3291 *cval2
= TREE_OPERAND (arg
, 0);
3292 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
3297 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
3299 else if (*cval2
== 0)
3300 *cval2
= TREE_OPERAND (arg
, 1);
3301 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
3313 /* ARG is a tree that is known to contain just arithmetic operations and
3314 comparisons. Evaluate the operations in the tree substituting NEW0 for
3315 any occurrence of OLD0 as an operand of a comparison and likewise for
3319 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
3320 tree old1
, tree new1
)
3322 tree type
= TREE_TYPE (arg
);
3323 enum tree_code code
= TREE_CODE (arg
);
3324 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3326 /* We can handle some of the tcc_expression cases here. */
3327 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3329 else if (tclass
== tcc_expression
3330 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
3331 tclass
= tcc_binary
;
3336 return fold_build1_loc (loc
, code
, type
,
3337 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3338 old0
, new0
, old1
, new1
));
3341 return fold_build2_loc (loc
, code
, type
,
3342 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3343 old0
, new0
, old1
, new1
),
3344 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3345 old0
, new0
, old1
, new1
));
3347 case tcc_expression
:
3351 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
3355 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
3359 return fold_build3_loc (loc
, code
, type
,
3360 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3361 old0
, new0
, old1
, new1
),
3362 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3363 old0
, new0
, old1
, new1
),
3364 eval_subst (loc
, TREE_OPERAND (arg
, 2),
3365 old0
, new0
, old1
, new1
));
3369 /* Fall through - ??? */
3371 case tcc_comparison
:
3373 tree arg0
= TREE_OPERAND (arg
, 0);
3374 tree arg1
= TREE_OPERAND (arg
, 1);
3376 /* We need to check both for exact equality and tree equality. The
3377 former will be true if the operand has a side-effect. In that
3378 case, we know the operand occurred exactly once. */
3380 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
3382 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
3385 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
3387 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
3390 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
3398 /* Return a tree for the case when the result of an expression is RESULT
3399 converted to TYPE and OMITTED was previously an operand of the expression
3400 but is now not needed (e.g., we folded OMITTED * 0).
3402 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3403 the conversion of RESULT to TYPE. */
3406 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
3408 tree t
= fold_convert_loc (loc
, type
, result
);
3410 /* If the resulting operand is an empty statement, just return the omitted
3411 statement casted to void. */
3412 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
3413 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
3414 fold_ignored_result (omitted
));
3416 if (TREE_SIDE_EFFECTS (omitted
))
3417 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3418 fold_ignored_result (omitted
), t
);
3420 return non_lvalue_loc (loc
, t
);
3423 /* Return a tree for the case when the result of an expression is RESULT
3424 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3425 of the expression but are now not needed.
3427 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3428 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3429 evaluated before OMITTED2. Otherwise, if neither has side effects,
3430 just do the conversion of RESULT to TYPE. */
3433 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
3434 tree omitted1
, tree omitted2
)
3436 tree t
= fold_convert_loc (loc
, type
, result
);
3438 if (TREE_SIDE_EFFECTS (omitted2
))
3439 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
3440 if (TREE_SIDE_EFFECTS (omitted1
))
3441 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
3443 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
3447 /* Return a simplified tree node for the truth-negation of ARG. This
3448 never alters ARG itself. We assume that ARG is an operation that
3449 returns a truth value (0 or 1).
3451 FIXME: one would think we would fold the result, but it causes
3452 problems with the dominator optimizer. */
3455 fold_truth_not_expr (location_t loc
, tree arg
)
3457 tree type
= TREE_TYPE (arg
);
3458 enum tree_code code
= TREE_CODE (arg
);
3459 location_t loc1
, loc2
;
3461 /* If this is a comparison, we can simply invert it, except for
3462 floating-point non-equality comparisons, in which case we just
3463 enclose a TRUTH_NOT_EXPR around what we have. */
3465 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3467 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3468 if (FLOAT_TYPE_P (op_type
)
3469 && flag_trapping_math
3470 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3471 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3474 code
= invert_tree_comparison (code
, HONOR_NANS (op_type
));
3475 if (code
== ERROR_MARK
)
3478 return build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3479 TREE_OPERAND (arg
, 1));
3485 return constant_boolean_node (integer_zerop (arg
), type
);
3487 case TRUTH_AND_EXPR
:
3488 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3489 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3490 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3491 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3492 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3495 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3496 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3497 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3498 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3499 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3501 case TRUTH_XOR_EXPR
:
3502 /* Here we can invert either operand. We invert the first operand
3503 unless the second operand is a TRUTH_NOT_EXPR in which case our
3504 result is the XOR of the first operand with the inside of the
3505 negation of the second operand. */
3507 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3508 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3509 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3511 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3512 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3513 TREE_OPERAND (arg
, 1));
3515 case TRUTH_ANDIF_EXPR
:
3516 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3517 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3518 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3519 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3520 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3522 case TRUTH_ORIF_EXPR
:
3523 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3524 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3525 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3526 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3527 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3529 case TRUTH_NOT_EXPR
:
3530 return TREE_OPERAND (arg
, 0);
3534 tree arg1
= TREE_OPERAND (arg
, 1);
3535 tree arg2
= TREE_OPERAND (arg
, 2);
3537 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3538 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
3540 /* A COND_EXPR may have a throw as one operand, which
3541 then has void type. Just leave void operands
3543 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3544 VOID_TYPE_P (TREE_TYPE (arg1
))
3545 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3546 VOID_TYPE_P (TREE_TYPE (arg2
))
3547 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
3551 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3552 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3553 TREE_OPERAND (arg
, 0),
3554 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3556 case NON_LVALUE_EXPR
:
3557 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3558 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
3561 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3562 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3564 /* ... fall through ... */
3567 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3568 return build1_loc (loc
, TREE_CODE (arg
), type
,
3569 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3572 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3574 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
3577 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3579 case CLEANUP_POINT_EXPR
:
3580 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3581 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3582 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3589 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3590 assume that ARG is an operation that returns a truth value (0 or 1
3591 for scalars, 0 or -1 for vectors). Return the folded expression if
3592 folding is successful. Otherwise, return NULL_TREE. */
3595 fold_invert_truthvalue (location_t loc
, tree arg
)
3597 tree type
= TREE_TYPE (arg
);
3598 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
3604 /* Return a simplified tree node for the truth-negation of ARG. This
3605 never alters ARG itself. We assume that ARG is an operation that
3606 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3609 invert_truthvalue_loc (location_t loc
, tree arg
)
3611 if (TREE_CODE (arg
) == ERROR_MARK
)
3614 tree type
= TREE_TYPE (arg
);
3615 return fold_build1_loc (loc
, VECTOR_TYPE_P (type
)
3621 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3622 with code CODE. This optimization is unsafe. */
3624 distribute_real_division (location_t loc
, enum tree_code code
, tree type
,
3625 tree arg0
, tree arg1
)
3627 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3628 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3630 /* (A / C) +- (B / C) -> (A +- B) / C. */
3632 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3633 TREE_OPERAND (arg1
, 1), 0))
3634 return fold_build2_loc (loc
, mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3635 fold_build2_loc (loc
, code
, type
,
3636 TREE_OPERAND (arg0
, 0),
3637 TREE_OPERAND (arg1
, 0)),
3638 TREE_OPERAND (arg0
, 1));
3640 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3641 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3642 TREE_OPERAND (arg1
, 0), 0)
3643 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3644 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3646 REAL_VALUE_TYPE r0
, r1
;
3647 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3648 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3650 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3652 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3653 real_arithmetic (&r0
, code
, &r0
, &r1
);
3654 return fold_build2_loc (loc
, MULT_EXPR
, type
,
3655 TREE_OPERAND (arg0
, 0),
3656 build_real (type
, r0
));
3662 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3663 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3666 make_bit_field_ref (location_t loc
, tree inner
, tree type
,
3667 HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
, int unsignedp
)
3669 tree result
, bftype
;
3673 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3674 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3675 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3676 && tree_fits_shwi_p (size
)
3677 && tree_to_shwi (size
) == bitsize
)
3678 return fold_convert_loc (loc
, type
, inner
);
3682 if (TYPE_PRECISION (bftype
) != bitsize
3683 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
3684 bftype
= build_nonstandard_integer_type (bitsize
, 0);
3686 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
3687 size_int (bitsize
), bitsize_int (bitpos
));
3690 result
= fold_convert_loc (loc
, type
, result
);
3695 /* Optimize a bit-field compare.
3697 There are two cases: First is a compare against a constant and the
3698 second is a comparison of two items where the fields are at the same
3699 bit position relative to the start of a chunk (byte, halfword, word)
3700 large enough to contain it. In these cases we can avoid the shift
3701 implicit in bitfield extractions.
3703 For constants, we emit a compare of the shifted constant with the
3704 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3705 compared. For two fields at the same position, we do the ANDs with the
3706 similar mask and compare the result of the ANDs.
3708 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3709 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3710 are the left and right operands of the comparison, respectively.
3712 If the optimization described above can be done, we return the resulting
3713 tree. Otherwise we return zero. */
3716 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3717 tree compare_type
, tree lhs
, tree rhs
)
3719 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3720 tree type
= TREE_TYPE (lhs
);
3722 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3723 machine_mode lmode
, rmode
, nmode
;
3724 int lunsignedp
, runsignedp
;
3725 int lvolatilep
= 0, rvolatilep
= 0;
3726 tree linner
, rinner
= NULL_TREE
;
3730 /* Get all the information about the extractions being done. If the bit size
3731 if the same as the size of the underlying object, we aren't doing an
3732 extraction at all and so can do nothing. We also don't want to
3733 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3734 then will no longer be able to replace it. */
3735 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3736 &lunsignedp
, &lvolatilep
, false);
3737 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3738 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
|| lvolatilep
)
3743 /* If this is not a constant, we can only do something if bit positions,
3744 sizes, and signedness are the same. */
3745 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3746 &runsignedp
, &rvolatilep
, false);
3748 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3749 || lunsignedp
!= runsignedp
|| offset
!= 0
3750 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
|| rvolatilep
)
3754 /* See if we can find a mode to refer to this field. We should be able to,
3755 but fail if we can't. */
3756 nmode
= get_best_mode (lbitsize
, lbitpos
, 0, 0,
3757 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3758 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3759 TYPE_ALIGN (TREE_TYPE (rinner
))),
3761 if (nmode
== VOIDmode
)
3764 /* Set signed and unsigned types of the precision of this mode for the
3766 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3768 /* Compute the bit position and size for the new reference and our offset
3769 within it. If the new reference is the same size as the original, we
3770 won't optimize anything, so return zero. */
3771 nbitsize
= GET_MODE_BITSIZE (nmode
);
3772 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3774 if (nbitsize
== lbitsize
)
3777 if (BYTES_BIG_ENDIAN
)
3778 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3780 /* Make the mask to be used against the extracted field. */
3781 mask
= build_int_cst_type (unsigned_type
, -1);
3782 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
3783 mask
= const_binop (RSHIFT_EXPR
, mask
,
3784 size_int (nbitsize
- lbitsize
- lbitpos
));
3787 /* If not comparing with constant, just rework the comparison
3789 return fold_build2_loc (loc
, code
, compare_type
,
3790 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3791 make_bit_field_ref (loc
, linner
,
3796 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3797 make_bit_field_ref (loc
, rinner
,
3803 /* Otherwise, we are handling the constant case. See if the constant is too
3804 big for the field. Warn and return a tree of for 0 (false) if so. We do
3805 this not only for its own sake, but to avoid having to test for this
3806 error case below. If we didn't, we might generate wrong code.
3808 For unsigned fields, the constant shifted right by the field length should
3809 be all zero. For signed fields, the high-order bits should agree with
3814 if (wi::lrshift (rhs
, lbitsize
) != 0)
3816 warning (0, "comparison is always %d due to width of bit-field",
3818 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3823 wide_int tem
= wi::arshift (rhs
, lbitsize
- 1);
3824 if (tem
!= 0 && tem
!= -1)
3826 warning (0, "comparison is always %d due to width of bit-field",
3828 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3832 /* Single-bit compares should always be against zero. */
3833 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3835 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3836 rhs
= build_int_cst (type
, 0);
3839 /* Make a new bitfield reference, shift the constant over the
3840 appropriate number of bits and mask it with the computed mask
3841 (in case this was a signed field). If we changed it, make a new one. */
3842 lhs
= make_bit_field_ref (loc
, linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3844 rhs
= const_binop (BIT_AND_EXPR
,
3845 const_binop (LSHIFT_EXPR
,
3846 fold_convert_loc (loc
, unsigned_type
, rhs
),
3847 size_int (lbitpos
)),
3850 lhs
= build2_loc (loc
, code
, compare_type
,
3851 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
3855 /* Subroutine for fold_truth_andor_1: decode a field reference.
3857 If EXP is a comparison reference, we return the innermost reference.
3859 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3860 set to the starting bit number.
3862 If the innermost field can be completely contained in a mode-sized
3863 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3865 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3866 otherwise it is not changed.
3868 *PUNSIGNEDP is set to the signedness of the field.
3870 *PMASK is set to the mask used. This is either contained in a
3871 BIT_AND_EXPR or derived from the width of the field.
3873 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3875 Return 0 if this is not a component reference or is one that we can't
3876 do anything with. */
3879 decode_field_reference (location_t loc
, tree exp
, HOST_WIDE_INT
*pbitsize
,
3880 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
3881 int *punsignedp
, int *pvolatilep
,
3882 tree
*pmask
, tree
*pand_mask
)
3884 tree outer_type
= 0;
3886 tree mask
, inner
, offset
;
3888 unsigned int precision
;
3890 /* All the optimizations using this function assume integer fields.
3891 There are problems with FP fields since the type_for_size call
3892 below can fail for, e.g., XFmode. */
3893 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3896 /* We are interested in the bare arrangement of bits, so strip everything
3897 that doesn't affect the machine mode. However, record the type of the
3898 outermost expression if it may matter below. */
3899 if (CONVERT_EXPR_P (exp
)
3900 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3901 outer_type
= TREE_TYPE (exp
);
3904 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3906 and_mask
= TREE_OPERAND (exp
, 1);
3907 exp
= TREE_OPERAND (exp
, 0);
3908 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3909 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3913 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3914 punsignedp
, pvolatilep
, false);
3915 if ((inner
== exp
&& and_mask
== 0)
3916 || *pbitsize
< 0 || offset
!= 0
3917 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3920 /* If the number of bits in the reference is the same as the bitsize of
3921 the outer type, then the outer type gives the signedness. Otherwise
3922 (in case of a small bitfield) the signedness is unchanged. */
3923 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3924 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3926 /* Compute the mask to access the bitfield. */
3927 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3928 precision
= TYPE_PRECISION (unsigned_type
);
3930 mask
= build_int_cst_type (unsigned_type
, -1);
3932 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3933 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3935 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3937 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3938 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
3941 *pand_mask
= and_mask
;
3945 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3946 bit positions and MASK is SIGNED. */
3949 all_ones_mask_p (const_tree mask
, unsigned int size
)
3951 tree type
= TREE_TYPE (mask
);
3952 unsigned int precision
= TYPE_PRECISION (type
);
3954 /* If this function returns true when the type of the mask is
3955 UNSIGNED, then there will be errors. In particular see
3956 gcc.c-torture/execute/990326-1.c. There does not appear to be
3957 any documentation paper trail as to why this is so. But the pre
3958 wide-int worked with that restriction and it has been preserved
3960 if (size
> precision
|| TYPE_SIGN (type
) == UNSIGNED
)
3963 return wi::mask (size
, false, precision
) == mask
;
3966 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3967 represents the sign bit of EXP's type. If EXP represents a sign
3968 or zero extension, also test VAL against the unextended type.
3969 The return value is the (sub)expression whose sign bit is VAL,
3970 or NULL_TREE otherwise. */
3973 sign_bit_p (tree exp
, const_tree val
)
3978 /* Tree EXP must have an integral type. */
3979 t
= TREE_TYPE (exp
);
3980 if (! INTEGRAL_TYPE_P (t
))
3983 /* Tree VAL must be an integer constant. */
3984 if (TREE_CODE (val
) != INTEGER_CST
3985 || TREE_OVERFLOW (val
))
3988 width
= TYPE_PRECISION (t
);
3989 if (wi::only_sign_bit_p (val
, width
))
3992 /* Handle extension from a narrower type. */
3993 if (TREE_CODE (exp
) == NOP_EXPR
3994 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3995 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
4000 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4001 to be evaluated unconditionally. */
4004 simple_operand_p (const_tree exp
)
4006 /* Strip any conversions that don't change the machine mode. */
4009 return (CONSTANT_CLASS_P (exp
)
4010 || TREE_CODE (exp
) == SSA_NAME
4012 && ! TREE_ADDRESSABLE (exp
)
4013 && ! TREE_THIS_VOLATILE (exp
)
4014 && ! DECL_NONLOCAL (exp
)
4015 /* Don't regard global variables as simple. They may be
4016 allocated in ways unknown to the compiler (shared memory,
4017 #pragma weak, etc). */
4018 && ! TREE_PUBLIC (exp
)
4019 && ! DECL_EXTERNAL (exp
)
4020 /* Weakrefs are not safe to be read, since they can be NULL.
4021 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4022 have DECL_WEAK flag set. */
4023 && (! VAR_OR_FUNCTION_DECL_P (exp
) || ! DECL_WEAK (exp
))
4024 /* Loading a static variable is unduly expensive, but global
4025 registers aren't expensive. */
4026 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
4029 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4030 to be evaluated unconditionally.
4031 In addition to simple_operand_p, we assume that comparisons, conversions,
4032 and logic-not operations are simple, if their operands are simple, too. */
4035 simple_operand_p_2 (tree exp
)
4037 enum tree_code code
;
4039 if (TREE_SIDE_EFFECTS (exp
)
4040 || tree_could_trap_p (exp
))
4043 while (CONVERT_EXPR_P (exp
))
4044 exp
= TREE_OPERAND (exp
, 0);
4046 code
= TREE_CODE (exp
);
4048 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4049 return (simple_operand_p (TREE_OPERAND (exp
, 0))
4050 && simple_operand_p (TREE_OPERAND (exp
, 1)));
4052 if (code
== TRUTH_NOT_EXPR
)
4053 return simple_operand_p_2 (TREE_OPERAND (exp
, 0));
4055 return simple_operand_p (exp
);
4059 /* The following functions are subroutines to fold_range_test and allow it to
4060 try to change a logical combination of comparisons into a range test.
4063 X == 2 || X == 3 || X == 4 || X == 5
4067 (unsigned) (X - 2) <= 3
4069 We describe each set of comparisons as being either inside or outside
4070 a range, using a variable named like IN_P, and then describe the
4071 range with a lower and upper bound. If one of the bounds is omitted,
4072 it represents either the highest or lowest value of the type.
4074 In the comments below, we represent a range by two numbers in brackets
4075 preceded by a "+" to designate being inside that range, or a "-" to
4076 designate being outside that range, so the condition can be inverted by
4077 flipping the prefix. An omitted bound is represented by a "-". For
4078 example, "- [-, 10]" means being outside the range starting at the lowest
4079 possible value and ending at 10, in other words, being greater than 10.
4080 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4083 We set up things so that the missing bounds are handled in a consistent
4084 manner so neither a missing bound nor "true" and "false" need to be
4085 handled using a special case. */
4087 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4088 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4089 and UPPER1_P are nonzero if the respective argument is an upper bound
4090 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4091 must be specified for a comparison. ARG1 will be converted to ARG0's
4092 type if both are specified. */
4095 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
4096 tree arg1
, int upper1_p
)
4102 /* If neither arg represents infinity, do the normal operation.
4103 Else, if not a comparison, return infinity. Else handle the special
4104 comparison rules. Note that most of the cases below won't occur, but
4105 are handled for consistency. */
4107 if (arg0
!= 0 && arg1
!= 0)
4109 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
4110 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
4112 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
4115 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
4118 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4119 for neither. In real maths, we cannot assume open ended ranges are
4120 the same. But, this is computer arithmetic, where numbers are finite.
4121 We can therefore make the transformation of any unbounded range with
4122 the value Z, Z being greater than any representable number. This permits
4123 us to treat unbounded ranges as equal. */
4124 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
4125 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
4129 result
= sgn0
== sgn1
;
4132 result
= sgn0
!= sgn1
;
4135 result
= sgn0
< sgn1
;
4138 result
= sgn0
<= sgn1
;
4141 result
= sgn0
> sgn1
;
4144 result
= sgn0
>= sgn1
;
4150 return constant_boolean_node (result
, type
);
4153 /* Helper routine for make_range. Perform one step for it, return
4154 new expression if the loop should continue or NULL_TREE if it should
4158 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
4159 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
4160 bool *strict_overflow_p
)
4162 tree arg0_type
= TREE_TYPE (arg0
);
4163 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
4164 int in_p
= *p_in_p
, n_in_p
;
4168 case TRUTH_NOT_EXPR
:
4169 /* We can only do something if the range is testing for zero. */
4170 if (low
== NULL_TREE
|| high
== NULL_TREE
4171 || ! integer_zerop (low
) || ! integer_zerop (high
))
4176 case EQ_EXPR
: case NE_EXPR
:
4177 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
4178 /* We can only do something if the range is testing for zero
4179 and if the second operand is an integer constant. Note that
4180 saying something is "in" the range we make is done by
4181 complementing IN_P since it will set in the initial case of
4182 being not equal to zero; "out" is leaving it alone. */
4183 if (low
== NULL_TREE
|| high
== NULL_TREE
4184 || ! integer_zerop (low
) || ! integer_zerop (high
)
4185 || TREE_CODE (arg1
) != INTEGER_CST
)
4190 case NE_EXPR
: /* - [c, c] */
4193 case EQ_EXPR
: /* + [c, c] */
4194 in_p
= ! in_p
, low
= high
= arg1
;
4196 case GT_EXPR
: /* - [-, c] */
4197 low
= 0, high
= arg1
;
4199 case GE_EXPR
: /* + [c, -] */
4200 in_p
= ! in_p
, low
= arg1
, high
= 0;
4202 case LT_EXPR
: /* - [c, -] */
4203 low
= arg1
, high
= 0;
4205 case LE_EXPR
: /* + [-, c] */
4206 in_p
= ! in_p
, low
= 0, high
= arg1
;
4212 /* If this is an unsigned comparison, we also know that EXP is
4213 greater than or equal to zero. We base the range tests we make
4214 on that fact, so we record it here so we can parse existing
4215 range tests. We test arg0_type since often the return type
4216 of, e.g. EQ_EXPR, is boolean. */
4217 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
4219 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4221 build_int_cst (arg0_type
, 0),
4225 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
4227 /* If the high bound is missing, but we have a nonzero low
4228 bound, reverse the range so it goes from zero to the low bound
4230 if (high
== 0 && low
&& ! integer_zerop (low
))
4233 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
4234 build_int_cst (TREE_TYPE (low
), 1), 0);
4235 low
= build_int_cst (arg0_type
, 0);
4245 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4246 low and high are non-NULL, then normalize will DTRT. */
4247 if (!TYPE_UNSIGNED (arg0_type
)
4248 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4250 if (low
== NULL_TREE
)
4251 low
= TYPE_MIN_VALUE (arg0_type
);
4252 if (high
== NULL_TREE
)
4253 high
= TYPE_MAX_VALUE (arg0_type
);
4256 /* (-x) IN [a,b] -> x in [-b, -a] */
4257 n_low
= range_binop (MINUS_EXPR
, exp_type
,
4258 build_int_cst (exp_type
, 0),
4260 n_high
= range_binop (MINUS_EXPR
, exp_type
,
4261 build_int_cst (exp_type
, 0),
4263 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
4269 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
4270 build_int_cst (exp_type
, 1));
4274 if (TREE_CODE (arg1
) != INTEGER_CST
)
4277 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4278 move a constant to the other side. */
4279 if (!TYPE_UNSIGNED (arg0_type
)
4280 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4283 /* If EXP is signed, any overflow in the computation is undefined,
4284 so we don't worry about it so long as our computations on
4285 the bounds don't overflow. For unsigned, overflow is defined
4286 and this is exactly the right thing. */
4287 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4288 arg0_type
, low
, 0, arg1
, 0);
4289 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4290 arg0_type
, high
, 1, arg1
, 0);
4291 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
4292 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
4295 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4296 *strict_overflow_p
= true;
4299 /* Check for an unsigned range which has wrapped around the maximum
4300 value thus making n_high < n_low, and normalize it. */
4301 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
4303 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
4304 build_int_cst (TREE_TYPE (n_high
), 1), 0);
4305 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
4306 build_int_cst (TREE_TYPE (n_low
), 1), 0);
4308 /* If the range is of the form +/- [ x+1, x ], we won't
4309 be able to normalize it. But then, it represents the
4310 whole range or the empty set, so make it
4312 if (tree_int_cst_equal (n_low
, low
)
4313 && tree_int_cst_equal (n_high
, high
))
4319 low
= n_low
, high
= n_high
;
4327 case NON_LVALUE_EXPR
:
4328 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
4331 if (! INTEGRAL_TYPE_P (arg0_type
)
4332 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
4333 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
4336 n_low
= low
, n_high
= high
;
4339 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
4342 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
4344 /* If we're converting arg0 from an unsigned type, to exp,
4345 a signed type, we will be doing the comparison as unsigned.
4346 The tests above have already verified that LOW and HIGH
4349 So we have to ensure that we will handle large unsigned
4350 values the same way that the current signed bounds treat
4353 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
4357 /* For fixed-point modes, we need to pass the saturating flag
4358 as the 2nd parameter. */
4359 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
4361 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
4362 TYPE_SATURATING (arg0_type
));
4365 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
4367 /* A range without an upper bound is, naturally, unbounded.
4368 Since convert would have cropped a very large value, use
4369 the max value for the destination type. */
4371 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
4372 : TYPE_MAX_VALUE (arg0_type
);
4374 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
4375 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
4376 fold_convert_loc (loc
, arg0_type
,
4378 build_int_cst (arg0_type
, 1));
4380 /* If the low bound is specified, "and" the range with the
4381 range for which the original unsigned value will be
4385 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
4386 1, fold_convert_loc (loc
, arg0_type
,
4391 in_p
= (n_in_p
== in_p
);
4395 /* Otherwise, "or" the range with the range of the input
4396 that will be interpreted as negative. */
4397 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
4398 1, fold_convert_loc (loc
, arg0_type
,
4403 in_p
= (in_p
!= n_in_p
);
4417 /* Given EXP, a logical expression, set the range it is testing into
4418 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4419 actually being tested. *PLOW and *PHIGH will be made of the same
4420 type as the returned expression. If EXP is not a comparison, we
4421 will most likely not be returning a useful value and range. Set
4422 *STRICT_OVERFLOW_P to true if the return value is only valid
4423 because signed overflow is undefined; otherwise, do not change
4424 *STRICT_OVERFLOW_P. */
4427 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4428 bool *strict_overflow_p
)
4430 enum tree_code code
;
4431 tree arg0
, arg1
= NULL_TREE
;
4432 tree exp_type
, nexp
;
4435 location_t loc
= EXPR_LOCATION (exp
);
4437 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4438 and see if we can refine the range. Some of the cases below may not
4439 happen, but it doesn't seem worth worrying about this. We "continue"
4440 the outer loop when we've changed something; otherwise we "break"
4441 the switch, which will "break" the while. */
4444 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4448 code
= TREE_CODE (exp
);
4449 exp_type
= TREE_TYPE (exp
);
4452 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4454 if (TREE_OPERAND_LENGTH (exp
) > 0)
4455 arg0
= TREE_OPERAND (exp
, 0);
4456 if (TREE_CODE_CLASS (code
) == tcc_binary
4457 || TREE_CODE_CLASS (code
) == tcc_comparison
4458 || (TREE_CODE_CLASS (code
) == tcc_expression
4459 && TREE_OPERAND_LENGTH (exp
) > 1))
4460 arg1
= TREE_OPERAND (exp
, 1);
4462 if (arg0
== NULL_TREE
)
4465 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
4466 &high
, &in_p
, strict_overflow_p
);
4467 if (nexp
== NULL_TREE
)
4472 /* If EXP is a constant, we can evaluate whether this is true or false. */
4473 if (TREE_CODE (exp
) == INTEGER_CST
)
4475 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4477 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4483 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4487 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4488 type, TYPE, return an expression to test if EXP is in (or out of, depending
4489 on IN_P) the range. Return 0 if the test couldn't be created. */
4492 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4493 tree low
, tree high
)
4495 tree etype
= TREE_TYPE (exp
), value
;
4497 /* Disable this optimization for function pointer expressions
4498 on targets that require function pointer canonicalization. */
4499 if (targetm
.have_canonicalize_funcptr_for_compare ()
4500 && TREE_CODE (etype
) == POINTER_TYPE
4501 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4506 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4508 return invert_truthvalue_loc (loc
, value
);
4513 if (low
== 0 && high
== 0)
4514 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 1), exp
);
4517 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4518 fold_convert_loc (loc
, etype
, high
));
4521 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4522 fold_convert_loc (loc
, etype
, low
));
4524 if (operand_equal_p (low
, high
, 0))
4525 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4526 fold_convert_loc (loc
, etype
, low
));
4528 if (integer_zerop (low
))
4530 if (! TYPE_UNSIGNED (etype
))
4532 etype
= unsigned_type_for (etype
);
4533 high
= fold_convert_loc (loc
, etype
, high
);
4534 exp
= fold_convert_loc (loc
, etype
, exp
);
4536 return build_range_check (loc
, type
, exp
, 1, 0, high
);
4539 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4540 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4542 int prec
= TYPE_PRECISION (etype
);
4544 if (wi::mask (prec
- 1, false, prec
) == high
)
4546 if (TYPE_UNSIGNED (etype
))
4548 tree signed_etype
= signed_type_for (etype
);
4549 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4551 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4553 etype
= signed_etype
;
4554 exp
= fold_convert_loc (loc
, etype
, exp
);
4556 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
4557 build_int_cst (etype
, 0));
4561 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4562 This requires wrap-around arithmetics for the type of the expression.
4563 First make sure that arithmetics in this type is valid, then make sure
4564 that it wraps around. */
4565 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
4566 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4567 TYPE_UNSIGNED (etype
));
4569 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_OVERFLOW_WRAPS (etype
))
4571 tree utype
, minv
, maxv
;
4573 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4574 for the type in question, as we rely on this here. */
4575 utype
= unsigned_type_for (etype
);
4576 maxv
= fold_convert_loc (loc
, utype
, TYPE_MAX_VALUE (etype
));
4577 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4578 build_int_cst (TREE_TYPE (maxv
), 1), 1);
4579 minv
= fold_convert_loc (loc
, utype
, TYPE_MIN_VALUE (etype
));
4581 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4588 high
= fold_convert_loc (loc
, etype
, high
);
4589 low
= fold_convert_loc (loc
, etype
, low
);
4590 exp
= fold_convert_loc (loc
, etype
, exp
);
4592 value
= const_binop (MINUS_EXPR
, high
, low
);
4595 if (POINTER_TYPE_P (etype
))
4597 if (value
!= 0 && !TREE_OVERFLOW (value
))
4599 low
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (low
), low
);
4600 return build_range_check (loc
, type
,
4601 fold_build_pointer_plus_loc (loc
, exp
, low
),
4602 1, build_int_cst (etype
, 0), value
);
4607 if (value
!= 0 && !TREE_OVERFLOW (value
))
4608 return build_range_check (loc
, type
,
4609 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
4610 1, build_int_cst (etype
, 0), value
);
4615 /* Return the predecessor of VAL in its type, handling the infinite case. */
4618 range_predecessor (tree val
)
4620 tree type
= TREE_TYPE (val
);
4622 if (INTEGRAL_TYPE_P (type
)
4623 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
4626 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0,
4627 build_int_cst (TREE_TYPE (val
), 1), 0);
4630 /* Return the successor of VAL in its type, handling the infinite case. */
4633 range_successor (tree val
)
4635 tree type
= TREE_TYPE (val
);
4637 if (INTEGRAL_TYPE_P (type
)
4638 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
4641 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0,
4642 build_int_cst (TREE_TYPE (val
), 1), 0);
4645 /* Given two ranges, see if we can merge them into one. Return 1 if we
4646 can, 0 if we can't. Set the output range into the specified parameters. */
4649 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4650 tree high0
, int in1_p
, tree low1
, tree high1
)
4658 int lowequal
= ((low0
== 0 && low1
== 0)
4659 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4660 low0
, 0, low1
, 0)));
4661 int highequal
= ((high0
== 0 && high1
== 0)
4662 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4663 high0
, 1, high1
, 1)));
4665 /* Make range 0 be the range that starts first, or ends last if they
4666 start at the same value. Swap them if it isn't. */
4667 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4670 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4671 high1
, 1, high0
, 1))))
4673 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4674 tem
= low0
, low0
= low1
, low1
= tem
;
4675 tem
= high0
, high0
= high1
, high1
= tem
;
4678 /* Now flag two cases, whether the ranges are disjoint or whether the
4679 second range is totally subsumed in the first. Note that the tests
4680 below are simplified by the ones above. */
4681 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4682 high0
, 1, low1
, 0));
4683 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4684 high1
, 1, high0
, 1));
4686 /* We now have four cases, depending on whether we are including or
4687 excluding the two ranges. */
4690 /* If they don't overlap, the result is false. If the second range
4691 is a subset it is the result. Otherwise, the range is from the start
4692 of the second to the end of the first. */
4694 in_p
= 0, low
= high
= 0;
4696 in_p
= 1, low
= low1
, high
= high1
;
4698 in_p
= 1, low
= low1
, high
= high0
;
4701 else if (in0_p
&& ! in1_p
)
4703 /* If they don't overlap, the result is the first range. If they are
4704 equal, the result is false. If the second range is a subset of the
4705 first, and the ranges begin at the same place, we go from just after
4706 the end of the second range to the end of the first. If the second
4707 range is not a subset of the first, or if it is a subset and both
4708 ranges end at the same place, the range starts at the start of the
4709 first range and ends just before the second range.
4710 Otherwise, we can't describe this as a single range. */
4712 in_p
= 1, low
= low0
, high
= high0
;
4713 else if (lowequal
&& highequal
)
4714 in_p
= 0, low
= high
= 0;
4715 else if (subset
&& lowequal
)
4717 low
= range_successor (high1
);
4722 /* We are in the weird situation where high0 > high1 but
4723 high1 has no successor. Punt. */
4727 else if (! subset
|| highequal
)
4730 high
= range_predecessor (low1
);
4734 /* low0 < low1 but low1 has no predecessor. Punt. */
4742 else if (! in0_p
&& in1_p
)
4744 /* If they don't overlap, the result is the second range. If the second
4745 is a subset of the first, the result is false. Otherwise,
4746 the range starts just after the first range and ends at the
4747 end of the second. */
4749 in_p
= 1, low
= low1
, high
= high1
;
4750 else if (subset
|| highequal
)
4751 in_p
= 0, low
= high
= 0;
4754 low
= range_successor (high0
);
4759 /* high1 > high0 but high0 has no successor. Punt. */
4767 /* The case where we are excluding both ranges. Here the complex case
4768 is if they don't overlap. In that case, the only time we have a
4769 range is if they are adjacent. If the second is a subset of the
4770 first, the result is the first. Otherwise, the range to exclude
4771 starts at the beginning of the first range and ends at the end of the
4775 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4776 range_successor (high0
),
4778 in_p
= 0, low
= low0
, high
= high1
;
4781 /* Canonicalize - [min, x] into - [-, x]. */
4782 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4783 switch (TREE_CODE (TREE_TYPE (low0
)))
4786 if (TYPE_PRECISION (TREE_TYPE (low0
))
4787 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4791 if (tree_int_cst_equal (low0
,
4792 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4796 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4797 && integer_zerop (low0
))
4804 /* Canonicalize - [x, max] into - [x, -]. */
4805 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4806 switch (TREE_CODE (TREE_TYPE (high1
)))
4809 if (TYPE_PRECISION (TREE_TYPE (high1
))
4810 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4814 if (tree_int_cst_equal (high1
,
4815 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4819 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4820 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4822 build_int_cst (TREE_TYPE (high1
), 1),
4830 /* The ranges might be also adjacent between the maximum and
4831 minimum values of the given type. For
4832 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4833 return + [x + 1, y - 1]. */
4834 if (low0
== 0 && high1
== 0)
4836 low
= range_successor (high0
);
4837 high
= range_predecessor (low1
);
4838 if (low
== 0 || high
== 0)
4848 in_p
= 0, low
= low0
, high
= high0
;
4850 in_p
= 0, low
= low0
, high
= high1
;
4853 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4858 /* Subroutine of fold, looking inside expressions of the form
4859 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4860 of the COND_EXPR. This function is being used also to optimize
4861 A op B ? C : A, by reversing the comparison first.
4863 Return a folded expression whose code is not a COND_EXPR
4864 anymore, or NULL_TREE if no folding opportunity is found. */
4867 fold_cond_expr_with_comparison (location_t loc
, tree type
,
4868 tree arg0
, tree arg1
, tree arg2
)
4870 enum tree_code comp_code
= TREE_CODE (arg0
);
4871 tree arg00
= TREE_OPERAND (arg0
, 0);
4872 tree arg01
= TREE_OPERAND (arg0
, 1);
4873 tree arg1_type
= TREE_TYPE (arg1
);
4879 /* If we have A op 0 ? A : -A, consider applying the following
4882 A == 0? A : -A same as -A
4883 A != 0? A : -A same as A
4884 A >= 0? A : -A same as abs (A)
4885 A > 0? A : -A same as abs (A)
4886 A <= 0? A : -A same as -abs (A)
4887 A < 0? A : -A same as -abs (A)
4889 None of these transformations work for modes with signed
4890 zeros. If A is +/-0, the first two transformations will
4891 change the sign of the result (from +0 to -0, or vice
4892 versa). The last four will fix the sign of the result,
4893 even though the original expressions could be positive or
4894 negative, depending on the sign of A.
4896 Note that all these transformations are correct if A is
4897 NaN, since the two alternatives (A and -A) are also NaNs. */
4898 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4899 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
4900 ? real_zerop (arg01
)
4901 : integer_zerop (arg01
))
4902 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4903 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4904 /* In the case that A is of the form X-Y, '-A' (arg2) may
4905 have already been folded to Y-X, check for that. */
4906 || (TREE_CODE (arg1
) == MINUS_EXPR
4907 && TREE_CODE (arg2
) == MINUS_EXPR
4908 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4909 TREE_OPERAND (arg2
, 1), 0)
4910 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4911 TREE_OPERAND (arg2
, 0), 0))))
4916 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
4917 return pedantic_non_lvalue_loc (loc
,
4918 fold_convert_loc (loc
, type
,
4919 negate_expr (tem
)));
4922 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4925 if (flag_trapping_math
)
4930 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4931 arg1
= fold_convert_loc (loc
, signed_type_for
4932 (TREE_TYPE (arg1
)), arg1
);
4933 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4934 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4937 if (flag_trapping_math
)
4941 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4942 arg1
= fold_convert_loc (loc
, signed_type_for
4943 (TREE_TYPE (arg1
)), arg1
);
4944 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4945 return negate_expr (fold_convert_loc (loc
, type
, tem
));
4947 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4951 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4952 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4953 both transformations are correct when A is NaN: A != 0
4954 is then true, and A == 0 is false. */
4956 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4957 && integer_zerop (arg01
) && integer_zerop (arg2
))
4959 if (comp_code
== NE_EXPR
)
4960 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4961 else if (comp_code
== EQ_EXPR
)
4962 return build_zero_cst (type
);
4965 /* Try some transformations of A op B ? A : B.
4967 A == B? A : B same as B
4968 A != B? A : B same as A
4969 A >= B? A : B same as max (A, B)
4970 A > B? A : B same as max (B, A)
4971 A <= B? A : B same as min (A, B)
4972 A < B? A : B same as min (B, A)
4974 As above, these transformations don't work in the presence
4975 of signed zeros. For example, if A and B are zeros of
4976 opposite sign, the first two transformations will change
4977 the sign of the result. In the last four, the original
4978 expressions give different results for (A=+0, B=-0) and
4979 (A=-0, B=+0), but the transformed expressions do not.
4981 The first two transformations are correct if either A or B
4982 is a NaN. In the first transformation, the condition will
4983 be false, and B will indeed be chosen. In the case of the
4984 second transformation, the condition A != B will be true,
4985 and A will be chosen.
4987 The conversions to max() and min() are not correct if B is
4988 a number and A is not. The conditions in the original
4989 expressions will be false, so all four give B. The min()
4990 and max() versions would give a NaN instead. */
4991 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4992 && operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4993 /* Avoid these transformations if the COND_EXPR may be used
4994 as an lvalue in the C++ front-end. PR c++/19199. */
4996 || VECTOR_TYPE_P (type
)
4997 || (! lang_GNU_CXX ()
4998 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
4999 || ! maybe_lvalue_p (arg1
)
5000 || ! maybe_lvalue_p (arg2
)))
5002 tree comp_op0
= arg00
;
5003 tree comp_op1
= arg01
;
5004 tree comp_type
= TREE_TYPE (comp_op0
);
5006 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5007 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
5017 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg2
));
5019 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
5024 /* In C++ a ?: expression can be an lvalue, so put the
5025 operand which will be used if they are equal first
5026 so that we can convert this back to the
5027 corresponding COND_EXPR. */
5028 if (!HONOR_NANS (arg1
))
5030 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5031 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5032 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
5033 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
5034 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
5035 comp_op1
, comp_op0
);
5036 return pedantic_non_lvalue_loc (loc
,
5037 fold_convert_loc (loc
, type
, tem
));
5044 if (!HONOR_NANS (arg1
))
5046 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5047 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5048 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
5049 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
5050 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
5051 comp_op1
, comp_op0
);
5052 return pedantic_non_lvalue_loc (loc
,
5053 fold_convert_loc (loc
, type
, tem
));
5057 if (!HONOR_NANS (arg1
))
5058 return pedantic_non_lvalue_loc (loc
,
5059 fold_convert_loc (loc
, type
, arg2
));
5062 if (!HONOR_NANS (arg1
))
5063 return pedantic_non_lvalue_loc (loc
,
5064 fold_convert_loc (loc
, type
, arg1
));
5067 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5072 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5073 we might still be able to simplify this. For example,
5074 if C1 is one less or one more than C2, this might have started
5075 out as a MIN or MAX and been transformed by this function.
5076 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5078 if (INTEGRAL_TYPE_P (type
)
5079 && TREE_CODE (arg01
) == INTEGER_CST
5080 && TREE_CODE (arg2
) == INTEGER_CST
)
5084 if (TREE_CODE (arg1
) == INTEGER_CST
)
5086 /* We can replace A with C1 in this case. */
5087 arg1
= fold_convert_loc (loc
, type
, arg01
);
5088 return fold_build3_loc (loc
, COND_EXPR
, type
, arg0
, arg1
, arg2
);
5091 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5092 MIN_EXPR, to preserve the signedness of the comparison. */
5093 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
5095 && operand_equal_p (arg01
,
5096 const_binop (PLUS_EXPR
, arg2
,
5097 build_int_cst (type
, 1)),
5100 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
5101 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5103 return pedantic_non_lvalue_loc (loc
,
5104 fold_convert_loc (loc
, type
, tem
));
5109 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5111 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
5113 && operand_equal_p (arg01
,
5114 const_binop (MINUS_EXPR
, arg2
,
5115 build_int_cst (type
, 1)),
5118 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
5119 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5121 return pedantic_non_lvalue_loc (loc
,
5122 fold_convert_loc (loc
, type
, tem
));
5127 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5128 MAX_EXPR, to preserve the signedness of the comparison. */
5129 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
5131 && operand_equal_p (arg01
,
5132 const_binop (MINUS_EXPR
, arg2
,
5133 build_int_cst (type
, 1)),
5136 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
5137 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5139 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
5144 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5145 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
5147 && operand_equal_p (arg01
,
5148 const_binop (PLUS_EXPR
, arg2
,
5149 build_int_cst (type
, 1)),
5152 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
5153 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5155 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
5169 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5170 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5171 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5175 /* EXP is some logical combination of boolean tests. See if we can
5176 merge it into some range test. Return the new tree if so. */
/* NOTE(review): lossily-extracted listing of fold_range_test from GCC
   fold-const.c -- original line numbering jumps (5191 -> 5194 drops the
   early 'return 0;', 5198 -> 5200 drops the 'if (or_op)' guard, the
   function's tail past 5252 is cut), and statements are split across
   wrapped lines.  Not compilable as-is; restore from version control.
   Visible logic: merge a TRUTH_AND/OR(IF) of two boolean tests into a
   single range test via make_range/merge_ranges/build_range_check,
   emitting the strict-overflow warning when applicable -- TODO confirm
   against the upstream file.  */
5179 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
5182 int or_op
= (code
== TRUTH_ORIF_EXPR
5183 || code
== TRUTH_OR_EXPR
);
5184 int in0_p
, in1_p
, in_p
;
5185 tree low0
, low1
, low
, high0
, high1
, high
;
5186 bool strict_overflow_p
= false;
5188 const char * const warnmsg
= G_("assuming signed overflow does not occur "
5189 "when simplifying range test");
5191 if (!INTEGRAL_TYPE_P (type
))
5194 lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
5195 rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
5197 /* If this is an OR operation, invert both sides; we will invert
5198 again at the end. */
5200 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
5202 /* If both expressions are the same, if we can merge the ranges, and we
5203 can build the range test, return it or it inverted. If one of the
5204 ranges is always true or always false, consider it to be the same
5205 expression as the other. */
5206 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
5207 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
5209 && 0 != (tem
= (build_range_check (loc
, type
,
5211 : rhs
!= 0 ? rhs
: integer_zero_node
,
5214 if (strict_overflow_p
)
5215 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
5216 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
5219 /* On machines where the branch cost is expensive, if this is a
5220 short-circuited branch and the underlying object on both sides
5221 is the same, make a non-short-circuit operation. */
5222 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5223 && lhs
!= 0 && rhs
!= 0
5224 && (code
== TRUTH_ANDIF_EXPR
5225 || code
== TRUTH_ORIF_EXPR
)
5226 && operand_equal_p (lhs
, rhs
, 0))
5228 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5229 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5230 which cases we can't do this. */
5231 if (simple_operand_p (lhs
))
5232 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5233 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5236 else if (!lang_hooks
.decls
.global_bindings_p ()
5237 && !CONTAINS_PLACEHOLDER_P (lhs
))
5239 tree common
= save_expr (lhs
);
5241 if (0 != (lhs
= build_range_check (loc
, type
, common
,
5242 or_op
? ! in0_p
: in0_p
,
5244 && (0 != (rhs
= build_range_check (loc
, type
, common
,
5245 or_op
? ! in1_p
: in1_p
,
5248 if (strict_overflow_p
)
5249 fold_overflow_warning (warnmsg
,
5250 WARN_STRICT_OVERFLOW_COMPARISON
)
;
5251 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5252 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5261 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5262 bit value. Arrange things so the extra bits will be set to zero if and
5263 only if C is signed-extended to its full width. If MASK is nonzero,
5264 it is an INTEGER_CST that should be AND'ed with the extra bits. */
/* NOTE(review): lossily-extracted listing of unextend from GCC
   fold-const.c.  The 'static tree' return-type line (orig 5266), the
   opening brace, the 'tree temp;' declaration, the early 'return c;'
   after the p == modesize test, a guard around the mask AND (numbering
   jumps 5291 -> 5293), and the closing brace were all dropped by the
   extraction.  Not compilable as-is; restore from version control.
   Visible contract (from the surviving comment at orig 5261-5264):
   sign-extend the P-bit constant C so the extra bits are zero iff C
   sign-extends to full width, optionally AND'ing the extra bits with
   MASK -- TODO confirm against the upstream file.  */
5267 unextend (tree c
, int p
, int unsignedp
, tree mask
)
5269 tree type
= TREE_TYPE (c
);
5270 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
5273 if (p
== modesize
|| unsignedp
)
5276 /* We work by getting just the sign bit into the low-order bit, then
5277 into the high-order bit, then sign-extend. We then XOR that value
5279 temp
= build_int_cst (TREE_TYPE (c
), wi::extract_uhwi (c
, p
- 1, 1));
5281 /* We must use a signed type in order to get an arithmetic right shift.
5282 However, we must also avoid introducing accidental overflows, so that
5283 a subsequent call to integer_zerop will work. Hence we must
5284 do the type conversion here. At this point, the constant is either
5285 zero or one, and the conversion to a signed type can never overflow.
5286 We could get an overflow if this conversion is done anywhere else. */
5287 if (TYPE_UNSIGNED (type
))
5288 temp
= fold_convert (signed_type_for (type
), temp
);
5290 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
5291 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
/* NOTE(review): numbering jumps 5291 -> 5293; a conditional guarding
   the mask application appears to have been dropped here.  */
5293 temp
= const_binop (BIT_AND_EXPR
, temp
,
5294 fold_convert (TREE_TYPE (c
), mask
));
5295 /* If necessary, convert the type back to match the type of C. */
5296 if (TYPE_UNSIGNED (type
))
5297 temp
= fold_convert (type
, temp
);
5299 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
5302 /* For an expression that has the form
5306 we can drop one of the inner expressions and simplify to
5310 LOC is the location of the resulting expression. OP is the inner
5311 logical operation; the left-hand side in the examples above, while CMPOP
5312 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5313 removing a condition that guards another, as in
5314 (A != NULL && A->...) || A == NULL
5315 which we must not transform. If RHS_ONLY is true, only eliminate the
5316 right-most operand of the inner logical operation. */
/* NOTE(review): lossily-extracted listing of
   merge_truthop_with_opposite_arm from GCC fold-const.c.  The signature's
   second line (the rhs_only parameter, orig 5320), opening/closing
   braces, the early 'return NULL_TREE;' statements after the
   side-effects and tcc_comparison checks, and the bodies replacing
   lhs/rhs with their recursive results were dropped by the extraction
   (numbering jumps 5332 -> 5335, 5341 -> 5344, 5350 -> 5353,
   5360 -> 5362, 5364 -> 5366).  Not compilable as-is; restore from
   version control.  Per the surviving header comment (orig 5302-5316):
   drop the operand of OP (a TRUTH_AND/OR) that is the exact inverse of
   comparison CMPOP, recursing into nested truth ops; RHS_ONLY restricts
   elimination to the right operand -- TODO confirm against upstream.  */
5319 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
5322 tree type
= TREE_TYPE (cmpop
);
5323 enum tree_code code
= TREE_CODE (cmpop
);
5324 enum tree_code truthop_code
= TREE_CODE (op
);
5325 tree lhs
= TREE_OPERAND (op
, 0);
5326 tree rhs
= TREE_OPERAND (op
, 1);
5327 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
5328 enum tree_code rhs_code
= TREE_CODE (rhs
);
5329 enum tree_code lhs_code
= TREE_CODE (lhs
);
5330 enum tree_code inv_code
;
5332 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
5335 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
5338 if (rhs_code
== truthop_code
)
5340 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
5341 if (newrhs
!= NULL_TREE
)
5344 rhs_code
= TREE_CODE (rhs
);
5347 if (lhs_code
== truthop_code
&& !rhs_only
)
5349 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
5350 if (newlhs
!= NULL_TREE
)
5353 lhs_code
= TREE_CODE (lhs
);
5357 inv_code
= invert_tree_comparison (code
, HONOR_NANS (type
));
5358 if (inv_code
== rhs_code
5359 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5360 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5362 if (!rhs_only
&& inv_code
== lhs_code
5363 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5364 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5366 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
5367 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
5372 /* Find ways of folding logical expressions of LHS and RHS:
5373 Try to merge two comparisons to the same innermost item.
5374 Look for range tests like "ch >= '0' && ch <= '9'".
5375 Look for combinations of simple terms on machines with expensive branches
5376 and evaluate the RHS unconditionally.
5378 For example, if we have p->a == 2 && p->b == 4 and we can make an
5379 object large enough to span both A and B, we can do this with a comparison
5380 against the object ANDed with the a mask.
5382 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5383 operations to do this with one comparison.
5385 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5386 function and the one above.
5388 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5389 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5391 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5394 We return the simplified tree or 0 if no optimization is possible. */
/* NOTE(review): lossily-extracted listing of fold_truth_andor_1 from GCC
   fold-const.c.  Dozens of original lines are absent (the numbering
   jumps throughout, e.g. 5447 -> 5451 drops the rcode = NE_EXPR
   reassignment and braces, 5517 -> 5520, 5572 -> 5581 drops the
   unsigned-conversion body, 5645 -> 5649, 5759 -> 5765), and each
   surviving statement is split across wrapped lines.  Not compilable
   as-is; restore the function from version control.  Visible logic per
   the surviving comments: merge two EQ/NE comparisons of bit-fields of
   the same object into one masked comparison, also handling
   (a|b) != 0 / (a|b) == 0 rewrites on high-branch-cost targets --
   TODO confirm against the upstream file.  */
5397 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
5400 /* If this is the "or" of two comparisons, we can do something if
5401 the comparisons are NE_EXPR. If this is the "and", we can do something
5402 if the comparisons are EQ_EXPR. I.e.,
5403 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5405 WANTED_CODE is this operation code. For single bit fields, we can
5406 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5407 comparison for one-bit fields. */
5409 enum tree_code wanted_code
;
5410 enum tree_code lcode
, rcode
;
5411 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5412 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5413 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5414 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5415 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
5416 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
5417 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5418 machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5419 machine_mode lnmode
, rnmode
;
5420 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5421 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5422 tree l_const
, r_const
;
5423 tree lntype
, rntype
, result
;
5424 HOST_WIDE_INT first_bit
, end_bit
;
5427 /* Start by getting the comparison codes. Fail if anything is volatile.
5428 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5429 it were surrounded with a NE_EXPR. */
5431 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5434 lcode
= TREE_CODE (lhs
);
5435 rcode
= TREE_CODE (rhs
);
5437 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5439 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5440 build_int_cst (TREE_TYPE (lhs
), 0));
5444 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5446 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5447 build_int_cst (TREE_TYPE (rhs
), 0));
5451 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5452 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5455 ll_arg
= TREE_OPERAND (lhs
, 0);
5456 lr_arg
= TREE_OPERAND (lhs
, 1);
5457 rl_arg
= TREE_OPERAND (rhs
, 0);
5458 rr_arg
= TREE_OPERAND (rhs
, 1);
5460 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5461 if (simple_operand_p (ll_arg
)
5462 && simple_operand_p (lr_arg
))
5464 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5465 && operand_equal_p (lr_arg
, rr_arg
, 0))
5467 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
5468 truth_type
, ll_arg
, lr_arg
);
5472 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5473 && operand_equal_p (lr_arg
, rl_arg
, 0))
5475 result
= combine_comparisons (loc
, code
, lcode
,
5476 swap_tree_comparison (rcode
),
5477 truth_type
, ll_arg
, lr_arg
);
5483 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5484 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5486 /* If the RHS can be evaluated unconditionally and its operands are
5487 simple, it wins to evaluate the RHS unconditionally on machines
5488 with expensive branches. In this case, this isn't a comparison
5489 that can be merged. */
5491 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5493 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5494 && simple_operand_p (rl_arg
)
5495 && simple_operand_p (rr_arg
))
5497 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5498 if (code
== TRUTH_OR_EXPR
5499 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5500 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5501 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5502 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5503 return build2_loc (loc
, NE_EXPR
, truth_type
,
5504 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5506 build_int_cst (TREE_TYPE (ll_arg
), 0));
5508 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5509 if (code
== TRUTH_AND_EXPR
5510 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5511 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5512 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5513 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5514 return build2_loc (loc
, EQ_EXPR
, truth_type
,
5515 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5517 build_int_cst (TREE_TYPE (ll_arg
), 0));
5520 /* See if the comparisons can be merged. Then get all the parameters for
5523 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5524 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5528 ll_inner
= decode_field_reference (loc
, ll_arg
,
5529 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5530 &ll_unsignedp
, &volatilep
, &ll_mask
,
5532 lr_inner
= decode_field_reference (loc
, lr_arg
,
5533 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5534 &lr_unsignedp
, &volatilep
, &lr_mask
,
5536 rl_inner
= decode_field_reference (loc
, rl_arg
,
5537 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5538 &rl_unsignedp
, &volatilep
, &rl_mask
,
5540 rr_inner
= decode_field_reference (loc
, rr_arg
,
5541 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5542 &rr_unsignedp
, &volatilep
, &rr_mask
,
5545 /* It must be true that the inner operation on the lhs of each
5546 comparison must be the same if we are to be able to do anything.
5547 Then see if we have constants. If not, the same must be true for
5549 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
5550 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5553 if (TREE_CODE (lr_arg
) == INTEGER_CST
5554 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5555 l_const
= lr_arg
, r_const
= rr_arg
;
5556 else if (lr_inner
== 0 || rr_inner
== 0
5557 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5560 l_const
= r_const
= 0;
5562 /* If either comparison code is not correct for our logical operation,
5563 fail. However, we can convert a one-bit comparison against zero into
5564 the opposite comparison against that bit being set in the field. */
5566 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5567 if (lcode
!= wanted_code
)
5569 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5571 /* Make the left operand unsigned, since we are only interested
5572 in the value of one bit. Otherwise we are doing the wrong
5581 /* This is analogous to the code for l_const above. */
5582 if (rcode
!= wanted_code
)
5584 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5593 /* See if we can find a mode that contains both fields being compared on
5594 the left. If we can't, fail. Otherwise, update all constants and masks
5595 to be relative to a field of that size. */
5596 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5597 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5598 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5599 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5601 if (lnmode
== VOIDmode
)
5604 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5605 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5606 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5607 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5609 if (BYTES_BIG_ENDIAN
)
5611 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5612 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5615 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
5616 size_int (xll_bitpos
));
5617 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
5618 size_int (xrl_bitpos
));
5622 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
5623 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5624 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
5625 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5626 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5629 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5631 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5636 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
5637 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5638 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
5639 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5640 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5643 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5645 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5649 /* If the right sides are not constant, do the same for it. Also,
5650 disallow this optimization if a size or signedness mismatch occurs
5651 between the left and right sides. */
5654 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5655 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5656 /* Make sure the two fields on the right
5657 correspond to the left without being swapped. */
5658 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5661 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5662 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5663 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5664 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5666 if (rnmode
== VOIDmode
)
5669 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5670 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5671 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5672 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5674 if (BYTES_BIG_ENDIAN
)
5676 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5677 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5680 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5682 size_int (xlr_bitpos
));
5683 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5685 size_int (xrr_bitpos
));
5687 /* Make a mask that corresponds to both fields being compared.
5688 Do this for both items being compared. If the operands are the
5689 same size and the bits being compared are in the same position
5690 then we can do this by masking both and comparing the masked
5692 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5693 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
5694 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5696 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5697 ll_unsignedp
|| rl_unsignedp
);
5698 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5699 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5701 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5702 lr_unsignedp
|| rr_unsignedp
);
5703 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5704 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5706 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5709 /* There is still another way we can do something: If both pairs of
5710 fields being compared are adjacent, we may be able to make a wider
5711 field containing them both.
5713 Note that we still must mask the lhs/rhs expressions. Furthermore,
5714 the mask must be shifted to account for the shift done by
5715 make_bit_field_ref. */
5716 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5717 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5718 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5719 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5723 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
,
5724 ll_bitsize
+ rl_bitsize
,
5725 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5726 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
,
5727 lr_bitsize
+ rr_bitsize
,
5728 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5730 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5731 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
5732 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5733 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
5735 /* Convert to the smaller type before masking out unwanted bits. */
5737 if (lntype
!= rntype
)
5739 if (lnbitsize
> rnbitsize
)
5741 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
5742 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
5745 else if (lnbitsize
< rnbitsize
)
5747 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
5748 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
5753 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5754 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5756 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5757 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5759 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5765 /* Handle the case of comparisons with constants. If there is something in
5766 common between the masks, those bits of the constants must be the same.
5767 If not, the condition is always false. Test for this to avoid generating
5768 incorrect code below. */
5769 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
5770 if (! integer_zerop (result
)
5771 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
5772 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
5774 if (wanted_code
== NE_EXPR
)
5776 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5777 return constant_boolean_node (true, truth_type
);
5781 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5782 return constant_boolean_node (false, truth_type
);
5786 /* Construct the expression we will return. First get the component
5787 reference we will make. Unless the mask is all ones the width of
5788 that field, perform the mask operation. Then compare with the
5790 result
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5791 ll_unsignedp
|| rl_unsignedp
);
5793 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5794 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5795 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5797 return build2_loc (loc
, wanted_code
, truth_type
, result
,
5798 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
/* NOTE(review): this region was mangled in extraction -- each statement is
   split across several lines, the leading integers are remnants of the
   original file's own line numbering, and some original lines are missing
   (the numbering gaps show where).  Text kept byte-identical; only review
   comments added.  */
/* Folds a comparison (CODE) of a MIN_EXPR/MAX_EXPR against constant OP1.
   Visible logic: extract the MIN/MAX constant and the comparison constant,
   compute their equality/ordering, then rewrite MAX(X,c) op k / MIN(X,c) op k
   into a simpler comparison on X or a constant result.  */
5801 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5805 optimize_minmax_comparison (location_t loc
, enum tree_code code
, tree type
,
5809 enum tree_code op_code
;
5812 int consts_equal
, consts_lt
;
/* Strip sign-preserving conversions so TREE_CODE (arg0) sees the MIN/MAX.  */
5815 STRIP_SIGN_NOPS (arg0
);
5817 op_code
= TREE_CODE (arg0
);
5818 minmax_const
= TREE_OPERAND (arg0
, 1);
5819 comp_const
= fold_convert_loc (loc
, TREE_TYPE (arg0
), op1
);
5820 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5821 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5822 inner
= TREE_OPERAND (arg0
, 0);
5824 /* If something does not permit us to optimize, return the original tree. */
5825 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5826 || TREE_CODE (comp_const
) != INTEGER_CST
5827 || TREE_OVERFLOW (comp_const
)
5828 || TREE_CODE (minmax_const
) != INTEGER_CST
5829 || TREE_OVERFLOW (minmax_const
))
5832 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5833 and GT_EXPR, doing the rest with recursive calls using logical
/* NE/LT/LE: recurse on the inverted comparison, then invert the result.
   NOTE(review): the surrounding switch head and some case bodies are
   missing from this extraction.  */
5837 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5840 = optimize_minmax_comparison (loc
,
5841 invert_tree_comparison (code
, false),
5844 return invert_truthvalue_loc (loc
, tem
);
/* GE (presumably): rebuilt as (arg0 == k) || (arg0 > k).  */
5850 fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
5851 optimize_minmax_comparison
5852 (loc
, EQ_EXPR
, type
, arg0
, comp_const
),
5853 optimize_minmax_comparison
5854 (loc
, GT_EXPR
, type
, arg0
, comp_const
));
/* EQ_EXPR handling: relate the MIN/MAX bound to the comparison constant.  */
5857 if (op_code
== MAX_EXPR
&& consts_equal
)
5858 /* MAX (X, 0) == 0 -> X <= 0 */
5859 return fold_build2_loc (loc
, LE_EXPR
, type
, inner
, comp_const
);
5861 else if (op_code
== MAX_EXPR
&& consts_lt
)
5862 /* MAX (X, 0) == 5 -> X == 5 */
5863 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5865 else if (op_code
== MAX_EXPR
)
5866 /* MAX (X, 0) == -1 -> false */
5867 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5869 else if (consts_equal
)
5870 /* MIN (X, 0) == 0 -> X >= 0 */
5871 return fold_build2_loc (loc
, GE_EXPR
, type
, inner
, comp_const
);
5874 /* MIN (X, 0) == 5 -> false */
5875 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5878 /* MIN (X, 0) == -1 -> X == -1 */
5879 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
/* GT_EXPR handling (presumably a separate case label, missing here).  */
5882 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5883 /* MAX (X, 0) > 0 -> X > 0
5884 MAX (X, 0) > 5 -> X > 5 */
5885 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5887 else if (op_code
== MAX_EXPR
)
5888 /* MAX (X, 0) > -1 -> true */
5889 return omit_one_operand_loc (loc
, type
, integer_one_node
, inner
);
5891 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5892 /* MIN (X, 0) > 0 -> false
5893 MIN (X, 0) > 5 -> false */
5894 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5897 /* MIN (X, 0) > -1 -> X > -1 */
5898 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
/* NOTE(review): extraction has split statements across lines and dropped
   some original lines (the embedded integers are remnants of the source's
   own line numbering).  Kept byte-identical; comments only.  */
5905 /* T is an integer expression that is being multiplied, divided, or taken a
5906 modulus (CODE says which and what kind of divide or modulus) by a
5907 constant C. See if we can eliminate that operation by folding it with
5908 other operations already in T. WIDE_TYPE, if non-null, is a type that
5909 should be used for the computation if wider than our type.
5911 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5912 (X * 2) + (Y * 4). We must, however, be assured that either the original
5913 expression would not overflow or that overflow is undefined for the type
5914 in the language in question.
5916 If we return a non-null expression, it is an equivalent form of the
5917 original computation, but need not be in the original type.
5919 We set *STRICT_OVERFLOW_P to true if the return values depends on
5920 signed overflow being undefined. Otherwise we do not change
5921 *STRICT_OVERFLOW_P. */
/* Thin wrapper: guards recursion depth, then delegates to extract_muldiv_1.
   NOTE(review): the depth counter increment/decrement and return statement
   are among the lines lost in extraction.  */
5924 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5925 bool *strict_overflow_p
)
5927 /* To avoid exponential search depth, refuse to allow recursion past
5928 three levels. Beyond that (1) it's highly unlikely that we'll find
5929 something interesting and (2) we've probably processed it before
5930 when we built the inner expression. */
5939 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
/* NOTE(review): this function was mangled in extraction -- statements are
   split across lines, the leading integers are remnants of the original
   line numbering, and several lines (switch head, case labels, braces,
   some returns) are missing.  Text kept byte-identical; comments only.
   Worker for extract_muldiv: dispatches on TREE_CODE (t) and tries to fold
   the outer mul/div/mod by constant C into T's own operations.  */
5946 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5947 bool *strict_overflow_p
)
5949 tree type
= TREE_TYPE (t
);
5950 enum tree_code tcode
= TREE_CODE (t
);
/* ctype: compute in WIDE_TYPE when it is strictly wider than T's type.  */
5951 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5952 > GET_MODE_SIZE (TYPE_MODE (type
)))
? wide_type
: type
);
5955 int same_p
= tcode
== code
;
5956 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5957 bool sub_strict_overflow_p
;
5959 /* Don't deal with constants of zero here; they confuse the code below. */
5960 if (integer_zerop (c
))
5963 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5964 op0
= TREE_OPERAND (t
, 0);
5966 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5967 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5969 /* Note that we need not handle conditional operations here since fold
5970 already handles those cases. So just do arithmetic here. */
/* INTEGER_CST case (label missing from extraction).  */
5974 /* For a constant, we can always simplify if we are a multiply
5975 or (for divide and modulus) if it is a multiple of our constant. */
5976 if (code
== MULT_EXPR
5977 || wi::multiple_of_p (t
, c
, TYPE_SIGN (type
)))
5978 return const_binop (code
, fold_convert (ctype
, t
),
5979 fold_convert (ctype
, c
));
5982 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5983 /* If op0 is an expression ... */
5984 if ((COMPARISON_CLASS_P (op0
)
5985 || UNARY_CLASS_P (op0
)
5986 || BINARY_CLASS_P (op0
)
5987 || VL_EXP_CLASS_P (op0
)
5988 || EXPRESSION_CLASS_P (op0
))
5989 /* ... and has wrapping overflow, and its type is smaller
5990 than ctype, then we cannot pass through as widening. */
5991 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
5992 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
)))
5993 && (TYPE_PRECISION (ctype
)
5994 > TYPE_PRECISION (TREE_TYPE (op0
))))
5995 /* ... or this is a truncation (t is narrower than op0),
5996 then we cannot pass through this narrowing. */
5997 || (TYPE_PRECISION (type
)
< TYPE_PRECISION (TREE_TYPE (op0
)))
5999 /* ... or signedness changes for division or modulus,
6000 then we cannot pass through this conversion. */
6001 || (code
!= MULT_EXPR
6002 && (TYPE_UNSIGNED (ctype
)
6003 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
6004 /* ... or has undefined overflow while the converted to
6005 type has not, we cannot do the operation in the inner type
6006 as that would introduce undefined overflow. */
6007 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6008 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
)))
6009 && !TYPE_OVERFLOW_UNDEFINED (type
))))
6012 /* Pass the constant down and see if we can make a simplification. If
6013 we can, replace this expression with the inner simplification for
6014 possible later conversion to our or some other type. */
6015 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
6016 && TREE_CODE (t2
) == INTEGER_CST
6017 && !TREE_OVERFLOW (t2
)
6018 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
6020 ? ctype
: NULL_TREE
,
6021 strict_overflow_p
))))
/* ABS_EXPR (presumably; case label lost in extraction).  */
6026 /* If widening the type changes it from signed to unsigned, then we
6027 must avoid building ABS_EXPR itself as unsigned. */
6028 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
6030 tree cstype
= (*signed_type_for
) (ctype
);
6031 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
6034 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
6035 return fold_convert (ctype
, t1
);
6039 /* If the constant is negative, we cannot simplify this. */
6040 if (tree_int_cst_sgn (c
) == -1)
/* NEGATE_EXPR (presumably; case label lost in extraction).  */
6044 /* For division and modulus, type can't be unsigned, as e.g.
6045 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6046 For signed types, even with wrapping overflow, this is fine. */
6047 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
6049 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
6051 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
6054 case MIN_EXPR
: case MAX_EXPR
:
6055 /* If widening the type changes the signedness, then we can't perform
6056 this optimization as that changes the result. */
6057 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
6060 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6061 sub_strict_overflow_p
= false;
6062 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6063 &sub_strict_overflow_p
)) != 0
6064 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
6065 &sub_strict_overflow_p
)) != 0)
/* Dividing by a negative constant flips MIN <-> MAX.  */
6067 if (tree_int_cst_sgn (c
) < 0)
6068 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6069 if (sub_strict_overflow_p
)
6070 *strict_overflow_p
= true;
6071 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6072 fold_convert (ctype
, t2
));
6076 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
6077 /* If the second operand is constant, this is a multiplication
6078 or floor division, by a power of two, so we can treat it that
6079 way unless the multiplier or divisor overflows. Signed
6080 left-shift overflow is implementation-defined rather than
6081 undefined in C90, so do not convert signed left shift into
6083 if (TREE_CODE (op1
) == INTEGER_CST
6084 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6085 /* const_binop may not detect overflow correctly,
6086 so check for it explicitly here. */
6087 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
6088 && 0 != (t1
= fold_convert (ctype
,
6089 const_binop (LSHIFT_EXPR
,
6092 && !TREE_OVERFLOW (t1
))
/* Rewrite the shift as * or floor-/ by the power of two, and recurse.  */
6093 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
6094 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
6096 fold_convert (ctype
, op0
),
6098 c
, code
, wide_type
, strict_overflow_p
);
6101 case PLUS_EXPR
: case MINUS_EXPR
:
6102 /* See if we can eliminate the operation on both sides. If we can, we
6103 can return a new PLUS or MINUS. If we can't, the only remaining
6104 cases where we can do anything are if the second operand is a
6106 sub_strict_overflow_p
= false;
6107 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6108 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6109 if (t1
!= 0 && t2
!= 0
6110 && (code
== MULT_EXPR
6111 /* If not multiplication, we can only do this if both operands
6112 are divisible by c. */
|| (multiple_of_p (ctype
, op0
, c
)
6114 && multiple_of_p (ctype
, op1
, c
))))
6116 if (sub_strict_overflow_p
)
6117 *strict_overflow_p
= true;
6118 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6119 fold_convert (ctype
, t2
));
6122 /* If this was a subtraction, negate OP1 and set it to be an addition.
6123 This simplifies the logic below. */
6124 if (tcode
== MINUS_EXPR
)
6126 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
6127 /* If OP1 was not easily negatable, the constant may be OP0. */
6128 if (TREE_CODE (op0
) == INTEGER_CST
)
6130 std::swap (op0
, op1
);
6135 if (TREE_CODE (op1
) != INTEGER_CST
)
6138 /* If either OP1 or C are negative, this optimization is not safe for
6139 some of the division and remainder types while for others we need
6140 to change the code. */
6141 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
6143 if (code
== CEIL_DIV_EXPR
)
6144 code
= FLOOR_DIV_EXPR
;
6145 else if (code
== FLOOR_DIV_EXPR
)
6146 code
= CEIL_DIV_EXPR
;
6147 else if (code
!= MULT_EXPR
6148 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
6152 /* If it's a multiply or a division/modulus operation of a multiple
6153 of our constant, do the operation and verify it doesn't overflow. */
6154 if (code
== MULT_EXPR
6155 || wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6157 op1
= const_binop (code
, fold_convert (ctype
, op1
),
6158 fold_convert (ctype
, c
));
6159 /* We allow the constant to overflow with wrapping semantics. */
6161 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
6167 /* If we have an unsigned type, we cannot widen the operation since it
6168 will change the result if the original computation overflowed. */
6169 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
6172 /* If we were able to eliminate our operation from the first side,
6173 apply our operation to the second side and reform the PLUS. */
6174 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
6175 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
6177 /* The last case is if we are a multiply. In that case, we can
6178 apply the distributive law to commute the multiply and addition
6179 if the multiplication of the constants doesn't overflow
6180 and overflow is defined. With undefined overflow
6181 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6182 if (code
== MULT_EXPR
&& TYPE_OVERFLOW_WRAPS (ctype
))
6183 return fold_build2 (tcode
, ctype
,
6184 fold_build2 (code
, ctype
,
6185 fold_convert (ctype
, op0
),
6186 fold_convert (ctype
, c
)),
/* MULT_EXPR (presumably; case label lost in extraction).  */
6192 /* We have a special case here if we are doing something like
6193 (C * 8) % 4 since we know that's zero. */
6194 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
6195 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
6196 /* If the multiplication can overflow we cannot optimize this. */
&& TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
6198 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
6199 && wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6201 *strict_overflow_p
= true;
6202 return omit_one_operand (type
, integer_zero_node
, op0
);
6205 /* ... fall through ... */
6207 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6208 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
6209 /* If we can extract our operation from the LHS, do so and return a
6210 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6211 do something only if the second operand is a constant. */
6213 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6214 strict_overflow_p
)) != 0)
6215 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6216 fold_convert (ctype
, op1
));
6217 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
6218 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
6219 strict_overflow_p
)) != 0)
6220 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6221 fold_convert (ctype
, t1
));
6222 else if (TREE_CODE (op1
) != INTEGER_CST
)
6225 /* If these are the same operation types, we can associate them
6226 assuming no overflow. */
6229 bool overflow_p
= false;
6230 bool overflow_mul_p
;
6231 signop sign
= TYPE_SIGN (ctype
);
/* Combine the two constants: op1 * c, tracking overflow explicitly.  */
6232 wide_int mul
= wi::mul (op1
, c
, sign
, &overflow_mul_p
);
6233 overflow_p
= TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
);
6235 && ((sign
== UNSIGNED
&& tcode
!= MULT_EXPR
) || sign
== SIGNED
))
6239 mul
= wide_int::from (mul
, TYPE_PRECISION (ctype
),
6240 TYPE_SIGN (TREE_TYPE (op1
)));
6241 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6242 wide_int_to_tree (ctype
, mul
));
6246 /* If these operations "cancel" each other, we have the main
6247 optimizations of this pass, which occur when either constant is a
6248 multiple of the other, in which case we replace this with either an
6249 operation or CODE or TCODE.
6251 If we have an unsigned type, we cannot do this since it will change
6252 the result if the original computation overflowed. */
6253 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
6254 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
6255 || (tcode
== MULT_EXPR
6256 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
6257 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
6258 && code
!= MULT_EXPR
)))
/* op1 a multiple of c: keep TCODE with the quotient op1/c.  */
6260 if (wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6262 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6263 *strict_overflow_p
= true;
6264 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6265 fold_convert (ctype
,
6266 const_binop (TRUNC_DIV_EXPR
,
/* c a multiple of op1: keep CODE with the quotient c/op1.  */
6269 else if (wi::multiple_of_p (c
, op1
, TYPE_SIGN (type
)))
6271 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6272 *strict_overflow_p
= true;
6273 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6274 fold_convert (ctype
,
6275 const_binop (TRUNC_DIV_EXPR
,
/* NOTE(review): text mangled in extraction (split lines, embedded original
   line numbers, some lines missing -- e.g. the vector element value at
   original lines 6302-6303).  Kept byte-identical; comments only.  */
6288 /* Return a node which has the indicated constant VALUE (either 0 or
6289 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6290 and is of the indicated TYPE. */
6293 constant_boolean_node (bool value
, tree type
)
/* Fast paths for the two common scalar types, then vectors, then a
   generic conversion of 0/1 to TYPE.  */
6295 if (type
== integer_type_node
)
6296 return value
? integer_one_node
: integer_zero_node
;
6297 else if (type
== boolean_type_node
)
6298 return value
? boolean_true_node
: boolean_false_node
;
6299 else if (TREE_CODE (type
) == VECTOR_TYPE
)
6300 return build_vector_from_val (type
,
6301 build_int_cst (TREE_TYPE (type
),
6304 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
/* NOTE(review): text mangled in extraction (split lines, embedded original
   line numbers, some lines missing).  Kept byte-identical; comments only.  */
6308 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6309 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6310 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6311 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6312 COND is the first argument to CODE; otherwise (as in the example
6313 given here), it is the second argument. TYPE is the type of the
6314 original expression. Return NULL_TREE if no simplification is
6318 fold_binary_op_with_conditional_arg (location_t loc
,
6319 enum tree_code code
,
6320 tree type
, tree op0
, tree op1
,
6321 tree cond
, tree arg
, int cond_first_p
)
/* Types of the conditional side and the plain-argument side of CODE.  */
6323 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
6324 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
6325 tree test
, true_value
, false_value
;
6326 tree lhs
= NULL_TREE
;
6327 tree rhs
= NULL_TREE
;
6328 enum tree_code cond_code
= COND_EXPR
;
/* Decompose COND: either an explicit (VEC_)COND_EXPR, or a comparison
   whose arms become boolean constants.  */
6330 if (TREE_CODE (cond
) == COND_EXPR
6331 || TREE_CODE (cond
) == VEC_COND_EXPR
)
6333 test
= TREE_OPERAND (cond
, 0);
6334 true_value
= TREE_OPERAND (cond
, 1);
6335 false_value
= TREE_OPERAND (cond
, 2);
6336 /* If this operand throws an expression, then it does not make
6337 sense to try to perform a logical or arithmetic operation
6339 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
6341 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
6346 tree testtype
= TREE_TYPE (cond
);
6348 true_value
= constant_boolean_node (true, testtype
);
6349 false_value
= constant_boolean_node (false, testtype
);
6352 if (TREE_CODE (TREE_TYPE (test
)) == VECTOR_TYPE
)
6353 cond_code
= VEC_COND_EXPR
;
6355 /* This transformation is only worthwhile if we don't have to wrap ARG
6356 in a SAVE_EXPR and the operation can be simplified without recursing
6357 on at least one of the branches once its pushed inside the COND_EXPR. */
6358 if (!TREE_CONSTANT (arg
)
6359 && (TREE_SIDE_EFFECTS (arg
)
6360 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
6361 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
6364 arg
= fold_convert_loc (loc
, arg_type
, arg
);
/* Build CODE applied to the true arm, respecting operand order.  */
6367 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
6369 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
6371 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
/* Likewise for the false arm.  */
6375 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
6377 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
6379 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
6382 /* Check that we have simplified at least one of the branches. */
6383 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
6386 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
/* NOTE(review): text mangled in extraction (split lines, embedded original
   line numbers, some lines missing -- e.g. the early `return' bodies).
   Kept byte-identical; comments only.  */
6390 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6392 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6393 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6394 ADDEND is the same as X.
6396 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6397 and finite. The problematic cases are when X is zero, and its mode
6398 has signed zeros. In the case of rounding towards -infinity,
6399 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6400 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6403 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
/* Bail out unless ADDEND really is a zero constant.  */
6405 if (!real_zerop (addend
))
6408 /* Don't allow the fold with -fsignaling-nans. */
6409 if (HONOR_SNANS (element_mode (type
)))
6412 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6413 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
6416 /* In a vector or complex, we would need to check the sign of all zeros. */
6417 if (TREE_CODE (addend
) != REAL_CST
)
6420 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6421 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6424 /* The mode has signed zeros, and we have to honor their sign.
6425 In this situation, there is only one case we can return true for.
6426 X - 0 is the same as X unless rounding towards -infinity is
6428 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
));
/* NOTE(review): text mangled in extraction -- statements split across lines,
   embedded original line numbers, and several lines missing (the switch
   heads on CODE, case labels, braces).  Kept byte-identical; comments only.
   NOTE(review): the header says "ARG1 must be a TREE_REAL_CST" but the code
   treats ARG1 as an integer constant (wi::mul, tree_int_cst_sgn) -- likely a
   stale comment meaning INTEGER_CST; verify against upstream.  */
6431 /* Subroutine of fold() that optimizes comparisons of a division by
6432 a nonzero integer constant against an integer constant, i.e.
6435 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6436 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6437 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6439 The function returns the constant folded tree if a simplification
6440 can be made, and NULL_TREE otherwise. */
6443 fold_div_compare (location_t loc
,
6444 enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6446 tree prod
, tmp
, hi
, lo
;
6447 tree arg00
= TREE_OPERAND (arg0
, 0);
6448 tree arg01
= TREE_OPERAND (arg0
, 1);
6449 signop sign
= TYPE_SIGN (TREE_TYPE (arg0
));
6450 bool neg_overflow
= false;
6453 /* We have to do this the hard way to detect unsigned overflow.
6454 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6455 wide_int val
= wi::mul (arg01
, arg1
, sign
, &overflow
);
6456 prod
= force_fit_type (TREE_TYPE (arg00
), val
, -1, overflow
);
6457 neg_overflow
= false;
/* Compute the [lo, hi] range of arg00 values for which arg00/arg01 == arg1,
   split by the signedness and the signs of divisor/result.  */
6459 if (sign
== UNSIGNED
)
6461 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6462 build_int_cst (TREE_TYPE (arg01
), 1));
6465 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6466 val
= wi::add (prod
, tmp
, sign
, &overflow
);
6467 hi
= force_fit_type (TREE_TYPE (arg00
), val
,
6468 -1, overflow
| TREE_OVERFLOW (prod
));
6470 else if (tree_int_cst_sgn (arg01
) >= 0)
6472 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6473 build_int_cst (TREE_TYPE (arg01
), 1));
6474 switch (tree_int_cst_sgn (arg1
))
6477 neg_overflow
= true;
6478 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6483 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6488 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6498 /* A negative divisor reverses the relational operators. */
6499 code
= swap_tree_comparison (code
);
6501 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6502 build_int_cst (TREE_TYPE (arg01
), 1));
6503 switch (tree_int_cst_sgn (arg1
))
6506 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6511 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6516 neg_overflow
= true;
6517 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
/* Dispatch on CODE (switch head lost in extraction); overflow of a bound
   collapses the comparison to a constant or a one-sided check.  */
/* EQ_EXPR (presumably): arg00 in [lo, hi].  */
6529 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6530 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg00
);
6531 if (TREE_OVERFLOW (hi
))
6532 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6533 if (TREE_OVERFLOW (lo
))
6534 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6535 return build_range_check (loc
, type
, arg00
, 1, lo
, hi
);
/* NE_EXPR (presumably): arg00 outside [lo, hi].  */
6538 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6539 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg00
);
6540 if (TREE_OVERFLOW (hi
))
6541 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6542 if (TREE_OVERFLOW (lo
))
6543 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6544 return build_range_check (loc
, type
, arg00
, 0, lo
, hi
);
/* LT_EXPR (presumably).  */
6547 if (TREE_OVERFLOW (lo
))
6549 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6550 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6552 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
/* LE_EXPR (presumably).  */
6555 if (TREE_OVERFLOW (hi
))
6557 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6558 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6560 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
/* GT_EXPR (presumably).  */
6563 if (TREE_OVERFLOW (hi
))
6565 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6566 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6568 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
/* GE_EXPR (presumably).  */
6571 if (TREE_OVERFLOW (lo
))
6573 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6574 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6576 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
/* NOTE(review): text mangled in extraction (split lines, embedded original
   line numbers, some lines missing).  Kept byte-identical; comments only.  */
6586 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6587 equality/inequality test, then return a simplified form of the test
6588 using a sign testing. Otherwise return NULL. TYPE is the desired
6592 fold_single_bit_test_into_sign_test (location_t loc
,
6593 enum tree_code code
, tree arg0
, tree arg1
,
/* Only (A & power-of-2) ==/!= 0 qualifies.  */
6596 /* If this is testing a single bit, we can optimize the test. */
6597 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6598 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6599 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6601 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6602 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6603 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6605 if (arg00
!= NULL_TREE
6606 /* This is only a win if casting to a signed type is cheap,
6607 i.e. when arg00's type is not a partial mode. */
&& TYPE_PRECISION (TREE_TYPE (arg00
))
6609 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00
))))
6611 tree stype
= signed_type_for (TREE_TYPE (arg00
));
/* == becomes >= 0, != becomes < 0, on the signed view of arg00.  */
6612 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6614 fold_convert_loc (loc
, stype
, arg00
),
6615 build_int_cst (stype
, 0));
/* NOTE(review): text mangled in extraction (split lines, embedded original
   line numbers, some lines missing).  Kept byte-identical; comments only.  */
6622 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6623 equality/inequality test, then return a simplified form of
6624 the test using shifts and logical operations. Otherwise return
6625 NULL. TYPE is the desired result type. */
6628 fold_single_bit_test (location_t loc
, enum tree_code code
,
6629 tree arg0
, tree arg1
, tree result_type
)
6631 /* If this is testing a single bit, we can optimize the test. */
6632 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6633 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6634 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6636 tree inner
= TREE_OPERAND (arg0
, 0);
6637 tree type
= TREE_TYPE (arg0
);
/* bitnum: index of the single tested bit, log2 of the mask.  */
6638 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6639 machine_mode operand_mode
= TYPE_MODE (type
);
6641 tree signed_type
, unsigned_type
, intermediate_type
;
6644 /* First, see if we can fold the single bit test into a sign-bit
6646 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6651 /* Otherwise we have (A & C) != 0 where C is a single bit,
6652 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6653 Similarly for (A & C) == 0. */
6655 /* If INNER is a right shift of a constant and it plus BITNUM does
6656 not overflow, adjust BITNUM and INNER. */
6657 if (TREE_CODE (inner
) == RSHIFT_EXPR
6658 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6659 && bitnum
< TYPE_PRECISION (type
)
6660 && wi::ltu_p (TREE_OPERAND (inner
, 1),
6661 TYPE_PRECISION (type
) - bitnum
))
6663 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
6664 inner
= TREE_OPERAND (inner
, 0);
6667 /* If we are going to be able to omit the AND below, we must do our
6668 operations as unsigned. If we must use the AND, we have a choice.
6669 Normally unsigned is faster, but for some machines signed is. */
6670 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6671 && !flag_syntax_only
) ? 0 : 1;
6673 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6674 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6675 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6676 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
/* Shift the tested bit down to bit 0.  */
6679 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6680 inner
, size_int (bitnum
));
6682 one
= build_int_cst (intermediate_type
, 1);
/* For ==, invert the bit so the final AND yields the test result.  */
6684 if (code
== EQ_EXPR
)
6685 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6687 /* Put the AND last so it can combine with more things. */
6688 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6690 /* Make sure to return the proper type. */
6691 inner
= fold_convert_loc (loc
, result_type
, inner
);
/* NOTE(review): text mangled in extraction (split lines, embedded original
   line numbers; the `return' bodies of the first two guards are among the
   missing lines).  Kept byte-identical; comments only.  */
6698 /* Check whether we are allowed to reorder operands arg0 and arg1,
6699 such that the evaluation of arg1 occurs before arg0. */
6702 reorder_operands_p (const_tree arg0
, const_tree arg1
)
/* Reordering is unrestricted unless -fevaluation-order is in effect.  */
6704 if (! flag_evaluation_order
)
/* Constants can always be reordered.  */
6706 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
/* Otherwise only side-effect-free operands may be reordered.  */
6708 return ! TREE_SIDE_EFFECTS (arg0
)
6709 && ! TREE_SIDE_EFFECTS (arg1
);
/* NOTE(review): text mangled in extraction (split lines, embedded original
   line numbers; the return statements after each test are among the missing
   lines).  Kept byte-identical; comments only.  */
6712 /* Test whether it is preferable two swap two operands, ARG0 and
6713 ARG1, for example because ARG0 is an integer constant and ARG1
6714 isn't. If REORDER is true, only recommend swapping if we can
6715 evaluate the operands in reverse order. */
6718 tree_swap_operands_p (const_tree arg0
, const_tree arg1
, bool reorder
)
/* Prefer constants on the right: CONSTANT_CLASS_P first, then the weaker
   TREE_CONSTANT.  (Result returns lost in extraction.)  */
6720 if (CONSTANT_CLASS_P (arg1
))
6722 if (CONSTANT_CLASS_P (arg0
))
6728 if (TREE_CONSTANT (arg1
))
6730 if (TREE_CONSTANT (arg0
))
/* Under -fevaluation-order, refuse to swap operands with side effects.  */
6733 if (reorder
&& flag_evaluation_order
6734 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6737 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6738 for commutative and comparison operators. Ensuring a canonical
6739 form allows the optimizers to find additional redundancies without
6740 having to explicitly check for both orderings. */
6741 if (TREE_CODE (arg0
) == SSA_NAME
6742 && TREE_CODE (arg1
) == SSA_NAME
6743 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6746 /* Put SSA_NAMEs last. */
6747 if (TREE_CODE (arg1
) == SSA_NAME
)
6749 if (TREE_CODE (arg0
) == SSA_NAME
)
6752 /* Put variables last. */
/* NOTE(review): text mangled in extraction (split lines, embedded original
   line numbers, some lines missing -- e.g. the failure `return NULL_TREE'
   paths and several braces).  Kept byte-identical; comments only.  */
6762 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6763 means A >= Y && A != MAX, but in this case we know that
6764 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6767 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
6769 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
/* Extract A from BOUND: A < X or X > A.  */
6771 if (TREE_CODE (bound
) == LT_EXPR
)
6772 a
= TREE_OPERAND (bound
, 0);
6773 else if (TREE_CODE (bound
) == GT_EXPR
)
6774 a
= TREE_OPERAND (bound
, 1);
6778 typea
= TREE_TYPE (a
);
6779 if (!INTEGRAL_TYPE_P (typea
)
6780 && !POINTER_TYPE_P (typea
))
/* Extract A+1 (a1) and Y from INEQ: Y < A+1 or A+1 > Y.  */
6783 if (TREE_CODE (ineq
) == LT_EXPR
)
6785 a1
= TREE_OPERAND (ineq
, 1);
6786 y
= TREE_OPERAND (ineq
, 0);
6788 else if (TREE_CODE (ineq
) == GT_EXPR
)
6790 a1
= TREE_OPERAND (ineq
, 0);
6791 y
= TREE_OPERAND (ineq
, 1);
6796 if (TREE_TYPE (a1
) != typea
)
/* The fold applies only when a1 - a == 1.  */
6799 if (POINTER_TYPE_P (typea
))
6801 /* Convert the pointer types into integer before taking the difference. */
6802 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
6803 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
6804 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
6807 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
6809 if (!diff
|| !integer_onep (diff
))
6812 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
6815 /* Fold a sum or difference of at least one multiplication.
6816 Returns the folded tree or NULL if no simplification could be made. */
6819 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
6820 tree arg0
, tree arg1
)
6822 tree arg00
, arg01
, arg10
, arg11
;
6823 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
6825 /* (A * C) +- (B * C) -> (A+-B) * C.
6826 (A * C) +- A -> A * (C+-1).
6827 We are most concerned about the case where C is a constant,
6828 but other combinations show up during loop reduction. Since
6829 it is not difficult, try all four possibilities. */
6831 if (TREE_CODE (arg0
) == MULT_EXPR
)
6833 arg00
= TREE_OPERAND (arg0
, 0);
6834 arg01
= TREE_OPERAND (arg0
, 1);
6836 else if (TREE_CODE (arg0
) == INTEGER_CST
)
6838 arg00
= build_one_cst (type
);
6843 /* We cannot generate constant 1 for fract. */
6844 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6847 arg01
= build_one_cst (type
);
6849 if (TREE_CODE (arg1
) == MULT_EXPR
)
6851 arg10
= TREE_OPERAND (arg1
, 0);
6852 arg11
= TREE_OPERAND (arg1
, 1);
6854 else if (TREE_CODE (arg1
) == INTEGER_CST
)
6856 arg10
= build_one_cst (type
);
6857 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6858 the purpose of this canonicalization. */
6859 if (wi::neg_p (arg1
, TYPE_SIGN (TREE_TYPE (arg1
)))
6860 && negate_expr_p (arg1
)
6861 && code
== PLUS_EXPR
)
6863 arg11
= negate_expr (arg1
);
6871 /* We cannot generate constant 1 for fract. */
6872 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6875 arg11
= build_one_cst (type
);
6879 if (operand_equal_p (arg01
, arg11
, 0))
6880 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
6881 else if (operand_equal_p (arg00
, arg10
, 0))
6882 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
6883 else if (operand_equal_p (arg00
, arg11
, 0))
6884 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
6885 else if (operand_equal_p (arg01
, arg10
, 0))
6886 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
6888 /* No identical multiplicands; see if we can find a common
6889 power-of-two factor in non-power-of-two multiplies. This
6890 can help in multi-dimensional array access. */
6891 else if (tree_fits_shwi_p (arg01
)
6892 && tree_fits_shwi_p (arg11
))
6894 HOST_WIDE_INT int01
, int11
, tmp
;
6897 int01
= tree_to_shwi (arg01
);
6898 int11
= tree_to_shwi (arg11
);
6900 /* Move min of absolute values to int11. */
6901 if (absu_hwi (int01
) < absu_hwi (int11
))
6903 tmp
= int01
, int01
= int11
, int11
= tmp
;
6904 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
6911 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
6912 /* The remainder should not be a constant, otherwise we
6913 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6914 increased the number of multiplications necessary. */
6915 && TREE_CODE (arg10
) != INTEGER_CST
)
6917 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
6918 build_int_cst (TREE_TYPE (arg00
),
6923 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
6928 return fold_build2_loc (loc
, MULT_EXPR
, type
,
6929 fold_build2_loc (loc
, code
, type
,
6930 fold_convert_loc (loc
, type
, alt0
),
6931 fold_convert_loc (loc
, type
, alt1
)),
6932 fold_convert_loc (loc
, type
, same
));
6937 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6938 specified by EXPR into the buffer PTR of length LEN bytes.
6939 Return the number of bytes placed in the buffer, or zero
6943 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
6945 tree type
= TREE_TYPE (expr
);
6946 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
6947 int byte
, offset
, word
, words
;
6948 unsigned char value
;
6950 if ((off
== -1 && total_bytes
> len
)
6951 || off
>= total_bytes
)
6955 words
= total_bytes
/ UNITS_PER_WORD
;
6957 for (byte
= 0; byte
< total_bytes
; byte
++)
6959 int bitpos
= byte
* BITS_PER_UNIT
;
6960 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
6962 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
6964 if (total_bytes
> UNITS_PER_WORD
)
6966 word
= byte
/ UNITS_PER_WORD
;
6967 if (WORDS_BIG_ENDIAN
)
6968 word
= (words
- 1) - word
;
6969 offset
= word
* UNITS_PER_WORD
;
6970 if (BYTES_BIG_ENDIAN
)
6971 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
6973 offset
+= byte
% UNITS_PER_WORD
;
6976 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
6978 && offset
- off
< len
)
6979 ptr
[offset
- off
] = value
;
6981 return MIN (len
, total_bytes
- off
);
6985 /* Subroutine of native_encode_expr. Encode the FIXED_CST
6986 specified by EXPR into the buffer PTR of length LEN bytes.
6987 Return the number of bytes placed in the buffer, or zero
6991 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
6993 tree type
= TREE_TYPE (expr
);
6994 machine_mode mode
= TYPE_MODE (type
);
6995 int total_bytes
= GET_MODE_SIZE (mode
);
6996 FIXED_VALUE_TYPE value
;
6997 tree i_value
, i_type
;
6999 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7002 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
7004 if (NULL_TREE
== i_type
7005 || TYPE_PRECISION (i_type
) != total_bytes
)
7008 value
= TREE_FIXED_CST (expr
);
7009 i_value
= double_int_to_tree (i_type
, value
.data
);
7011 return native_encode_int (i_value
, ptr
, len
, off
);
7015 /* Subroutine of native_encode_expr. Encode the REAL_CST
7016 specified by EXPR into the buffer PTR of length LEN bytes.
7017 Return the number of bytes placed in the buffer, or zero
7021 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7023 tree type
= TREE_TYPE (expr
);
7024 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7025 int byte
, offset
, word
, words
, bitpos
;
7026 unsigned char value
;
7028 /* There are always 32 bits in each long, no matter the size of
7029 the hosts long. We handle floating point representations with
7033 if ((off
== -1 && total_bytes
> len
)
7034 || off
>= total_bytes
)
7038 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7040 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7042 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7043 bitpos
+= BITS_PER_UNIT
)
7045 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7046 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7048 if (UNITS_PER_WORD
< 4)
7050 word
= byte
/ UNITS_PER_WORD
;
7051 if (WORDS_BIG_ENDIAN
)
7052 word
= (words
- 1) - word
;
7053 offset
= word
* UNITS_PER_WORD
;
7054 if (BYTES_BIG_ENDIAN
)
7055 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7057 offset
+= byte
% UNITS_PER_WORD
;
7060 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7061 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7063 && offset
- off
< len
)
7064 ptr
[offset
- off
] = value
;
7066 return MIN (len
, total_bytes
- off
);
7069 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7070 specified by EXPR into the buffer PTR of length LEN bytes.
7071 Return the number of bytes placed in the buffer, or zero
7075 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7080 part
= TREE_REALPART (expr
);
7081 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7085 part
= TREE_IMAGPART (expr
);
7087 off
= MAX (0, off
- GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part
))));
7088 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
, off
);
7092 return rsize
+ isize
;
7096 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7097 specified by EXPR into the buffer PTR of length LEN bytes.
7098 Return the number of bytes placed in the buffer, or zero
7102 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7109 count
= VECTOR_CST_NELTS (expr
);
7110 itype
= TREE_TYPE (TREE_TYPE (expr
));
7111 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7112 for (i
= 0; i
< count
; i
++)
7119 elem
= VECTOR_CST_ELT (expr
, i
);
7120 int res
= native_encode_expr (elem
, ptr
+offset
, len
-offset
, off
);
7121 if ((off
== -1 && res
!= size
)
7134 /* Subroutine of native_encode_expr. Encode the STRING_CST
7135 specified by EXPR into the buffer PTR of length LEN bytes.
7136 Return the number of bytes placed in the buffer, or zero
7140 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7142 tree type
= TREE_TYPE (expr
);
7143 HOST_WIDE_INT total_bytes
;
7145 if (TREE_CODE (type
) != ARRAY_TYPE
7146 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7147 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) != BITS_PER_UNIT
7148 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7150 total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (type
));
7151 if ((off
== -1 && total_bytes
> len
)
7152 || off
>= total_bytes
)
7156 if (TREE_STRING_LENGTH (expr
) - off
< MIN (total_bytes
, len
))
7159 if (off
< TREE_STRING_LENGTH (expr
))
7161 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7162 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7164 memset (ptr
+ written
, 0,
7165 MIN (total_bytes
- written
, len
- written
));
7168 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, MIN (total_bytes
, len
));
7169 return MIN (total_bytes
- off
, len
);
7173 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7174 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7175 buffer PTR of length LEN bytes. If OFF is not -1 then start
7176 the encoding at byte offset OFF and encode at most LEN bytes.
7177 Return the number of bytes placed in the buffer, or zero upon failure. */
7180 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7182 /* We don't support starting at negative offset and -1 is special. */
7186 switch (TREE_CODE (expr
))
7189 return native_encode_int (expr
, ptr
, len
, off
);
7192 return native_encode_real (expr
, ptr
, len
, off
);
7195 return native_encode_fixed (expr
, ptr
, len
, off
);
7198 return native_encode_complex (expr
, ptr
, len
, off
);
7201 return native_encode_vector (expr
, ptr
, len
, off
);
7204 return native_encode_string (expr
, ptr
, len
, off
);
7212 /* Subroutine of native_interpret_expr. Interpret the contents of
7213 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7214 If the buffer cannot be interpreted, return NULL_TREE. */
7217 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7219 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7221 if (total_bytes
> len
7222 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7225 wide_int result
= wi::from_buffer (ptr
, total_bytes
);
7227 return wide_int_to_tree (type
, result
);
7231 /* Subroutine of native_interpret_expr. Interpret the contents of
7232 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7233 If the buffer cannot be interpreted, return NULL_TREE. */
7236 native_interpret_fixed (tree type
, const unsigned char *ptr
, int len
)
7238 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7240 FIXED_VALUE_TYPE fixed_value
;
7242 if (total_bytes
> len
7243 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7246 result
= double_int::from_buffer (ptr
, total_bytes
);
7247 fixed_value
= fixed_from_double_int (result
, TYPE_MODE (type
));
7249 return build_fixed (type
, fixed_value
);
7253 /* Subroutine of native_interpret_expr. Interpret the contents of
7254 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7255 If the buffer cannot be interpreted, return NULL_TREE. */
7258 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7260 machine_mode mode
= TYPE_MODE (type
);
7261 int total_bytes
= GET_MODE_SIZE (mode
);
7262 unsigned char value
;
7263 /* There are always 32 bits in each long, no matter the size of
7264 the hosts long. We handle floating point representations with
7269 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7270 if (total_bytes
> len
|| total_bytes
> 24)
7272 int words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7274 memset (tmp
, 0, sizeof (tmp
));
7275 for (int bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7276 bitpos
+= BITS_PER_UNIT
)
7278 /* Both OFFSET and BYTE index within a long;
7279 bitpos indexes the whole float. */
7280 int offset
, byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7281 if (UNITS_PER_WORD
< 4)
7283 int word
= byte
/ UNITS_PER_WORD
;
7284 if (WORDS_BIG_ENDIAN
)
7285 word
= (words
- 1) - word
;
7286 offset
= word
* UNITS_PER_WORD
;
7287 if (BYTES_BIG_ENDIAN
)
7288 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7290 offset
+= byte
% UNITS_PER_WORD
;
7295 if (BYTES_BIG_ENDIAN
)
7297 /* Reverse bytes within each long, or within the entire float
7298 if it's smaller than a long (for HFmode). */
7299 offset
= MIN (3, total_bytes
- 1) - offset
;
7300 gcc_assert (offset
>= 0);
7303 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7305 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7308 real_from_target (&r
, tmp
, mode
);
7309 return build_real (type
, r
);
7313 /* Subroutine of native_interpret_expr. Interpret the contents of
7314 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7315 If the buffer cannot be interpreted, return NULL_TREE. */
7318 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7320 tree etype
, rpart
, ipart
;
7323 etype
= TREE_TYPE (type
);
7324 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7327 rpart
= native_interpret_expr (etype
, ptr
, size
);
7330 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7333 return build_complex (type
, rpart
, ipart
);
7337 /* Subroutine of native_interpret_expr. Interpret the contents of
7338 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7339 If the buffer cannot be interpreted, return NULL_TREE. */
7342 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7348 etype
= TREE_TYPE (type
);
7349 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7350 count
= TYPE_VECTOR_SUBPARTS (type
);
7351 if (size
* count
> len
)
7354 elements
= XALLOCAVEC (tree
, count
);
7355 for (i
= count
- 1; i
>= 0; i
--)
7357 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7362 return build_vector (type
, elements
);
7366 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7367 the buffer PTR of length LEN as a constant of type TYPE. For
7368 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7369 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7370 return NULL_TREE. */
7373 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7375 switch (TREE_CODE (type
))
7381 case REFERENCE_TYPE
:
7382 return native_interpret_int (type
, ptr
, len
);
7385 return native_interpret_real (type
, ptr
, len
);
7387 case FIXED_POINT_TYPE
:
7388 return native_interpret_fixed (type
, ptr
, len
);
7391 return native_interpret_complex (type
, ptr
, len
);
7394 return native_interpret_vector (type
, ptr
, len
);
7401 /* Returns true if we can interpret the contents of a native encoding
7405 can_native_interpret_type_p (tree type
)
7407 switch (TREE_CODE (type
))
7413 case REFERENCE_TYPE
:
7414 case FIXED_POINT_TYPE
:
7424 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7425 TYPE at compile-time. If we're unable to perform the conversion
7426 return NULL_TREE. */
7429 fold_view_convert_expr (tree type
, tree expr
)
7431 /* We support up to 512-bit values (for V8DFmode). */
7432 unsigned char buffer
[64];
7435 /* Check that the host and target are sane. */
7436 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7439 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7443 return native_interpret_expr (type
, buffer
, len
);
7446 /* Build an expression for the address of T. Folds away INDIRECT_REF
7447 to avoid confusing the gimplify process. */
7450 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7452 /* The size of the object is not relevant when talking about its address. */
7453 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7454 t
= TREE_OPERAND (t
, 0);
7456 if (TREE_CODE (t
) == INDIRECT_REF
)
7458 t
= TREE_OPERAND (t
, 0);
7460 if (TREE_TYPE (t
) != ptrtype
)
7461 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7463 else if (TREE_CODE (t
) == MEM_REF
7464 && integer_zerop (TREE_OPERAND (t
, 1)))
7465 return TREE_OPERAND (t
, 0);
7466 else if (TREE_CODE (t
) == MEM_REF
7467 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
7468 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
7469 TREE_OPERAND (t
, 0),
7470 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
7471 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7473 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7475 if (TREE_TYPE (t
) != ptrtype
)
7476 t
= fold_convert_loc (loc
, ptrtype
, t
);
7479 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
7484 /* Build an expression for the address of T. */
7487 build_fold_addr_expr_loc (location_t loc
, tree t
)
7489 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
7491 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
7494 /* Fold a unary expression of code CODE and type TYPE with operand
7495 OP0. Return the folded expression if folding is successful.
7496 Otherwise, return NULL_TREE. */
7499 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7503 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7505 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7506 && TREE_CODE_LENGTH (code
) == 1);
7511 if (CONVERT_EXPR_CODE_P (code
)
7512 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7514 /* Don't use STRIP_NOPS, because signedness of argument type
7516 STRIP_SIGN_NOPS (arg0
);
7520 /* Strip any conversions that don't change the mode. This
7521 is safe for every expression, except for a comparison
7522 expression because its signedness is derived from its
7525 Note that this is done as an internal manipulation within
7526 the constant folder, in order to find the simplest
7527 representation of the arguments so that their form can be
7528 studied. In any cases, the appropriate type conversions
7529 should be put back in the tree that will get out of the
7534 if (CONSTANT_CLASS_P (arg0
))
7536 tree tem
= const_unop (code
, type
, arg0
);
7539 if (TREE_TYPE (tem
) != type
)
7540 tem
= fold_convert_loc (loc
, type
, tem
);
7546 tem
= generic_simplify (loc
, code
, type
, op0
);
7550 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7552 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7553 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7554 fold_build1_loc (loc
, code
, type
,
7555 fold_convert_loc (loc
, TREE_TYPE (op0
),
7556 TREE_OPERAND (arg0
, 1))));
7557 else if (TREE_CODE (arg0
) == COND_EXPR
)
7559 tree arg01
= TREE_OPERAND (arg0
, 1);
7560 tree arg02
= TREE_OPERAND (arg0
, 2);
7561 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7562 arg01
= fold_build1_loc (loc
, code
, type
,
7563 fold_convert_loc (loc
,
7564 TREE_TYPE (op0
), arg01
));
7565 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7566 arg02
= fold_build1_loc (loc
, code
, type
,
7567 fold_convert_loc (loc
,
7568 TREE_TYPE (op0
), arg02
));
7569 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7572 /* If this was a conversion, and all we did was to move into
7573 inside the COND_EXPR, bring it back out. But leave it if
7574 it is a conversion from integer to integer and the
7575 result precision is no wider than a word since such a
7576 conversion is cheap and may be optimized away by combine,
7577 while it couldn't if it were outside the COND_EXPR. Then return
7578 so we don't get into an infinite recursion loop taking the
7579 conversion out and then back in. */
7581 if ((CONVERT_EXPR_CODE_P (code
)
7582 || code
== NON_LVALUE_EXPR
)
7583 && TREE_CODE (tem
) == COND_EXPR
7584 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7585 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7586 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7587 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7588 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7589 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7590 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7592 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7593 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7594 || flag_syntax_only
))
7595 tem
= build1_loc (loc
, code
, type
,
7597 TREE_TYPE (TREE_OPERAND
7598 (TREE_OPERAND (tem
, 1), 0)),
7599 TREE_OPERAND (tem
, 0),
7600 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7601 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7609 case NON_LVALUE_EXPR
:
7610 if (!maybe_lvalue_p (op0
))
7611 return fold_convert_loc (loc
, type
, op0
);
7616 case FIX_TRUNC_EXPR
:
7617 if (COMPARISON_CLASS_P (op0
))
7619 /* If we have (type) (a CMP b) and type is an integral type, return
7620 new expression involving the new type. Canonicalize
7621 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7623 Do not fold the result as that would not simplify further, also
7624 folding again results in recursions. */
7625 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7626 return build2_loc (loc
, TREE_CODE (op0
), type
,
7627 TREE_OPERAND (op0
, 0),
7628 TREE_OPERAND (op0
, 1));
7629 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7630 && TREE_CODE (type
) != VECTOR_TYPE
)
7631 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7632 constant_boolean_node (true, type
),
7633 constant_boolean_node (false, type
));
7636 /* Handle (T *)&A.B.C for A being of type T and B and C
7637 living at offset zero. This occurs frequently in
7638 C++ upcasting and then accessing the base. */
7639 if (TREE_CODE (op0
) == ADDR_EXPR
7640 && POINTER_TYPE_P (type
)
7641 && handled_component_p (TREE_OPERAND (op0
, 0)))
7643 HOST_WIDE_INT bitsize
, bitpos
;
7646 int unsignedp
, volatilep
;
7647 tree base
= TREE_OPERAND (op0
, 0);
7648 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7649 &mode
, &unsignedp
, &volatilep
, false);
7650 /* If the reference was to a (constant) zero offset, we can use
7651 the address of the base if it has the same base type
7652 as the result type and the pointer type is unqualified. */
7653 if (! offset
&& bitpos
== 0
7654 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7655 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7656 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7657 return fold_convert_loc (loc
, type
,
7658 build_fold_addr_expr_loc (loc
, base
));
7661 if (TREE_CODE (op0
) == MODIFY_EXPR
7662 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7663 /* Detect assigning a bitfield. */
7664 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7666 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7668 /* Don't leave an assignment inside a conversion
7669 unless assigning a bitfield. */
7670 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7671 /* First do the assignment, then return converted constant. */
7672 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7673 TREE_NO_WARNING (tem
) = 1;
7674 TREE_USED (tem
) = 1;
7678 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7679 constants (if x has signed type, the sign bit cannot be set
7680 in c). This folds extension into the BIT_AND_EXPR.
7681 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7682 very likely don't have maximal range for their precision and this
7683 transformation effectively doesn't preserve non-maximal ranges. */
7684 if (TREE_CODE (type
) == INTEGER_TYPE
7685 && TREE_CODE (op0
) == BIT_AND_EXPR
7686 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7688 tree and_expr
= op0
;
7689 tree and0
= TREE_OPERAND (and_expr
, 0);
7690 tree and1
= TREE_OPERAND (and_expr
, 1);
7693 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7694 || (TYPE_PRECISION (type
)
7695 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7697 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7698 <= HOST_BITS_PER_WIDE_INT
7699 && tree_fits_uhwi_p (and1
))
7701 unsigned HOST_WIDE_INT cst
;
7703 cst
= tree_to_uhwi (and1
);
7704 cst
&= HOST_WIDE_INT_M1U
7705 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7706 change
= (cst
== 0);
7708 && !flag_syntax_only
7709 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7712 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7713 and0
= fold_convert_loc (loc
, uns
, and0
);
7714 and1
= fold_convert_loc (loc
, uns
, and1
);
7719 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
7720 TREE_OVERFLOW (and1
));
7721 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7722 fold_convert_loc (loc
, type
, and0
), tem
);
7726 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7727 when one of the new casts will fold away. Conservatively we assume
7728 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7729 if (POINTER_TYPE_P (type
)
7730 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7731 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
7732 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7733 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7734 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7736 tree arg00
= TREE_OPERAND (arg0
, 0);
7737 tree arg01
= TREE_OPERAND (arg0
, 1);
7739 return fold_build_pointer_plus_loc
7740 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7743 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7744 of the same precision, and X is an integer type not narrower than
7745 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7746 if (INTEGRAL_TYPE_P (type
)
7747 && TREE_CODE (op0
) == BIT_NOT_EXPR
7748 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7749 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7750 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7752 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7753 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7754 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7755 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7756 fold_convert_loc (loc
, type
, tem
));
7759 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7760 type of X and Y (integer types only). */
7761 if (INTEGRAL_TYPE_P (type
)
7762 && TREE_CODE (op0
) == MULT_EXPR
7763 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7764 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7766 /* Be careful not to introduce new overflows. */
7768 if (TYPE_OVERFLOW_WRAPS (type
))
7771 mult_type
= unsigned_type_for (type
);
7773 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7775 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
7776 fold_convert_loc (loc
, mult_type
,
7777 TREE_OPERAND (op0
, 0)),
7778 fold_convert_loc (loc
, mult_type
,
7779 TREE_OPERAND (op0
, 1)));
7780 return fold_convert_loc (loc
, type
, tem
);
7786 case VIEW_CONVERT_EXPR
:
7787 if (TREE_CODE (op0
) == MEM_REF
)
7788 return fold_build2_loc (loc
, MEM_REF
, type
,
7789 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
7794 tem
= fold_negate_expr (loc
, arg0
);
7796 return fold_convert_loc (loc
, type
, tem
);
7800 /* Convert fabs((double)float) into (double)fabsf(float). */
7801 if (TREE_CODE (arg0
) == NOP_EXPR
7802 && TREE_CODE (type
) == REAL_TYPE
)
7804 tree targ0
= strip_float_extensions (arg0
);
7806 return fold_convert_loc (loc
, type
,
7807 fold_build1_loc (loc
, ABS_EXPR
,
7814 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7815 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7816 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7817 fold_convert_loc (loc
, type
,
7818 TREE_OPERAND (arg0
, 0)))))
7819 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
7820 fold_convert_loc (loc
, type
,
7821 TREE_OPERAND (arg0
, 1)));
7822 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7823 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7824 fold_convert_loc (loc
, type
,
7825 TREE_OPERAND (arg0
, 1)))))
7826 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
7827 fold_convert_loc (loc
, type
,
7828 TREE_OPERAND (arg0
, 0)), tem
);
7832 case TRUTH_NOT_EXPR
:
7833 /* Note that the operand of this must be an int
7834 and its values must be 0 or 1.
7835 ("true" is a fixed value perhaps depending on the language,
7836 but we don't handle values other than 1 correctly yet.) */
7837 tem
= fold_truth_not_expr (loc
, arg0
);
7840 return fold_convert_loc (loc
, type
, tem
);
7843 /* Fold *&X to X if X is an lvalue. */
7844 if (TREE_CODE (op0
) == ADDR_EXPR
)
7846 tree op00
= TREE_OPERAND (op0
, 0);
7847 if ((TREE_CODE (op00
) == VAR_DECL
7848 || TREE_CODE (op00
) == PARM_DECL
7849 || TREE_CODE (op00
) == RESULT_DECL
)
7850 && !TREE_READONLY (op00
))
7857 } /* switch (code) */
7861 /* If the operation was a conversion do _not_ mark a resulting constant
7862 with TREE_OVERFLOW if the original constant was not. These conversions
7863 have implementation defined behavior and retaining the TREE_OVERFLOW
7864 flag here would confuse later passes such as VRP. */
7866 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
7867 tree type
, tree op0
)
7869 tree res
= fold_unary_loc (loc
, code
, type
, op0
);
7871 && TREE_CODE (res
) == INTEGER_CST
7872 && TREE_CODE (op0
) == INTEGER_CST
7873 && CONVERT_EXPR_CODE_P (code
))
7874 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
7879 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7880 operands OP0 and OP1. LOC is the location of the resulting expression.
7881 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
7882 Return the folded expression if folding is successful. Otherwise,
7883 return NULL_TREE. */
7885 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
7886 tree arg0
, tree arg1
, tree op0
, tree op1
)
7890 /* We only do these simplifications if we are optimizing. */
7894 /* Check for things like (A || B) && (A || C). We can convert this
7895 to A || (B && C). Note that either operator can be any of the four
7896 truth and/or operations and the transformation will still be
7897 valid. Also note that we only care about order for the
7898 ANDIF and ORIF operators. If B contains side effects, this
7899 might change the truth-value of A. */
7900 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
7901 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
7902 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
7903 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
7904 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
7905 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
7907 tree a00
= TREE_OPERAND (arg0
, 0);
7908 tree a01
= TREE_OPERAND (arg0
, 1);
7909 tree a10
= TREE_OPERAND (arg1
, 0);
7910 tree a11
= TREE_OPERAND (arg1
, 1);
7911 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
7912 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
7913 && (code
== TRUTH_AND_EXPR
7914 || code
== TRUTH_OR_EXPR
));
7916 if (operand_equal_p (a00
, a10
, 0))
7917 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
7918 fold_build2_loc (loc
, code
, type
, a01
, a11
));
7919 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
7920 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
7921 fold_build2_loc (loc
, code
, type
, a01
, a10
));
7922 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
7923 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
7924 fold_build2_loc (loc
, code
, type
, a00
, a11
));
7926 /* This case if tricky because we must either have commutative
7927 operators or else A10 must not have side-effects. */
7929 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
7930 && operand_equal_p (a01
, a11
, 0))
7931 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
7932 fold_build2_loc (loc
, code
, type
, a00
, a10
),
7936 /* See if we can build a range comparison. */
7937 if (0 != (tem
= fold_range_test (loc
, code
, type
, op0
, op1
)))
7940 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
7941 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
7943 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
7945 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
7948 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
7949 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
7951 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
7953 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
7956 /* Check for the possibility of merging component references. If our
7957 lhs is another similar operation, try to merge its rhs with our
7958 rhs. Then try to merge our lhs and rhs. */
7959 if (TREE_CODE (arg0
) == code
7960 && 0 != (tem
= fold_truth_andor_1 (loc
, code
, type
,
7961 TREE_OPERAND (arg0
, 1), arg1
)))
7962 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
7964 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
7967 if (LOGICAL_OP_NON_SHORT_CIRCUIT
7968 && (code
== TRUTH_AND_EXPR
7969 || code
== TRUTH_ANDIF_EXPR
7970 || code
== TRUTH_OR_EXPR
7971 || code
== TRUTH_ORIF_EXPR
))
7973 enum tree_code ncode
, icode
;
7975 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
7976 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
7977 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
7979 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
7980 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
7981 We don't want to pack more than two leafs to a non-IF AND/OR
7983 If tree-code of left-hand operand isn't an AND/OR-IF code and not
7984 equal to IF-CODE, then we don't want to add right-hand operand.
7985 If the inner right-hand side of left-hand operand has
7986 side-effects, or isn't simple, then we can't add to it,
7987 as otherwise we might destroy if-sequence. */
7988 if (TREE_CODE (arg0
) == icode
7989 && simple_operand_p_2 (arg1
)
7990 /* Needed for sequence points to handle trappings, and
7992 && simple_operand_p_2 (TREE_OPERAND (arg0
, 1)))
7994 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
7996 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
7999 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8000 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8001 else if (TREE_CODE (arg1
) == icode
8002 && simple_operand_p_2 (arg0
)
8003 /* Needed for sequence points to handle trappings, and
8005 && simple_operand_p_2 (TREE_OPERAND (arg1
, 0)))
8007 tem
= fold_build2_loc (loc
, ncode
, type
,
8008 arg0
, TREE_OPERAND (arg1
, 0));
8009 return fold_build2_loc (loc
, icode
, type
, tem
,
8010 TREE_OPERAND (arg1
, 1));
8012 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8014 For sequence point consistency, we need to check for trapping,
8015 and side-effects. */
8016 else if (code
== icode
&& simple_operand_p_2 (arg0
)
8017 && simple_operand_p_2 (arg1
))
8018 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
8024 /* Fold a binary expression of code CODE and type TYPE with operands
8025 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8026 Return the folded expression if folding is successful. Otherwise,
8027 return NULL_TREE. */
8030 fold_minmax (location_t loc
, enum tree_code code
, tree type
, tree op0
, tree op1
)
8032 enum tree_code compl_code
;
8034 if (code
== MIN_EXPR
)
8035 compl_code
= MAX_EXPR
;
8036 else if (code
== MAX_EXPR
)
8037 compl_code
= MIN_EXPR
;
8041 /* MIN (MAX (a, b), b) == b. */
8042 if (TREE_CODE (op0
) == compl_code
8043 && operand_equal_p (TREE_OPERAND (op0
, 1), op1
, 0))
8044 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 0));
8046 /* MIN (MAX (b, a), b) == b. */
8047 if (TREE_CODE (op0
) == compl_code
8048 && operand_equal_p (TREE_OPERAND (op0
, 0), op1
, 0)
8049 && reorder_operands_p (TREE_OPERAND (op0
, 1), op1
))
8050 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 1));
8052 /* MIN (a, MAX (a, b)) == a. */
8053 if (TREE_CODE (op1
) == compl_code
8054 && operand_equal_p (op0
, TREE_OPERAND (op1
, 0), 0)
8055 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 1)))
8056 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 1));
8058 /* MIN (a, MAX (b, a)) == a. */
8059 if (TREE_CODE (op1
) == compl_code
8060 && operand_equal_p (op0
, TREE_OPERAND (op1
, 1), 0)
8061 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 0)))
8062 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 0));
8067 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8068 by changing CODE to reduce the magnitude of constants involved in
8069 ARG0 of the comparison.
8070 Returns a canonicalized comparison tree if a simplification was
8071 possible, otherwise returns NULL_TREE.
8072 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8073 valid if signed overflow is undefined. */
8076 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
8077 tree arg0
, tree arg1
,
8078 bool *strict_overflow_p
)
8080 enum tree_code code0
= TREE_CODE (arg0
);
8081 tree t
, cst0
= NULL_TREE
;
8084 /* Match A +- CST code arg1. We can change this only if overflow
8086 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8087 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
)))
8088 /* In principle pointers also have undefined overflow behavior,
8089 but that causes problems elsewhere. */
8090 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
8091 && (code0
== MINUS_EXPR
8092 || code0
== PLUS_EXPR
)
8093 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
))
8096 /* Identify the constant in arg0 and its sign. */
8097 cst0
= TREE_OPERAND (arg0
, 1);
8098 sgn0
= tree_int_cst_sgn (cst0
);
8100 /* Overflowed constants and zero will cause problems. */
8101 if (integer_zerop (cst0
)
8102 || TREE_OVERFLOW (cst0
))
8105 /* See if we can reduce the magnitude of the constant in
8106 arg0 by changing the comparison code. */
8107 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8109 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8111 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8112 else if (code
== GT_EXPR
8113 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8115 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8116 else if (code
== LE_EXPR
8117 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8119 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8120 else if (code
== GE_EXPR
8121 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8125 *strict_overflow_p
= true;
8127 /* Now build the constant reduced in magnitude. But not if that
8128 would produce one outside of its types range. */
8129 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
8131 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
8132 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
8134 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
8135 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
8138 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
8139 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
8140 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
8141 t
= fold_convert (TREE_TYPE (arg1
), t
);
8143 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
8146 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8147 overflow further. Try to decrease the magnitude of constants involved
8148 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8149 and put sole constants at the second argument position.
8150 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8153 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
8154 tree arg0
, tree arg1
)
8157 bool strict_overflow_p
;
8158 const char * const warnmsg
= G_("assuming signed overflow does not occur "
8159 "when reducing constant in comparison");
8161 /* Try canonicalization by simplifying arg0. */
8162 strict_overflow_p
= false;
8163 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
8164 &strict_overflow_p
);
8167 if (strict_overflow_p
)
8168 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8172 /* Try canonicalization by simplifying arg1 using the swapped
8174 code
= swap_tree_comparison (code
);
8175 strict_overflow_p
= false;
8176 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
8177 &strict_overflow_p
);
8178 if (t
&& strict_overflow_p
)
8179 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8183 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8184 space. This is used to avoid issuing overflow warnings for
8185 expressions like &p->x which can not wrap. */
8188 pointer_may_wrap_p (tree base
, tree offset
, HOST_WIDE_INT bitpos
)
8190 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
8197 int precision
= TYPE_PRECISION (TREE_TYPE (base
));
8198 if (offset
== NULL_TREE
)
8199 wi_offset
= wi::zero (precision
);
8200 else if (TREE_CODE (offset
) != INTEGER_CST
|| TREE_OVERFLOW (offset
))
8206 wide_int units
= wi::shwi (bitpos
/ BITS_PER_UNIT
, precision
);
8207 wide_int total
= wi::add (wi_offset
, units
, UNSIGNED
, &overflow
);
8211 if (!wi::fits_uhwi_p (total
))
8214 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (TREE_TYPE (base
)));
8218 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8220 if (TREE_CODE (base
) == ADDR_EXPR
)
8222 HOST_WIDE_INT base_size
;
8224 base_size
= int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base
, 0)));
8225 if (base_size
> 0 && size
< base_size
)
8229 return total
.to_uhwi () > (unsigned HOST_WIDE_INT
) size
;
8232 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8233 kind INTEGER_CST. This makes sure to properly sign-extend the
8236 static HOST_WIDE_INT
8237 size_low_cst (const_tree t
)
8239 HOST_WIDE_INT w
= TREE_INT_CST_ELT (t
, 0);
8240 int prec
= TYPE_PRECISION (TREE_TYPE (t
));
8241 if (prec
< HOST_BITS_PER_WIDE_INT
)
8242 return sext_hwi (w
, prec
);
8246 /* Subroutine of fold_binary. This routine performs all of the
8247 transformations that are common to the equality/inequality
8248 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8249 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8250 fold_binary should call fold_binary. Fold a comparison with
8251 tree code CODE and type TYPE with operands OP0 and OP1. Return
8252 the folded comparison or NULL_TREE. */
8255 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8258 const bool equality_code
= (code
== EQ_EXPR
|| code
== NE_EXPR
);
8259 tree arg0
, arg1
, tem
;
8264 STRIP_SIGN_NOPS (arg0
);
8265 STRIP_SIGN_NOPS (arg1
);
8267 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8268 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8270 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8271 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
8272 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8273 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8274 && TREE_CODE (arg1
) == INTEGER_CST
8275 && !TREE_OVERFLOW (arg1
))
8277 const enum tree_code
8278 reverse_op
= TREE_CODE (arg0
) == PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
;
8279 tree const1
= TREE_OPERAND (arg0
, 1);
8280 tree const2
= fold_convert_loc (loc
, TREE_TYPE (const1
), arg1
);
8281 tree variable
= TREE_OPERAND (arg0
, 0);
8282 tree new_const
= int_const_binop (reverse_op
, const2
, const1
);
8284 /* If the constant operation overflowed this can be
8285 simplified as a comparison against INT_MAX/INT_MIN. */
8286 if (TREE_OVERFLOW (new_const
)
8287 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
8289 int const1_sgn
= tree_int_cst_sgn (const1
);
8290 enum tree_code code2
= code
;
8292 /* Get the sign of the constant on the lhs if the
8293 operation were VARIABLE + CONST1. */
8294 if (TREE_CODE (arg0
) == MINUS_EXPR
)
8295 const1_sgn
= -const1_sgn
;
8297 /* The sign of the constant determines if we overflowed
8298 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8299 Canonicalize to the INT_MIN overflow by swapping the comparison
8301 if (const1_sgn
== -1)
8302 code2
= swap_tree_comparison (code
);
8304 /* We now can look at the canonicalized case
8305 VARIABLE + 1 CODE2 INT_MIN
8306 and decide on the result. */
8313 omit_one_operand_loc (loc
, type
, boolean_false_node
, variable
);
8319 omit_one_operand_loc (loc
, type
, boolean_true_node
, variable
);
8328 fold_overflow_warning ("assuming signed overflow does not occur "
8329 "when changing X +- C1 cmp C2 to "
8331 WARN_STRICT_OVERFLOW_COMPARISON
);
8332 return fold_build2_loc (loc
, code
, type
, variable
, new_const
);
8336 /* For comparisons of pointers we can decompose it to a compile time
8337 comparison of the base objects and the offsets into the object.
8338 This requires at least one operand being an ADDR_EXPR or a
8339 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8340 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8341 && (TREE_CODE (arg0
) == ADDR_EXPR
8342 || TREE_CODE (arg1
) == ADDR_EXPR
8343 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8344 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8346 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8347 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8349 int volatilep
, unsignedp
;
8350 bool indirect_base0
= false, indirect_base1
= false;
8352 /* Get base and offset for the access. Strip ADDR_EXPR for
8353 get_inner_reference, but put it back by stripping INDIRECT_REF
8354 off the base object if possible. indirect_baseN will be true
8355 if baseN is not an address but refers to the object itself. */
8357 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8359 base0
= get_inner_reference (TREE_OPERAND (arg0
, 0),
8360 &bitsize
, &bitpos0
, &offset0
, &mode
,
8361 &unsignedp
, &volatilep
, false);
8362 if (TREE_CODE (base0
) == INDIRECT_REF
)
8363 base0
= TREE_OPERAND (base0
, 0);
8365 indirect_base0
= true;
8367 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8369 base0
= TREE_OPERAND (arg0
, 0);
8370 STRIP_SIGN_NOPS (base0
);
8371 if (TREE_CODE (base0
) == ADDR_EXPR
)
8373 base0
= TREE_OPERAND (base0
, 0);
8374 indirect_base0
= true;
8376 offset0
= TREE_OPERAND (arg0
, 1);
8377 if (tree_fits_shwi_p (offset0
))
8379 HOST_WIDE_INT off
= size_low_cst (offset0
);
8380 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8382 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8384 bitpos0
= off
* BITS_PER_UNIT
;
8385 offset0
= NULL_TREE
;
8391 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8393 base1
= get_inner_reference (TREE_OPERAND (arg1
, 0),
8394 &bitsize
, &bitpos1
, &offset1
, &mode
,
8395 &unsignedp
, &volatilep
, false);
8396 if (TREE_CODE (base1
) == INDIRECT_REF
)
8397 base1
= TREE_OPERAND (base1
, 0);
8399 indirect_base1
= true;
8401 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8403 base1
= TREE_OPERAND (arg1
, 0);
8404 STRIP_SIGN_NOPS (base1
);
8405 if (TREE_CODE (base1
) == ADDR_EXPR
)
8407 base1
= TREE_OPERAND (base1
, 0);
8408 indirect_base1
= true;
8410 offset1
= TREE_OPERAND (arg1
, 1);
8411 if (tree_fits_shwi_p (offset1
))
8413 HOST_WIDE_INT off
= size_low_cst (offset1
);
8414 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8416 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8418 bitpos1
= off
* BITS_PER_UNIT
;
8419 offset1
= NULL_TREE
;
8424 /* If we have equivalent bases we might be able to simplify. */
8425 if (indirect_base0
== indirect_base1
8426 && operand_equal_p (base0
, base1
,
8427 indirect_base0
? OEP_ADDRESS_OF
: 0))
8429 /* We can fold this expression to a constant if the non-constant
8430 offset parts are equal. */
8431 if ((offset0
== offset1
8432 || (offset0
&& offset1
8433 && operand_equal_p (offset0
, offset1
, 0)))
8436 || (indirect_base0
&& DECL_P (base0
))
8437 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8441 && bitpos0
!= bitpos1
8442 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8443 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8444 fold_overflow_warning (("assuming pointer wraparound does not "
8445 "occur when comparing P +- C1 with "
8447 WARN_STRICT_OVERFLOW_CONDITIONAL
);
8452 return constant_boolean_node (bitpos0
== bitpos1
, type
);
8454 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
8456 return constant_boolean_node (bitpos0
< bitpos1
, type
);
8458 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
8460 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
8462 return constant_boolean_node (bitpos0
> bitpos1
, type
);
8466 /* We can simplify the comparison to a comparison of the variable
8467 offset parts if the constant offset parts are equal.
8468 Be careful to use signed sizetype here because otherwise we
8469 mess with array offsets in the wrong way. This is possible
8470 because pointer arithmetic is restricted to remain within an
8471 object and overflow on pointer differences is undefined as of
8472 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8473 else if (bitpos0
== bitpos1
8475 || (indirect_base0
&& DECL_P (base0
))
8476 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8478 /* By converting to signed sizetype we cover middle-end pointer
8479 arithmetic which operates on unsigned pointer types of size
8480 type size and ARRAY_REF offsets which are properly sign or
8481 zero extended from their type in case it is narrower than
8483 if (offset0
== NULL_TREE
)
8484 offset0
= build_int_cst (ssizetype
, 0);
8486 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
8487 if (offset1
== NULL_TREE
)
8488 offset1
= build_int_cst (ssizetype
, 0);
8490 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
8493 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8494 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8495 fold_overflow_warning (("assuming pointer wraparound does not "
8496 "occur when comparing P +- C1 with "
8498 WARN_STRICT_OVERFLOW_COMPARISON
);
8500 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
8503 /* For equal offsets we can simplify to a comparison of the
8505 else if (bitpos0
== bitpos1
8507 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
8509 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
8510 && ((offset0
== offset1
)
8511 || (offset0
&& offset1
8512 && operand_equal_p (offset0
, offset1
, 0))))
8515 base0
= build_fold_addr_expr_loc (loc
, base0
);
8517 base1
= build_fold_addr_expr_loc (loc
, base1
);
8518 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
8522 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8523 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8524 the resulting offset is smaller in absolute value than the
8525 original one and has the same sign. */
8526 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8527 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8528 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8529 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8530 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
8531 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
8532 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8533 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
8535 tree const1
= TREE_OPERAND (arg0
, 1);
8536 tree const2
= TREE_OPERAND (arg1
, 1);
8537 tree variable1
= TREE_OPERAND (arg0
, 0);
8538 tree variable2
= TREE_OPERAND (arg1
, 0);
8540 const char * const warnmsg
= G_("assuming signed overflow does not "
8541 "occur when combining constants around "
8544 /* Put the constant on the side where it doesn't overflow and is
8545 of lower absolute value and of same sign than before. */
8546 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8547 ? MINUS_EXPR
: PLUS_EXPR
,
8549 if (!TREE_OVERFLOW (cst
)
8550 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
8551 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
8553 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8554 return fold_build2_loc (loc
, code
, type
,
8556 fold_build2_loc (loc
, TREE_CODE (arg1
),
8561 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8562 ? MINUS_EXPR
: PLUS_EXPR
,
8564 if (!TREE_OVERFLOW (cst
)
8565 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
8566 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
8568 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8569 return fold_build2_loc (loc
, code
, type
,
8570 fold_build2_loc (loc
, TREE_CODE (arg0
),
8577 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
8581 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8582 constant, we can simplify it. */
8583 if (TREE_CODE (arg1
) == INTEGER_CST
8584 && (TREE_CODE (arg0
) == MIN_EXPR
8585 || TREE_CODE (arg0
) == MAX_EXPR
)
8586 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8588 tem
= optimize_minmax_comparison (loc
, code
, type
, op0
, op1
);
8593 /* If we are comparing an expression that just has comparisons
8594 of two integer values, arithmetic expressions of those comparisons,
8595 and constants, we can simplify it. There are only three cases
8596 to check: the two values can either be equal, the first can be
8597 greater, or the second can be greater. Fold the expression for
8598 those three values. Since each value must be 0 or 1, we have
8599 eight possibilities, each of which corresponds to the constant 0
8600 or 1 or one of the six possible comparisons.
8602 This handles common cases like (a > b) == 0 but also handles
8603 expressions like ((x > y) - (y > x)) > 0, which supposedly
8604 occur in macroized code. */
8606 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
8608 tree cval1
= 0, cval2
= 0;
8611 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
8612 /* Don't handle degenerate cases here; they should already
8613 have been handled anyway. */
8614 && cval1
!= 0 && cval2
!= 0
8615 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
8616 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
8617 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
8618 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
8619 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
8620 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
8621 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
8623 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
8624 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
8626 /* We can't just pass T to eval_subst in case cval1 or cval2
8627 was the same as ARG1. */
8630 = fold_build2_loc (loc
, code
, type
,
8631 eval_subst (loc
, arg0
, cval1
, maxval
,
8635 = fold_build2_loc (loc
, code
, type
,
8636 eval_subst (loc
, arg0
, cval1
, maxval
,
8640 = fold_build2_loc (loc
, code
, type
,
8641 eval_subst (loc
, arg0
, cval1
, minval
,
8645 /* All three of these results should be 0 or 1. Confirm they are.
8646 Then use those values to select the proper code to use. */
8648 if (TREE_CODE (high_result
) == INTEGER_CST
8649 && TREE_CODE (equal_result
) == INTEGER_CST
8650 && TREE_CODE (low_result
) == INTEGER_CST
)
8652 /* Make a 3-bit mask with the high-order bit being the
8653 value for `>', the next for '=', and the low for '<'. */
8654 switch ((integer_onep (high_result
) * 4)
8655 + (integer_onep (equal_result
) * 2)
8656 + integer_onep (low_result
))
8660 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
8681 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
8686 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
8687 SET_EXPR_LOCATION (tem
, loc
);
8690 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
8695 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8696 into a single range test. */
8697 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
8698 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
8699 && TREE_CODE (arg1
) == INTEGER_CST
8700 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8701 && !integer_zerop (TREE_OPERAND (arg0
, 1))
8702 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8703 && !TREE_OVERFLOW (arg1
))
8705 tem
= fold_div_compare (loc
, code
, type
, arg0
, arg1
);
8706 if (tem
!= NULL_TREE
)
8714 /* Subroutine of fold_binary. Optimize complex multiplications of the
8715 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8716 argument EXPR represents the expression "z" of type TYPE. */
8719 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
8721 tree itype
= TREE_TYPE (type
);
8722 tree rpart
, ipart
, tem
;
8724 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
8726 rpart
= TREE_OPERAND (expr
, 0);
8727 ipart
= TREE_OPERAND (expr
, 1);
8729 else if (TREE_CODE (expr
) == COMPLEX_CST
)
8731 rpart
= TREE_REALPART (expr
);
8732 ipart
= TREE_IMAGPART (expr
);
8736 expr
= save_expr (expr
);
8737 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
8738 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
8741 rpart
= save_expr (rpart
);
8742 ipart
= save_expr (ipart
);
8743 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
8744 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
8745 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
8746 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
8747 build_zero_cst (itype
));
8751 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8752 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8755 vec_cst_ctor_to_array (tree arg
, tree
*elts
)
8757 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg
)), i
;
8759 if (TREE_CODE (arg
) == VECTOR_CST
)
8761 for (i
= 0; i
< VECTOR_CST_NELTS (arg
); ++i
)
8762 elts
[i
] = VECTOR_CST_ELT (arg
, i
);
8764 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
8766 constructor_elt
*elt
;
8768 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg
), i
, elt
)
8769 if (i
>= nelts
|| TREE_CODE (TREE_TYPE (elt
->value
)) == VECTOR_TYPE
)
8772 elts
[i
] = elt
->value
;
8776 for (; i
< nelts
; i
++)
8778 = fold_convert (TREE_TYPE (TREE_TYPE (arg
)), integer_zero_node
);
8782 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8783 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8784 NULL_TREE otherwise. */
8787 fold_vec_perm (tree type
, tree arg0
, tree arg1
, const unsigned char *sel
)
8789 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
8791 bool need_ctor
= false;
8793 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
8794 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
);
8795 if (TREE_TYPE (TREE_TYPE (arg0
)) != TREE_TYPE (type
)
8796 || TREE_TYPE (TREE_TYPE (arg1
)) != TREE_TYPE (type
))
8799 elts
= XALLOCAVEC (tree
, nelts
* 3);
8800 if (!vec_cst_ctor_to_array (arg0
, elts
)
8801 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
))
8804 for (i
= 0; i
< nelts
; i
++)
8806 if (!CONSTANT_CLASS_P (elts
[sel
[i
]]))
8808 elts
[i
+ 2 * nelts
] = unshare_expr (elts
[sel
[i
]]);
8813 vec
<constructor_elt
, va_gc
> *v
;
8814 vec_alloc (v
, nelts
);
8815 for (i
= 0; i
< nelts
; i
++)
8816 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, elts
[2 * nelts
+ i
]);
8817 return build_constructor (type
, v
);
8820 return build_vector (type
, &elts
[2 * nelts
]);
8823 /* Try to fold a pointer difference of type TYPE two address expressions of
8824 array references AREF0 and AREF1 using location LOC. Return a
8825 simplified expression for the difference or NULL_TREE. */
8828 fold_addr_of_array_ref_difference (location_t loc
, tree type
,
8829 tree aref0
, tree aref1
)
8831 tree base0
= TREE_OPERAND (aref0
, 0);
8832 tree base1
= TREE_OPERAND (aref1
, 0);
8833 tree base_offset
= build_int_cst (type
, 0);
8835 /* If the bases are array references as well, recurse. If the bases
8836 are pointer indirections compute the difference of the pointers.
8837 If the bases are equal, we are set. */
8838 if ((TREE_CODE (base0
) == ARRAY_REF
8839 && TREE_CODE (base1
) == ARRAY_REF
8841 = fold_addr_of_array_ref_difference (loc
, type
, base0
, base1
)))
8842 || (INDIRECT_REF_P (base0
)
8843 && INDIRECT_REF_P (base1
)
8845 = fold_binary_loc (loc
, MINUS_EXPR
, type
,
8846 fold_convert (type
, TREE_OPERAND (base0
, 0)),
8848 TREE_OPERAND (base1
, 0)))))
8849 || operand_equal_p (base0
, base1
, OEP_ADDRESS_OF
))
8851 tree op0
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref0
, 1));
8852 tree op1
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref1
, 1));
8853 tree esz
= fold_convert_loc (loc
, type
, array_ref_element_size (aref0
));
8854 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
8855 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
8857 fold_build2_loc (loc
, MULT_EXPR
, type
,
8863 /* If the real or vector real constant CST of type TYPE has an exact
8864 inverse, return it, else return NULL. */
8867 exact_inverse (tree type
, tree cst
)
8870 tree unit_type
, *elts
;
8872 unsigned vec_nelts
, i
;
8874 switch (TREE_CODE (cst
))
8877 r
= TREE_REAL_CST (cst
);
8879 if (exact_real_inverse (TYPE_MODE (type
), &r
))
8880 return build_real (type
, r
);
8885 vec_nelts
= VECTOR_CST_NELTS (cst
);
8886 elts
= XALLOCAVEC (tree
, vec_nelts
);
8887 unit_type
= TREE_TYPE (type
);
8888 mode
= TYPE_MODE (unit_type
);
8890 for (i
= 0; i
< vec_nelts
; i
++)
8892 r
= TREE_REAL_CST (VECTOR_CST_ELT (cst
, i
));
8893 if (!exact_real_inverse (mode
, &r
))
8895 elts
[i
] = build_real (unit_type
, r
);
8898 return build_vector (type
, elts
);
8905 /* Mask out the tz least significant bits of X of type TYPE where
8906 tz is the number of trailing zeroes in Y. */
8908 mask_with_tz (tree type
, const wide_int
&x
, const wide_int
&y
)
8910 int tz
= wi::ctz (y
);
8912 return wi::mask (tz
, true, TYPE_PRECISION (type
)) & x
;
8916 /* Return true when T is an address and is known to be nonzero.
8917 For floating point we further ensure that T is not denormal.
8918 Similar logic is present in nonzero_address in rtlanal.h.
8920 If the return value is based on the assumption that signed overflow
8921 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8922 change *STRICT_OVERFLOW_P. */
8925 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
8927 tree type
= TREE_TYPE (t
);
8928 enum tree_code code
;
8930 /* Doing something useful for floating point would need more work. */
8931 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
8934 code
= TREE_CODE (t
);
8935 switch (TREE_CODE_CLASS (code
))
8938 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
8941 case tcc_comparison
:
8942 return tree_binary_nonzero_warnv_p (code
, type
,
8943 TREE_OPERAND (t
, 0),
8944 TREE_OPERAND (t
, 1),
8947 case tcc_declaration
:
8949 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
8957 case TRUTH_NOT_EXPR
:
8958 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
8961 case TRUTH_AND_EXPR
:
8963 case TRUTH_XOR_EXPR
:
8964 return tree_binary_nonzero_warnv_p (code
, type
,
8965 TREE_OPERAND (t
, 0),
8966 TREE_OPERAND (t
, 1),
8974 case WITH_SIZE_EXPR
:
8976 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
8981 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
8985 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
8990 tree fndecl
= get_callee_fndecl (t
);
8991 if (!fndecl
) return false;
8992 if (flag_delete_null_pointer_checks
&& !flag_check_new
8993 && DECL_IS_OPERATOR_NEW (fndecl
)
8994 && !TREE_NOTHROW (fndecl
))
8996 if (flag_delete_null_pointer_checks
8997 && lookup_attribute ("returns_nonnull",
8998 TYPE_ATTRIBUTES (TREE_TYPE (fndecl
))))
9000 return alloca_call_p (t
);
9009 /* Return true when T is an address and is known to be nonzero.
9010 Handle warnings about undefined signed overflow. */
9013 tree_expr_nonzero_p (tree t
)
9015 bool ret
, strict_overflow_p
;
9017 strict_overflow_p
= false;
9018 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
9019 if (strict_overflow_p
)
9020 fold_overflow_warning (("assuming signed overflow does not occur when "
9021 "determining that expression is always "
9023 WARN_STRICT_OVERFLOW_MISC
);
9027 /* Fold a binary expression of code CODE and type TYPE with operands
9028 OP0 and OP1. LOC is the location of the resulting expression.
9029 Return the folded expression if folding is successful. Otherwise,
9030 return NULL_TREE. */
9033 fold_binary_loc (location_t loc
,
9034 enum tree_code code
, tree type
, tree op0
, tree op1
)
9036 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9037 tree arg0
, arg1
, tem
;
9038 tree t1
= NULL_TREE
;
9039 bool strict_overflow_p
;
9042 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9043 && TREE_CODE_LENGTH (code
) == 2
9045 && op1
!= NULL_TREE
);
9050 /* Strip any conversions that don't change the mode. This is
9051 safe for every expression, except for a comparison expression
9052 because its signedness is derived from its operands. So, in
9053 the latter case, only strip conversions that don't change the
9054 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9057 Note that this is done as an internal manipulation within the
9058 constant folder, in order to find the simplest representation
9059 of the arguments so that their form can be studied. In any
9060 cases, the appropriate type conversions should be put back in
9061 the tree that will get out of the constant folder. */
9063 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9065 STRIP_SIGN_NOPS (arg0
);
9066 STRIP_SIGN_NOPS (arg1
);
9074 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9075 constant but we can't do arithmetic on them. */
9076 if (CONSTANT_CLASS_P (arg0
) && CONSTANT_CLASS_P (arg1
))
9078 tem
= const_binop (code
, type
, arg0
, arg1
);
9079 if (tem
!= NULL_TREE
)
9081 if (TREE_TYPE (tem
) != type
)
9082 tem
= fold_convert_loc (loc
, type
, tem
);
9087 /* If this is a commutative operation, and ARG0 is a constant, move it
9088 to ARG1 to reduce the number of tests below. */
9089 if (commutative_tree_code (code
)
9090 && tree_swap_operands_p (arg0
, arg1
, true))
9091 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9093 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9094 to ARG1 to reduce the number of tests below. */
9095 if (kind
== tcc_comparison
9096 && tree_swap_operands_p (arg0
, arg1
, true))
9097 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
9099 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
9103 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9105 First check for cases where an arithmetic operation is applied to a
9106 compound, conditional, or comparison operation. Push the arithmetic
9107 operation inside the compound or conditional to see if any folding
9108 can then be done. Convert comparison to conditional for this purpose.
9109 The also optimizes non-constant cases that used to be done in
9112 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9113 one of the operands is a comparison and the other is a comparison, a
9114 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9115 code below would make the expression more complex. Change it to a
9116 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9117 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9119 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9120 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9121 && TREE_CODE (type
) != VECTOR_TYPE
9122 && ((truth_value_p (TREE_CODE (arg0
))
9123 && (truth_value_p (TREE_CODE (arg1
))
9124 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9125 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9126 || (truth_value_p (TREE_CODE (arg1
))
9127 && (truth_value_p (TREE_CODE (arg0
))
9128 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9129 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9131 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9132 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9135 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9136 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9138 if (code
== EQ_EXPR
)
9139 tem
= invert_truthvalue_loc (loc
, tem
);
9141 return fold_convert_loc (loc
, type
, tem
);
9144 if (TREE_CODE_CLASS (code
) == tcc_binary
9145 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9147 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9149 tem
= fold_build2_loc (loc
, code
, type
,
9150 fold_convert_loc (loc
, TREE_TYPE (op0
),
9151 TREE_OPERAND (arg0
, 1)), op1
);
9152 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9155 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9156 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9158 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9159 fold_convert_loc (loc
, TREE_TYPE (op1
),
9160 TREE_OPERAND (arg1
, 1)));
9161 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9165 if (TREE_CODE (arg0
) == COND_EXPR
9166 || TREE_CODE (arg0
) == VEC_COND_EXPR
9167 || COMPARISON_CLASS_P (arg0
))
9169 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9171 /*cond_first_p=*/1);
9172 if (tem
!= NULL_TREE
)
9176 if (TREE_CODE (arg1
) == COND_EXPR
9177 || TREE_CODE (arg1
) == VEC_COND_EXPR
9178 || COMPARISON_CLASS_P (arg1
))
9180 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9182 /*cond_first_p=*/0);
9183 if (tem
!= NULL_TREE
)
9191 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9192 if (TREE_CODE (arg0
) == ADDR_EXPR
9193 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9195 tree iref
= TREE_OPERAND (arg0
, 0);
9196 return fold_build2 (MEM_REF
, type
,
9197 TREE_OPERAND (iref
, 0),
9198 int_const_binop (PLUS_EXPR
, arg1
,
9199 TREE_OPERAND (iref
, 1)));
9202 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9203 if (TREE_CODE (arg0
) == ADDR_EXPR
9204 && handled_component_p (TREE_OPERAND (arg0
, 0)))
9207 HOST_WIDE_INT coffset
;
9208 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
9212 return fold_build2 (MEM_REF
, type
,
9213 build_fold_addr_expr (base
),
9214 int_const_binop (PLUS_EXPR
, arg1
,
9215 size_int (coffset
)));
9220 case POINTER_PLUS_EXPR
:
9221 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9222 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9223 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
9224 return fold_convert_loc (loc
, type
,
9225 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9226 fold_convert_loc (loc
, sizetype
,
9228 fold_convert_loc (loc
, sizetype
,
9234 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
9236 /* X + (X / CST) * -CST is X % CST. */
9237 if (TREE_CODE (arg1
) == MULT_EXPR
9238 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
9239 && operand_equal_p (arg0
,
9240 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
9242 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
9243 tree cst1
= TREE_OPERAND (arg1
, 1);
9244 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
9246 if (sum
&& integer_zerop (sum
))
9247 return fold_convert_loc (loc
, type
,
9248 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
9249 TREE_TYPE (arg0
), arg0
,
9254 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9255 one. Make sure the type is not saturating and has the signedness of
9256 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9257 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9258 if ((TREE_CODE (arg0
) == MULT_EXPR
9259 || TREE_CODE (arg1
) == MULT_EXPR
)
9260 && !TYPE_SATURATING (type
)
9261 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
9262 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
9263 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9265 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9270 if (! FLOAT_TYPE_P (type
))
9272 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9273 (plus (plus (mult) (mult)) (foo)) so that we can
9274 take advantage of the factoring cases below. */
9275 if (ANY_INTEGRAL_TYPE_P (type
)
9276 && TYPE_OVERFLOW_WRAPS (type
)
9277 && (((TREE_CODE (arg0
) == PLUS_EXPR
9278 || TREE_CODE (arg0
) == MINUS_EXPR
)
9279 && TREE_CODE (arg1
) == MULT_EXPR
)
9280 || ((TREE_CODE (arg1
) == PLUS_EXPR
9281 || TREE_CODE (arg1
) == MINUS_EXPR
)
9282 && TREE_CODE (arg0
) == MULT_EXPR
)))
9284 tree parg0
, parg1
, parg
, marg
;
9285 enum tree_code pcode
;
9287 if (TREE_CODE (arg1
) == MULT_EXPR
)
9288 parg
= arg0
, marg
= arg1
;
9290 parg
= arg1
, marg
= arg0
;
9291 pcode
= TREE_CODE (parg
);
9292 parg0
= TREE_OPERAND (parg
, 0);
9293 parg1
= TREE_OPERAND (parg
, 1);
9297 if (TREE_CODE (parg0
) == MULT_EXPR
9298 && TREE_CODE (parg1
) != MULT_EXPR
)
9299 return fold_build2_loc (loc
, pcode
, type
,
9300 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9301 fold_convert_loc (loc
, type
,
9303 fold_convert_loc (loc
, type
,
9305 fold_convert_loc (loc
, type
, parg1
));
9306 if (TREE_CODE (parg0
) != MULT_EXPR
9307 && TREE_CODE (parg1
) == MULT_EXPR
)
9309 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9310 fold_convert_loc (loc
, type
, parg0
),
9311 fold_build2_loc (loc
, pcode
, type
,
9312 fold_convert_loc (loc
, type
, marg
),
9313 fold_convert_loc (loc
, type
,
9319 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9320 to __complex__ ( x, y ). This is not the same for SNaNs or
9321 if signed zeros are involved. */
9322 if (!HONOR_SNANS (element_mode (arg0
))
9323 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9324 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9326 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9327 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
9328 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
9329 bool arg0rz
= false, arg0iz
= false;
9330 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9331 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9333 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
9334 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
9335 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9337 tree rp
= arg1r
? arg1r
9338 : build1 (REALPART_EXPR
, rtype
, arg1
);
9339 tree ip
= arg0i
? arg0i
9340 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9341 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9343 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9345 tree rp
= arg0r
? arg0r
9346 : build1 (REALPART_EXPR
, rtype
, arg0
);
9347 tree ip
= arg1i
? arg1i
9348 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
9349 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9354 if (flag_unsafe_math_optimizations
9355 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
9356 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
9357 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
9360 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9361 We associate floats only if the user has specified
9362 -fassociative-math. */
9363 if (flag_associative_math
9364 && TREE_CODE (arg1
) == PLUS_EXPR
9365 && TREE_CODE (arg0
) != MULT_EXPR
)
9367 tree tree10
= TREE_OPERAND (arg1
, 0);
9368 tree tree11
= TREE_OPERAND (arg1
, 1);
9369 if (TREE_CODE (tree11
) == MULT_EXPR
9370 && TREE_CODE (tree10
) == MULT_EXPR
)
9373 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
9374 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
9377 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9378 We associate floats only if the user has specified
9379 -fassociative-math. */
9380 if (flag_associative_math
9381 && TREE_CODE (arg0
) == PLUS_EXPR
9382 && TREE_CODE (arg1
) != MULT_EXPR
)
9384 tree tree00
= TREE_OPERAND (arg0
, 0);
9385 tree tree01
= TREE_OPERAND (arg0
, 1);
9386 if (TREE_CODE (tree01
) == MULT_EXPR
9387 && TREE_CODE (tree00
) == MULT_EXPR
)
9390 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
9391 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
9397 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9398 is a rotate of A by C1 bits. */
9399 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9400 is a rotate of A by B bits. */
9402 enum tree_code code0
, code1
;
9404 code0
= TREE_CODE (arg0
);
9405 code1
= TREE_CODE (arg1
);
9406 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
9407 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
9408 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9409 TREE_OPERAND (arg1
, 0), 0)
9410 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9411 TYPE_UNSIGNED (rtype
))
9412 /* Only create rotates in complete modes. Other cases are not
9413 expanded properly. */
9414 && (element_precision (rtype
)
9415 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype
))))
9417 tree tree01
, tree11
;
9418 enum tree_code code01
, code11
;
9420 tree01
= TREE_OPERAND (arg0
, 1);
9421 tree11
= TREE_OPERAND (arg1
, 1);
9422 STRIP_NOPS (tree01
);
9423 STRIP_NOPS (tree11
);
9424 code01
= TREE_CODE (tree01
);
9425 code11
= TREE_CODE (tree11
);
9426 if (code01
== INTEGER_CST
9427 && code11
== INTEGER_CST
9428 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
9429 == element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
9431 tem
= build2_loc (loc
, LROTATE_EXPR
,
9432 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9433 TREE_OPERAND (arg0
, 0),
9434 code0
== LSHIFT_EXPR
9435 ? TREE_OPERAND (arg0
, 1)
9436 : TREE_OPERAND (arg1
, 1));
9437 return fold_convert_loc (loc
, type
, tem
);
9439 else if (code11
== MINUS_EXPR
)
9441 tree tree110
, tree111
;
9442 tree110
= TREE_OPERAND (tree11
, 0);
9443 tree111
= TREE_OPERAND (tree11
, 1);
9444 STRIP_NOPS (tree110
);
9445 STRIP_NOPS (tree111
);
9446 if (TREE_CODE (tree110
) == INTEGER_CST
9447 && 0 == compare_tree_int (tree110
,
9449 (TREE_TYPE (TREE_OPERAND
9451 && operand_equal_p (tree01
, tree111
, 0))
9453 fold_convert_loc (loc
, type
,
9454 build2 ((code0
== LSHIFT_EXPR
9457 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9458 TREE_OPERAND (arg0
, 0),
9459 TREE_OPERAND (arg0
, 1)));
9461 else if (code01
== MINUS_EXPR
)
9463 tree tree010
, tree011
;
9464 tree010
= TREE_OPERAND (tree01
, 0);
9465 tree011
= TREE_OPERAND (tree01
, 1);
9466 STRIP_NOPS (tree010
);
9467 STRIP_NOPS (tree011
);
9468 if (TREE_CODE (tree010
) == INTEGER_CST
9469 && 0 == compare_tree_int (tree010
,
9471 (TREE_TYPE (TREE_OPERAND
9473 && operand_equal_p (tree11
, tree011
, 0))
9474 return fold_convert_loc
9476 build2 ((code0
!= LSHIFT_EXPR
9479 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9480 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1)));
9486 /* In most languages, can't associate operations on floats through
9487 parentheses. Rather than remember where the parentheses were, we
9488 don't associate floats at all, unless the user has specified
9490 And, we need to make sure type is not saturating. */
9492 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
9493 && !TYPE_SATURATING (type
))
9495 tree var0
, con0
, lit0
, minus_lit0
;
9496 tree var1
, con1
, lit1
, minus_lit1
;
9500 /* Split both trees into variables, constants, and literals. Then
9501 associate each group together, the constants with literals,
9502 then the result with variables. This increases the chances of
9503 literals being recombined later and of generating relocatable
9504 expressions for the sum of a constant and literal. */
9505 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
9506 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
9507 code
== MINUS_EXPR
);
9509 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9510 if (code
== MINUS_EXPR
)
9513 /* With undefined overflow prefer doing association in a type
9514 which wraps on overflow, if that is one of the operand types. */
9515 if ((POINTER_TYPE_P (type
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
9516 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
9518 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9519 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
9520 atype
= TREE_TYPE (arg0
);
9521 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9522 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
9523 atype
= TREE_TYPE (arg1
);
9524 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
9527 /* With undefined overflow we can only associate constants with one
9528 variable, and constants whose association doesn't overflow. */
9529 if ((POINTER_TYPE_P (atype
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
9530 || (INTEGRAL_TYPE_P (atype
) && !TYPE_OVERFLOW_WRAPS (atype
)))
9536 bool one_neg
= false;
9538 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
9540 tmp0
= TREE_OPERAND (tmp0
, 0);
9543 if (CONVERT_EXPR_P (tmp0
)
9544 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
9545 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
9546 <= TYPE_PRECISION (atype
)))
9547 tmp0
= TREE_OPERAND (tmp0
, 0);
9548 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
9550 tmp1
= TREE_OPERAND (tmp1
, 0);
9553 if (CONVERT_EXPR_P (tmp1
)
9554 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
9555 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
9556 <= TYPE_PRECISION (atype
)))
9557 tmp1
= TREE_OPERAND (tmp1
, 0);
9558 /* The only case we can still associate with two variables
9559 is if they cancel out. */
9561 || !operand_equal_p (tmp0
, tmp1
, 0))
9566 /* Only do something if we found more than two objects. Otherwise,
9567 nothing has changed and we risk infinite recursion. */
9569 && (2 < ((var0
!= 0) + (var1
!= 0)
9570 + (con0
!= 0) + (con1
!= 0)
9571 + (lit0
!= 0) + (lit1
!= 0)
9572 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
9574 bool any_overflows
= false;
9575 if (lit0
) any_overflows
|= TREE_OVERFLOW (lit0
);
9576 if (lit1
) any_overflows
|= TREE_OVERFLOW (lit1
);
9577 if (minus_lit0
) any_overflows
|= TREE_OVERFLOW (minus_lit0
);
9578 if (minus_lit1
) any_overflows
|= TREE_OVERFLOW (minus_lit1
);
9579 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
9580 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
9581 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
9582 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
9585 /* Preserve the MINUS_EXPR if the negative part of the literal is
9586 greater than the positive part. Otherwise, the multiplicative
9587 folding code (i.e extract_muldiv) may be fooled in case
9588 unsigned constants are subtracted, like in the following
9589 example: ((X*2 + 4) - 8U)/2. */
9590 if (minus_lit0
&& lit0
)
9592 if (TREE_CODE (lit0
) == INTEGER_CST
9593 && TREE_CODE (minus_lit0
) == INTEGER_CST
9594 && tree_int_cst_lt (lit0
, minus_lit0
))
9596 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
9602 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
9608 /* Don't introduce overflows through reassociation. */
9610 && ((lit0
&& TREE_OVERFLOW_P (lit0
))
9611 || (minus_lit0
&& TREE_OVERFLOW_P (minus_lit0
))))
9618 fold_convert_loc (loc
, type
,
9619 associate_trees (loc
, var0
, minus_lit0
,
9620 MINUS_EXPR
, atype
));
9623 con0
= associate_trees (loc
, con0
, minus_lit0
,
9626 fold_convert_loc (loc
, type
,
9627 associate_trees (loc
, var0
, con0
,
9632 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
9634 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
9642 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9643 if (TREE_CODE (arg0
) == NEGATE_EXPR
9644 && negate_expr_p (arg1
)
9645 && reorder_operands_p (arg0
, arg1
))
9646 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
9647 fold_convert_loc (loc
, type
,
9648 negate_expr (arg1
)),
9649 fold_convert_loc (loc
, type
,
9650 TREE_OPERAND (arg0
, 0)));
9652 if (! FLOAT_TYPE_P (type
))
9654 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9655 any power of 2 minus 1. */
9656 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9657 && TREE_CODE (arg1
) == BIT_AND_EXPR
9658 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9659 TREE_OPERAND (arg1
, 0), 0))
9661 tree mask0
= TREE_OPERAND (arg0
, 1);
9662 tree mask1
= TREE_OPERAND (arg1
, 1);
9663 tree tem
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, mask0
);
9665 if (operand_equal_p (tem
, mask1
, 0))
9667 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
9668 TREE_OPERAND (arg0
, 0), mask1
);
9669 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tem
, mask1
);
9674 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9675 __complex__ ( x, -y ). This is not the same for SNaNs or if
9676 signed zeros are involved. */
9677 if (!HONOR_SNANS (element_mode (arg0
))
9678 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9679 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9681 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9682 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
9683 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
9684 bool arg0rz
= false, arg0iz
= false;
9685 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9686 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9688 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
9689 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
9690 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9692 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
9694 : build1 (REALPART_EXPR
, rtype
, arg1
));
9695 tree ip
= arg0i
? arg0i
9696 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9697 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9699 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9701 tree rp
= arg0r
? arg0r
9702 : build1 (REALPART_EXPR
, rtype
, arg0
);
9703 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
9705 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
9706 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9711 /* A - B -> A + (-B) if B is easily negatable. */
9712 if (negate_expr_p (arg1
)
9713 && !TYPE_OVERFLOW_SANITIZED (type
)
9714 && ((FLOAT_TYPE_P (type
)
9715 /* Avoid this transformation if B is a positive REAL_CST. */
9716 && (TREE_CODE (arg1
) != REAL_CST
9717 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
9718 || INTEGRAL_TYPE_P (type
)))
9719 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
9720 fold_convert_loc (loc
, type
, arg0
),
9721 fold_convert_loc (loc
, type
,
9722 negate_expr (arg1
)));
9724 /* Fold &a[i] - &a[j] to i-j. */
9725 if (TREE_CODE (arg0
) == ADDR_EXPR
9726 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
9727 && TREE_CODE (arg1
) == ADDR_EXPR
9728 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
9730 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
9731 TREE_OPERAND (arg0
, 0),
9732 TREE_OPERAND (arg1
, 0));
9737 if (FLOAT_TYPE_P (type
)
9738 && flag_unsafe_math_optimizations
9739 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
9740 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
9741 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
9744 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9745 one. Make sure the type is not saturating and has the signedness of
9746 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9747 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9748 if ((TREE_CODE (arg0
) == MULT_EXPR
9749 || TREE_CODE (arg1
) == MULT_EXPR
)
9750 && !TYPE_SATURATING (type
)
9751 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
9752 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
9753 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9755 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9763 if (! FLOAT_TYPE_P (type
))
9765 /* Transform x * -C into -x * C if x is easily negatable. */
9766 if (TREE_CODE (arg1
) == INTEGER_CST
9767 && tree_int_cst_sgn (arg1
) == -1
9768 && negate_expr_p (arg0
)
9769 && (tem
= negate_expr (arg1
)) != arg1
9770 && !TREE_OVERFLOW (tem
))
9771 return fold_build2_loc (loc
, MULT_EXPR
, type
,
9772 fold_convert_loc (loc
, type
,
9773 negate_expr (arg0
)),
9776 /* (A + A) * C -> A * 2 * C */
9777 if (TREE_CODE (arg0
) == PLUS_EXPR
9778 && TREE_CODE (arg1
) == INTEGER_CST
9779 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9780 TREE_OPERAND (arg0
, 1), 0))
9781 return fold_build2_loc (loc
, MULT_EXPR
, type
,
9782 omit_one_operand_loc (loc
, type
,
9783 TREE_OPERAND (arg0
, 0),
9784 TREE_OPERAND (arg0
, 1)),
9785 fold_build2_loc (loc
, MULT_EXPR
, type
,
9786 build_int_cst (type
, 2) , arg1
));
9788 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9789 sign-changing only. */
9790 if (TREE_CODE (arg1
) == INTEGER_CST
9791 && TREE_CODE (arg0
) == EXACT_DIV_EXPR
9792 && operand_equal_p (arg1
, TREE_OPERAND (arg0
, 1), 0))
9793 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
9795 strict_overflow_p
= false;
9796 if (TREE_CODE (arg1
) == INTEGER_CST
9797 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
9798 &strict_overflow_p
)))
9800 if (strict_overflow_p
)
9801 fold_overflow_warning (("assuming signed overflow does not "
9802 "occur when simplifying "
9804 WARN_STRICT_OVERFLOW_MISC
);
9805 return fold_convert_loc (loc
, type
, tem
);
9808 /* Optimize z * conj(z) for integer complex numbers. */
9809 if (TREE_CODE (arg0
) == CONJ_EXPR
9810 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9811 return fold_mult_zconjz (loc
, type
, arg1
);
9812 if (TREE_CODE (arg1
) == CONJ_EXPR
9813 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9814 return fold_mult_zconjz (loc
, type
, arg0
);
9818 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9819 This is not the same for NaNs or if signed zeros are
9821 if (!HONOR_NANS (arg0
)
9822 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9823 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
9824 && TREE_CODE (arg1
) == COMPLEX_CST
9825 && real_zerop (TREE_REALPART (arg1
)))
9827 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9828 if (real_onep (TREE_IMAGPART (arg1
)))
9830 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
9831 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
9833 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
9834 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
9836 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
9837 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
9838 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
9842 /* Optimize z * conj(z) for floating point complex numbers.
9843 Guarded by flag_unsafe_math_optimizations as non-finite
9844 imaginary components don't produce scalar results. */
9845 if (flag_unsafe_math_optimizations
9846 && TREE_CODE (arg0
) == CONJ_EXPR
9847 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9848 return fold_mult_zconjz (loc
, type
, arg1
);
9849 if (flag_unsafe_math_optimizations
9850 && TREE_CODE (arg1
) == CONJ_EXPR
9851 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9852 return fold_mult_zconjz (loc
, type
, arg0
);
9854 if (flag_unsafe_math_optimizations
)
9857 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9860 && operand_equal_p (arg0
, arg1
, 0))
9862 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
9866 tree arg
= build_real (type
, dconst2
);
9867 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
9875 /* Canonicalize (X & C1) | C2. */
9876 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9877 && TREE_CODE (arg1
) == INTEGER_CST
9878 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9880 int width
= TYPE_PRECISION (type
), w
;
9881 wide_int c1
= TREE_OPERAND (arg0
, 1);
9884 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9885 if ((c1
& c2
) == c1
)
9886 return omit_one_operand_loc (loc
, type
, arg1
,
9887 TREE_OPERAND (arg0
, 0));
9889 wide_int msk
= wi::mask (width
, false,
9890 TYPE_PRECISION (TREE_TYPE (arg1
)));
9892 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9893 if (msk
.and_not (c1
| c2
) == 0)
9894 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
9895 TREE_OPERAND (arg0
, 0), arg1
);
9897 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9898 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9899 mode which allows further optimizations. */
9902 wide_int c3
= c1
.and_not (c2
);
9903 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
9905 wide_int mask
= wi::mask (w
, false,
9906 TYPE_PRECISION (type
));
9907 if (((c1
| c2
) & mask
) == mask
&& c1
.and_not (mask
) == 0)
9915 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
9916 fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
9917 TREE_OPERAND (arg0
, 0),
9918 wide_int_to_tree (type
,
9923 /* (X & ~Y) | (~X & Y) is X ^ Y */
9924 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9925 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
9927 tree a0
, a1
, l0
, l1
, n0
, n1
;
9929 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
9930 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
9932 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
9933 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
9935 n0
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l0
);
9936 n1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l1
);
9938 if ((operand_equal_p (n0
, a0
, 0)
9939 && operand_equal_p (n1
, a1
, 0))
9940 || (operand_equal_p (n0
, a1
, 0)
9941 && operand_equal_p (n1
, a0
, 0)))
9942 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, l0
, n1
);
9945 /* See if this can be simplified into a rotate first. If that
9946 is unsuccessful continue in the association code. */
9950 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9951 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9952 && INTEGRAL_TYPE_P (type
)
9953 && integer_onep (TREE_OPERAND (arg0
, 1))
9954 && integer_onep (arg1
))
9955 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
9956 build_zero_cst (TREE_TYPE (arg0
)));
9958 /* See if this can be simplified into a rotate first. If that
9959 is unsuccessful continue in the association code. */
9963 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9964 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9965 && INTEGRAL_TYPE_P (type
)
9966 && integer_onep (TREE_OPERAND (arg0
, 1))
9967 && integer_onep (arg1
))
9970 tem
= TREE_OPERAND (arg0
, 0);
9971 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
9972 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
9974 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
9975 build_zero_cst (TREE_TYPE (tem
)));
9977 /* Fold ~X & 1 as (X & 1) == 0. */
9978 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9979 && INTEGRAL_TYPE_P (type
)
9980 && integer_onep (arg1
))
9983 tem
= TREE_OPERAND (arg0
, 0);
9984 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
9985 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
9987 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
9988 build_zero_cst (TREE_TYPE (tem
)));
9990 /* Fold !X & 1 as X == 0. */
9991 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
9992 && integer_onep (arg1
))
9994 tem
= TREE_OPERAND (arg0
, 0);
9995 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
9996 build_zero_cst (TREE_TYPE (tem
)));
9999 /* Fold (X ^ Y) & Y as ~X & Y. */
10000 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10001 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
10003 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10004 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10005 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
10006 fold_convert_loc (loc
, type
, arg1
));
10008 /* Fold (X ^ Y) & X as ~Y & X. */
10009 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10010 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
10011 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
10013 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10014 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10015 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
10016 fold_convert_loc (loc
, type
, arg1
));
10018 /* Fold X & (X ^ Y) as X & ~Y. */
10019 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
10020 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10022 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10023 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10024 fold_convert_loc (loc
, type
, arg0
),
10025 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
10027 /* Fold X & (Y ^ X) as ~Y & X. */
10028 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
10029 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
10030 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
10032 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10033 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10034 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
10035 fold_convert_loc (loc
, type
, arg0
));
10038 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10039 multiple of 1 << CST. */
10040 if (TREE_CODE (arg1
) == INTEGER_CST
)
10042 wide_int cst1
= arg1
;
10043 wide_int ncst1
= -cst1
;
10044 if ((cst1
& ncst1
) == ncst1
10045 && multiple_of_p (type
, arg0
,
10046 wide_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
10047 return fold_convert_loc (loc
, type
, arg0
);
10050 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10052 if (TREE_CODE (arg1
) == INTEGER_CST
10053 && TREE_CODE (arg0
) == MULT_EXPR
10054 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10056 wide_int warg1
= arg1
;
10057 wide_int masked
= mask_with_tz (type
, warg1
, TREE_OPERAND (arg0
, 1));
10060 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
10062 else if (masked
!= warg1
)
10064 /* Avoid the transform if arg1 is a mask of some
10065 mode which allows further optimizations. */
10066 int pop
= wi::popcount (warg1
);
10067 if (!(pop
>= BITS_PER_UNIT
10068 && exact_log2 (pop
) != -1
10069 && wi::mask (pop
, false, warg1
.get_precision ()) == warg1
))
10070 return fold_build2_loc (loc
, code
, type
, op0
,
10071 wide_int_to_tree (type
, masked
));
10075 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10076 ((A & N) + B) & M -> (A + B) & M
10077 Similarly if (N & M) == 0,
10078 ((A | N) + B) & M -> (A + B) & M
10079 and for - instead of + (or unary - instead of +)
10080 and/or ^ instead of |.
10081 If B is constant and (B & M) == 0, fold into A & M. */
10082 if (TREE_CODE (arg1
) == INTEGER_CST
)
10084 wide_int cst1
= arg1
;
10085 if ((~cst1
!= 0) && (cst1
& (cst1
+ 1)) == 0
10086 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10087 && (TREE_CODE (arg0
) == PLUS_EXPR
10088 || TREE_CODE (arg0
) == MINUS_EXPR
10089 || TREE_CODE (arg0
) == NEGATE_EXPR
)
10090 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
))
10091 || TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
))
10097 /* Now we know that arg0 is (C + D) or (C - D) or
10098 -C and arg1 (M) is == (1LL << cst) - 1.
10099 Store C into PMOP[0] and D into PMOP[1]. */
10100 pmop
[0] = TREE_OPERAND (arg0
, 0);
10102 if (TREE_CODE (arg0
) != NEGATE_EXPR
)
10104 pmop
[1] = TREE_OPERAND (arg0
, 1);
10108 if ((wi::max_value (TREE_TYPE (arg0
)) & cst1
) != cst1
)
10111 for (; which
>= 0; which
--)
10112 switch (TREE_CODE (pmop
[which
]))
10117 if (TREE_CODE (TREE_OPERAND (pmop
[which
], 1))
10120 cst0
= TREE_OPERAND (pmop
[which
], 1);
10122 if (TREE_CODE (pmop
[which
]) == BIT_AND_EXPR
)
10127 else if (cst0
!= 0)
10129 /* If C or D is of the form (A & N) where
10130 (N & M) == M, or of the form (A | N) or
10131 (A ^ N) where (N & M) == 0, replace it with A. */
10132 pmop
[which
] = TREE_OPERAND (pmop
[which
], 0);
10135 /* If C or D is a N where (N & M) == 0, it can be
10136 omitted (assumed 0). */
10137 if ((TREE_CODE (arg0
) == PLUS_EXPR
10138 || (TREE_CODE (arg0
) == MINUS_EXPR
&& which
== 0))
10139 && (cst1
& pmop
[which
]) == 0)
10140 pmop
[which
] = NULL
;
10146 /* Only build anything new if we optimized one or both arguments
10148 if (pmop
[0] != TREE_OPERAND (arg0
, 0)
10149 || (TREE_CODE (arg0
) != NEGATE_EXPR
10150 && pmop
[1] != TREE_OPERAND (arg0
, 1)))
10152 tree utype
= TREE_TYPE (arg0
);
10153 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
10155 /* Perform the operations in a type that has defined
10156 overflow behavior. */
10157 utype
= unsigned_type_for (TREE_TYPE (arg0
));
10158 if (pmop
[0] != NULL
)
10159 pmop
[0] = fold_convert_loc (loc
, utype
, pmop
[0]);
10160 if (pmop
[1] != NULL
)
10161 pmop
[1] = fold_convert_loc (loc
, utype
, pmop
[1]);
10164 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
10165 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[0]);
10166 else if (TREE_CODE (arg0
) == PLUS_EXPR
)
10168 if (pmop
[0] != NULL
&& pmop
[1] != NULL
)
10169 tem
= fold_build2_loc (loc
, PLUS_EXPR
, utype
,
10171 else if (pmop
[0] != NULL
)
10173 else if (pmop
[1] != NULL
)
10176 return build_int_cst (type
, 0);
10178 else if (pmop
[0] == NULL
)
10179 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[1]);
10181 tem
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
10183 /* TEM is now the new binary +, - or unary - replacement. */
10184 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, utype
, tem
,
10185 fold_convert_loc (loc
, utype
, arg1
));
10186 return fold_convert_loc (loc
, type
, tem
);
10191 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10192 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
10193 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
10195 prec
= element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
10197 wide_int mask
= wide_int::from (arg1
, prec
, UNSIGNED
);
10200 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10206 /* Don't touch a floating-point divide by zero unless the mode
10207 of the constant can represent infinity. */
10208 if (TREE_CODE (arg1
) == REAL_CST
10209 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
10210 && real_zerop (arg1
))
10213 /* (-A) / (-B) -> A / B */
10214 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10215 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
10216 TREE_OPERAND (arg0
, 0),
10217 negate_expr (arg1
));
10218 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10219 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
10220 negate_expr (arg0
),
10221 TREE_OPERAND (arg1
, 0));
10223 /* Convert A/B/C to A/(B*C). */
10224 if (flag_reciprocal_math
10225 && TREE_CODE (arg0
) == RDIV_EXPR
)
10226 return fold_build2_loc (loc
, RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
10227 fold_build2_loc (loc
, MULT_EXPR
, type
,
10228 TREE_OPERAND (arg0
, 1), arg1
));
10230 /* Convert A/(B/C) to (A/B)*C. */
10231 if (flag_reciprocal_math
10232 && TREE_CODE (arg1
) == RDIV_EXPR
)
10233 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10234 fold_build2_loc (loc
, RDIV_EXPR
, type
, arg0
,
10235 TREE_OPERAND (arg1
, 0)),
10236 TREE_OPERAND (arg1
, 1));
10238 /* Convert C1/(X*C2) into (C1/C2)/X. */
10239 if (flag_reciprocal_math
10240 && TREE_CODE (arg1
) == MULT_EXPR
10241 && TREE_CODE (arg0
) == REAL_CST
10242 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
10244 tree tem
= const_binop (RDIV_EXPR
, arg0
,
10245 TREE_OPERAND (arg1
, 1));
10247 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
10248 TREE_OPERAND (arg1
, 0));
10253 case TRUNC_DIV_EXPR
:
10254 /* Optimize (X & (-A)) / A where A is a power of 2,
10256 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10257 && !TYPE_UNSIGNED (type
) && TREE_CODE (arg1
) == INTEGER_CST
10258 && integer_pow2p (arg1
) && tree_int_cst_sgn (arg1
) > 0)
10260 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (arg1
),
10261 arg1
, TREE_OPERAND (arg0
, 1));
10262 if (sum
&& integer_zerop (sum
)) {
10263 tree pow2
= build_int_cst (integer_type_node
,
10264 wi::exact_log2 (arg1
));
10265 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
10266 TREE_OPERAND (arg0
, 0), pow2
);
10272 case FLOOR_DIV_EXPR
:
10273 /* Simplify A / (B << N) where A and B are positive and B is
10274 a power of 2, to A >> (N + log2(B)). */
10275 strict_overflow_p
= false;
10276 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10277 && (TYPE_UNSIGNED (type
)
10278 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
10280 tree sval
= TREE_OPERAND (arg1
, 0);
10281 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
10283 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
10284 tree pow2
= build_int_cst (TREE_TYPE (sh_cnt
),
10285 wi::exact_log2 (sval
));
10287 if (strict_overflow_p
)
10288 fold_overflow_warning (("assuming signed overflow does not "
10289 "occur when simplifying A / (B << N)"),
10290 WARN_STRICT_OVERFLOW_MISC
);
10292 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
10294 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
10295 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
10301 case ROUND_DIV_EXPR
:
10302 case CEIL_DIV_EXPR
:
10303 case EXACT_DIV_EXPR
:
10304 if (integer_zerop (arg1
))
10307 /* Convert -A / -B to A / B when the type is signed and overflow is
10309 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
10310 && TREE_CODE (arg0
) == NEGATE_EXPR
10311 && negate_expr_p (arg1
))
10313 if (INTEGRAL_TYPE_P (type
))
10314 fold_overflow_warning (("assuming signed overflow does not occur "
10315 "when distributing negation across "
10317 WARN_STRICT_OVERFLOW_MISC
);
10318 return fold_build2_loc (loc
, code
, type
,
10319 fold_convert_loc (loc
, type
,
10320 TREE_OPERAND (arg0
, 0)),
10321 fold_convert_loc (loc
, type
,
10322 negate_expr (arg1
)));
10324 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
10325 && TREE_CODE (arg1
) == NEGATE_EXPR
10326 && negate_expr_p (arg0
))
10328 if (INTEGRAL_TYPE_P (type
))
10329 fold_overflow_warning (("assuming signed overflow does not occur "
10330 "when distributing negation across "
10332 WARN_STRICT_OVERFLOW_MISC
);
10333 return fold_build2_loc (loc
, code
, type
,
10334 fold_convert_loc (loc
, type
,
10335 negate_expr (arg0
)),
10336 fold_convert_loc (loc
, type
,
10337 TREE_OPERAND (arg1
, 0)));
10340 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10341 operation, EXACT_DIV_EXPR.
10343 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10344 At one time others generated faster code, it's not clear if they do
10345 after the last round to changes to the DIV code in expmed.c. */
10346 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
10347 && multiple_of_p (type
, arg0
, arg1
))
10348 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
,
10349 fold_convert (type
, arg0
),
10350 fold_convert (type
, arg1
));
10352 strict_overflow_p
= false;
10353 if (TREE_CODE (arg1
) == INTEGER_CST
10354 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10355 &strict_overflow_p
)))
10357 if (strict_overflow_p
)
10358 fold_overflow_warning (("assuming signed overflow does not occur "
10359 "when simplifying division"),
10360 WARN_STRICT_OVERFLOW_MISC
);
10361 return fold_convert_loc (loc
, type
, tem
);
10366 case CEIL_MOD_EXPR
:
10367 case FLOOR_MOD_EXPR
:
10368 case ROUND_MOD_EXPR
:
10369 case TRUNC_MOD_EXPR
:
10370 strict_overflow_p
= false;
10371 if (TREE_CODE (arg1
) == INTEGER_CST
10372 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10373 &strict_overflow_p
)))
10375 if (strict_overflow_p
)
10376 fold_overflow_warning (("assuming signed overflow does not occur "
10377 "when simplifying modulus"),
10378 WARN_STRICT_OVERFLOW_MISC
);
10379 return fold_convert_loc (loc
, type
, tem
);
10388 /* Since negative shift count is not well-defined,
10389 don't try to compute it in the compiler. */
10390 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
10393 prec
= element_precision (type
);
10395 /* If we have a rotate of a bit operation with the rotate count and
10396 the second operand of the bit operation both constant,
10397 permute the two operations. */
10398 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
10399 && (TREE_CODE (arg0
) == BIT_AND_EXPR
10400 || TREE_CODE (arg0
) == BIT_IOR_EXPR
10401 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
10402 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10403 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
10404 fold_build2_loc (loc
, code
, type
,
10405 TREE_OPERAND (arg0
, 0), arg1
),
10406 fold_build2_loc (loc
, code
, type
,
10407 TREE_OPERAND (arg0
, 1), arg1
));
10409 /* Two consecutive rotates adding up to the some integer
10410 multiple of the precision of the type can be ignored. */
10411 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
10412 && TREE_CODE (arg0
) == RROTATE_EXPR
10413 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10414 && wi::umod_trunc (wi::add (arg1
, TREE_OPERAND (arg0
, 1)),
10416 return TREE_OPERAND (arg0
, 0);
10421 tem
= fold_minmax (loc
, MIN_EXPR
, type
, arg0
, arg1
);
10427 tem
= fold_minmax (loc
, MAX_EXPR
, type
, arg0
, arg1
);
10432 case TRUTH_ANDIF_EXPR
:
10433 /* Note that the operands of this must be ints
10434 and their values must be 0 or 1.
10435 ("true" is a fixed value perhaps depending on the language.) */
10436 /* If first arg is constant zero, return it. */
10437 if (integer_zerop (arg0
))
10438 return fold_convert_loc (loc
, type
, arg0
);
10439 case TRUTH_AND_EXPR
:
10440 /* If either arg is constant true, drop it. */
10441 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
10442 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
10443 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
10444 /* Preserve sequence points. */
10445 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
10446 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10447 /* If second arg is constant zero, result is zero, but first arg
10448 must be evaluated. */
10449 if (integer_zerop (arg1
))
10450 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10451 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10452 case will be handled here. */
10453 if (integer_zerop (arg0
))
10454 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
10456 /* !X && X is always false. */
10457 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10458 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10459 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
10460 /* X && !X is always false. */
10461 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
10462 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10463 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
10465 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10466 means A >= Y && A != MAX, but in this case we know that
10469 if (!TREE_SIDE_EFFECTS (arg0
)
10470 && !TREE_SIDE_EFFECTS (arg1
))
10472 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
10473 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
10474 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
10476 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
10477 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
10478 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
10481 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
10487 case TRUTH_ORIF_EXPR
:
10488 /* Note that the operands of this must be ints
10489 and their values must be 0 or true.
10490 ("true" is a fixed value perhaps depending on the language.) */
10491 /* If first arg is constant true, return it. */
10492 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
10493 return fold_convert_loc (loc
, type
, arg0
);
10494 case TRUTH_OR_EXPR
:
10495 /* If either arg is constant zero, drop it. */
10496 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
10497 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
10498 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
10499 /* Preserve sequence points. */
10500 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
10501 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10502 /* If second arg is constant true, result is true, but we must
10503 evaluate first arg. */
10504 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
10505 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10506 /* Likewise for first arg, but note this only occurs here for
10508 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
10509 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
10511 /* !X || X is always true. */
10512 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10513 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10514 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
10515 /* X || !X is always true. */
10516 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
10517 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10518 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
10520 /* (X && !Y) || (!X && Y) is X ^ Y */
10521 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
10522 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
10524 tree a0
, a1
, l0
, l1
, n0
, n1
;
10526 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10527 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10529 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10530 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10532 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
10533 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
10535 if ((operand_equal_p (n0
, a0
, 0)
10536 && operand_equal_p (n1
, a1
, 0))
10537 || (operand_equal_p (n0
, a1
, 0)
10538 && operand_equal_p (n1
, a0
, 0)))
10539 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
10542 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
10548 case TRUTH_XOR_EXPR
:
10549 /* If the second arg is constant zero, drop it. */
10550 if (integer_zerop (arg1
))
10551 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10552 /* If the second arg is constant true, this is a logical inversion. */
10553 if (integer_onep (arg1
))
10555 tem
= invert_truthvalue_loc (loc
, arg0
);
10556 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
10558 /* Identical arguments cancel to zero. */
10559 if (operand_equal_p (arg0
, arg1
, 0))
10560 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
10562 /* !X ^ X is always true. */
10563 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10564 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10565 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
10567 /* X ^ !X is always true. */
10568 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
10569 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10570 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
10579 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
10580 if (tem
!= NULL_TREE
)
10583 /* bool_var != 1 becomes !bool_var. */
10584 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
10585 && code
== NE_EXPR
)
10586 return fold_convert_loc (loc
, type
,
10587 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
10588 TREE_TYPE (arg0
), arg0
));
10590 /* bool_var == 0 becomes !bool_var. */
10591 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
10592 && code
== EQ_EXPR
)
10593 return fold_convert_loc (loc
, type
,
10594 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
10595 TREE_TYPE (arg0
), arg0
));
10597 /* !exp != 0 becomes !exp */
10598 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
10599 && code
== NE_EXPR
)
10600 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10602 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10603 if ((TREE_CODE (arg0
) == PLUS_EXPR
10604 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
10605 || TREE_CODE (arg0
) == MINUS_EXPR
)
10606 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
10609 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10610 || POINTER_TYPE_P (TREE_TYPE (arg0
))))
10612 tree val
= TREE_OPERAND (arg0
, 1);
10613 return omit_two_operands_loc (loc
, type
,
10614 fold_build2_loc (loc
, code
, type
,
10616 build_int_cst (TREE_TYPE (val
),
10618 TREE_OPERAND (arg0
, 0), arg1
);
10621 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10622 if (TREE_CODE (arg0
) == MINUS_EXPR
10623 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == INTEGER_CST
10624 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
10627 && wi::extract_uhwi (TREE_OPERAND (arg0
, 0), 0, 1) == 1)
10629 return omit_two_operands_loc (loc
, type
,
10631 ? boolean_true_node
: boolean_false_node
,
10632 TREE_OPERAND (arg0
, 1), arg1
);
10635 /* If this is an EQ or NE comparison with zero and ARG0 is
10636 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10637 two operations, but the latter can be done in one less insn
10638 on machines that have only two-operand insns or on which a
10639 constant cannot be the first operand. */
10640 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10641 && integer_zerop (arg1
))
10643 tree arg00
= TREE_OPERAND (arg0
, 0);
10644 tree arg01
= TREE_OPERAND (arg0
, 1);
10645 if (TREE_CODE (arg00
) == LSHIFT_EXPR
10646 && integer_onep (TREE_OPERAND (arg00
, 0)))
10648 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
10649 arg01
, TREE_OPERAND (arg00
, 1));
10650 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
10651 build_int_cst (TREE_TYPE (arg0
), 1));
10652 return fold_build2_loc (loc
, code
, type
,
10653 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
10656 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
10657 && integer_onep (TREE_OPERAND (arg01
, 0)))
10659 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
10660 arg00
, TREE_OPERAND (arg01
, 1));
10661 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
10662 build_int_cst (TREE_TYPE (arg0
), 1));
10663 return fold_build2_loc (loc
, code
, type
,
10664 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
10669 /* If this is an NE or EQ comparison of zero against the result of a
10670 signed MOD operation whose second operand is a power of 2, make
10671 the MOD operation unsigned since it is simpler and equivalent. */
10672 if (integer_zerop (arg1
)
10673 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
10674 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
10675 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
10676 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
10677 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
10678 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
10680 tree newtype
= unsigned_type_for (TREE_TYPE (arg0
));
10681 tree newmod
= fold_build2_loc (loc
, TREE_CODE (arg0
), newtype
,
10682 fold_convert_loc (loc
, newtype
,
10683 TREE_OPERAND (arg0
, 0)),
10684 fold_convert_loc (loc
, newtype
,
10685 TREE_OPERAND (arg0
, 1)));
10687 return fold_build2_loc (loc
, code
, type
, newmod
,
10688 fold_convert_loc (loc
, newtype
, arg1
));
10691 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10692 C1 is a valid shift constant, and C2 is a power of two, i.e.
10694 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10695 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
10696 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
10698 && integer_pow2p (TREE_OPERAND (arg0
, 1))
10699 && integer_zerop (arg1
))
10701 tree itype
= TREE_TYPE (arg0
);
10702 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
10703 prec
= TYPE_PRECISION (itype
);
10705 /* Check for a valid shift count. */
10706 if (wi::ltu_p (arg001
, prec
))
10708 tree arg01
= TREE_OPERAND (arg0
, 1);
10709 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
10710 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
10711 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10712 can be rewritten as (X & (C2 << C1)) != 0. */
10713 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
10715 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
10716 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
10717 return fold_build2_loc (loc
, code
, type
, tem
,
10718 fold_convert_loc (loc
, itype
, arg1
));
10720 /* Otherwise, for signed (arithmetic) shifts,
10721 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10722 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10723 else if (!TYPE_UNSIGNED (itype
))
10724 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
10725 arg000
, build_int_cst (itype
, 0));
10726 /* Otherwise, of unsigned (logical) shifts,
10727 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10728 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10730 return omit_one_operand_loc (loc
, type
,
10731 code
== EQ_EXPR
? integer_one_node
10732 : integer_zero_node
,
10737 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10738 Similarly for NE_EXPR. */
10739 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10740 && TREE_CODE (arg1
) == INTEGER_CST
10741 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10743 tree notc
= fold_build1_loc (loc
, BIT_NOT_EXPR
,
10744 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
10745 TREE_OPERAND (arg0
, 1));
10747 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
10748 fold_convert_loc (loc
, TREE_TYPE (arg0
), arg1
),
10750 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
10751 if (integer_nonzerop (dandnotc
))
10752 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
10755 /* If this is a comparison of a field, we may be able to simplify it. */
10756 if ((TREE_CODE (arg0
) == COMPONENT_REF
10757 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
10758 /* Handle the constant case even without -O
10759 to make sure the warnings are given. */
10760 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
10762 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
10767 /* Optimize comparisons of strlen vs zero to a compare of the
10768 first character of the string vs zero. To wit,
10769 strlen(ptr) == 0 => *ptr == 0
10770 strlen(ptr) != 0 => *ptr != 0
10771 Other cases should reduce to one of these two (or a constant)
10772 due to the return value of strlen being unsigned. */
10773 if (TREE_CODE (arg0
) == CALL_EXPR
10774 && integer_zerop (arg1
))
10776 tree fndecl
= get_callee_fndecl (arg0
);
10779 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
10780 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
10781 && call_expr_nargs (arg0
) == 1
10782 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0))) == POINTER_TYPE
)
10784 tree iref
= build_fold_indirect_ref_loc (loc
,
10785 CALL_EXPR_ARG (arg0
, 0));
10786 return fold_build2_loc (loc
, code
, type
, iref
,
10787 build_int_cst (TREE_TYPE (iref
), 0));
10791 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10792 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10793 if (TREE_CODE (arg0
) == RSHIFT_EXPR
10794 && integer_zerop (arg1
)
10795 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10797 tree arg00
= TREE_OPERAND (arg0
, 0);
10798 tree arg01
= TREE_OPERAND (arg0
, 1);
10799 tree itype
= TREE_TYPE (arg00
);
10800 if (wi::eq_p (arg01
, element_precision (itype
) - 1))
10802 if (TYPE_UNSIGNED (itype
))
10804 itype
= signed_type_for (itype
);
10805 arg00
= fold_convert_loc (loc
, itype
, arg00
);
10807 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
10808 type
, arg00
, build_zero_cst (itype
));
10812 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10813 (X & C) == 0 when C is a single bit. */
10814 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10815 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
10816 && integer_zerop (arg1
)
10817 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
10819 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
10820 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
10821 TREE_OPERAND (arg0
, 1));
10822 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
10824 fold_convert_loc (loc
, TREE_TYPE (arg0
),
10828 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10829 constant C is a power of two, i.e. a single bit. */
10830 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10831 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
10832 && integer_zerop (arg1
)
10833 && integer_pow2p (TREE_OPERAND (arg0
, 1))
10834 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
10835 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
10837 tree arg00
= TREE_OPERAND (arg0
, 0);
10838 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
10839 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
10842 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10843 when is C is a power of two, i.e. a single bit. */
10844 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10845 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
10846 && integer_zerop (arg1
)
10847 && integer_pow2p (TREE_OPERAND (arg0
, 1))
10848 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
10849 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
10851 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
10852 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
10853 arg000
, TREE_OPERAND (arg0
, 1));
10854 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
10855 tem
, build_int_cst (TREE_TYPE (tem
), 0));
10858 if (integer_zerop (arg1
)
10859 && tree_expr_nonzero_p (arg0
))
10861 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
10862 return omit_one_operand_loc (loc
, type
, res
, arg0
);
10865 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
10866 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10867 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10869 tree arg00
= TREE_OPERAND (arg0
, 0);
10870 tree arg01
= TREE_OPERAND (arg0
, 1);
10871 tree arg10
= TREE_OPERAND (arg1
, 0);
10872 tree arg11
= TREE_OPERAND (arg1
, 1);
10873 tree itype
= TREE_TYPE (arg0
);
10875 if (operand_equal_p (arg01
, arg11
, 0))
10876 return fold_build2_loc (loc
, code
, type
,
10877 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
10878 fold_build2_loc (loc
,
10879 BIT_XOR_EXPR
, itype
,
10882 build_zero_cst (itype
));
10884 if (operand_equal_p (arg01
, arg10
, 0))
10885 return fold_build2_loc (loc
, code
, type
,
10886 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
10887 fold_build2_loc (loc
,
10888 BIT_XOR_EXPR
, itype
,
10891 build_zero_cst (itype
));
10893 if (operand_equal_p (arg00
, arg11
, 0))
10894 return fold_build2_loc (loc
, code
, type
,
10895 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
10896 fold_build2_loc (loc
,
10897 BIT_XOR_EXPR
, itype
,
10900 build_zero_cst (itype
));
10902 if (operand_equal_p (arg00
, arg10
, 0))
10903 return fold_build2_loc (loc
, code
, type
,
10904 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
10905 fold_build2_loc (loc
,
10906 BIT_XOR_EXPR
, itype
,
10909 build_zero_cst (itype
));
10912 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10913 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
10915 tree arg00
= TREE_OPERAND (arg0
, 0);
10916 tree arg01
= TREE_OPERAND (arg0
, 1);
10917 tree arg10
= TREE_OPERAND (arg1
, 0);
10918 tree arg11
= TREE_OPERAND (arg1
, 1);
10919 tree itype
= TREE_TYPE (arg0
);
10921 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10922 operand_equal_p guarantees no side-effects so we don't need
10923 to use omit_one_operand on Z. */
10924 if (operand_equal_p (arg01
, arg11
, 0))
10925 return fold_build2_loc (loc
, code
, type
, arg00
,
10926 fold_convert_loc (loc
, TREE_TYPE (arg00
),
10928 if (operand_equal_p (arg01
, arg10
, 0))
10929 return fold_build2_loc (loc
, code
, type
, arg00
,
10930 fold_convert_loc (loc
, TREE_TYPE (arg00
),
10932 if (operand_equal_p (arg00
, arg11
, 0))
10933 return fold_build2_loc (loc
, code
, type
, arg01
,
10934 fold_convert_loc (loc
, TREE_TYPE (arg01
),
10936 if (operand_equal_p (arg00
, arg10
, 0))
10937 return fold_build2_loc (loc
, code
, type
, arg01
,
10938 fold_convert_loc (loc
, TREE_TYPE (arg01
),
10941 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10942 if (TREE_CODE (arg01
) == INTEGER_CST
10943 && TREE_CODE (arg11
) == INTEGER_CST
)
10945 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
10946 fold_convert_loc (loc
, itype
, arg11
));
10947 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
10948 return fold_build2_loc (loc
, code
, type
, tem
,
10949 fold_convert_loc (loc
, itype
, arg10
));
10953 /* Attempt to simplify equality/inequality comparisons of complex
10954 values. Only lower the comparison if the result is known or
10955 can be simplified to a single scalar comparison. */
10956 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
10957 || TREE_CODE (arg0
) == COMPLEX_CST
)
10958 && (TREE_CODE (arg1
) == COMPLEX_EXPR
10959 || TREE_CODE (arg1
) == COMPLEX_CST
))
10961 tree real0
, imag0
, real1
, imag1
;
10964 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
10966 real0
= TREE_OPERAND (arg0
, 0);
10967 imag0
= TREE_OPERAND (arg0
, 1);
10971 real0
= TREE_REALPART (arg0
);
10972 imag0
= TREE_IMAGPART (arg0
);
10975 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
10977 real1
= TREE_OPERAND (arg1
, 0);
10978 imag1
= TREE_OPERAND (arg1
, 1);
10982 real1
= TREE_REALPART (arg1
);
10983 imag1
= TREE_IMAGPART (arg1
);
10986 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
10987 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
10989 if (integer_zerop (rcond
))
10991 if (code
== EQ_EXPR
)
10992 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
10994 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
10998 if (code
== NE_EXPR
)
10999 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
11001 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
11005 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
11006 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
11008 if (integer_zerop (icond
))
11010 if (code
== EQ_EXPR
)
11011 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
11013 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
11017 if (code
== NE_EXPR
)
11018 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
11020 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
11031 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
11032 if (tem
!= NULL_TREE
)
11035 /* Transform comparisons of the form X +- C CMP X. */
11036 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
11037 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11038 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
11039 && !HONOR_SNANS (arg0
))
11040 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11041 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
11043 tree arg01
= TREE_OPERAND (arg0
, 1);
11044 enum tree_code code0
= TREE_CODE (arg0
);
11047 if (TREE_CODE (arg01
) == REAL_CST
)
11048 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
11050 is_positive
= tree_int_cst_sgn (arg01
);
11052 /* (X - c) > X becomes false. */
11053 if (code
== GT_EXPR
11054 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
11055 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
11057 if (TREE_CODE (arg01
) == INTEGER_CST
11058 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11059 fold_overflow_warning (("assuming signed overflow does not "
11060 "occur when assuming that (X - c) > X "
11061 "is always false"),
11062 WARN_STRICT_OVERFLOW_ALL
);
11063 return constant_boolean_node (0, type
);
11066 /* Likewise (X + c) < X becomes false. */
11067 if (code
== LT_EXPR
11068 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
11069 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
11071 if (TREE_CODE (arg01
) == INTEGER_CST
11072 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11073 fold_overflow_warning (("assuming signed overflow does not "
11074 "occur when assuming that "
11075 "(X + c) < X is always false"),
11076 WARN_STRICT_OVERFLOW_ALL
);
11077 return constant_boolean_node (0, type
);
11080 /* Convert (X - c) <= X to true. */
11081 if (!HONOR_NANS (arg1
)
11083 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
11084 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
11086 if (TREE_CODE (arg01
) == INTEGER_CST
11087 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11088 fold_overflow_warning (("assuming signed overflow does not "
11089 "occur when assuming that "
11090 "(X - c) <= X is always true"),
11091 WARN_STRICT_OVERFLOW_ALL
);
11092 return constant_boolean_node (1, type
);
11095 /* Convert (X + c) >= X to true. */
11096 if (!HONOR_NANS (arg1
)
11098 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
11099 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
11101 if (TREE_CODE (arg01
) == INTEGER_CST
11102 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11103 fold_overflow_warning (("assuming signed overflow does not "
11104 "occur when assuming that "
11105 "(X + c) >= X is always true"),
11106 WARN_STRICT_OVERFLOW_ALL
);
11107 return constant_boolean_node (1, type
);
11110 if (TREE_CODE (arg01
) == INTEGER_CST
)
11112 /* Convert X + c > X and X - c < X to true for integers. */
11113 if (code
== GT_EXPR
11114 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
11115 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
11117 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11118 fold_overflow_warning (("assuming signed overflow does "
11119 "not occur when assuming that "
11120 "(X + c) > X is always true"),
11121 WARN_STRICT_OVERFLOW_ALL
);
11122 return constant_boolean_node (1, type
);
11125 if (code
== LT_EXPR
11126 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
11127 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
11129 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11130 fold_overflow_warning (("assuming signed overflow does "
11131 "not occur when assuming that "
11132 "(X - c) < X is always true"),
11133 WARN_STRICT_OVERFLOW_ALL
);
11134 return constant_boolean_node (1, type
);
11137 /* Convert X + c <= X and X - c >= X to false for integers. */
11138 if (code
== LE_EXPR
11139 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
11140 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
11142 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11143 fold_overflow_warning (("assuming signed overflow does "
11144 "not occur when assuming that "
11145 "(X + c) <= X is always false"),
11146 WARN_STRICT_OVERFLOW_ALL
);
11147 return constant_boolean_node (0, type
);
11150 if (code
== GE_EXPR
11151 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
11152 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
11154 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11155 fold_overflow_warning (("assuming signed overflow does "
11156 "not occur when assuming that "
11157 "(X - c) >= X is always false"),
11158 WARN_STRICT_OVERFLOW_ALL
);
11159 return constant_boolean_node (0, type
);
11164 /* If we are comparing an ABS_EXPR with a constant, we can
11165 convert all the cases into explicit comparisons, but they may
11166 well not be faster than doing the ABS and one comparison.
11167 But ABS (X) <= C is a range comparison, which becomes a subtraction
11168 and a comparison, and is probably faster. */
11169 if (code
== LE_EXPR
11170 && TREE_CODE (arg1
) == INTEGER_CST
11171 && TREE_CODE (arg0
) == ABS_EXPR
11172 && ! TREE_SIDE_EFFECTS (arg0
)
11173 && (0 != (tem
= negate_expr (arg1
)))
11174 && TREE_CODE (tem
) == INTEGER_CST
11175 && !TREE_OVERFLOW (tem
))
11176 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
11177 build2 (GE_EXPR
, type
,
11178 TREE_OPERAND (arg0
, 0), tem
),
11179 build2 (LE_EXPR
, type
,
11180 TREE_OPERAND (arg0
, 0), arg1
));
11182 /* Convert ABS_EXPR<x> >= 0 to true. */
11183 strict_overflow_p
= false;
11184 if (code
== GE_EXPR
11185 && (integer_zerop (arg1
)
11186 || (! HONOR_NANS (arg0
)
11187 && real_zerop (arg1
)))
11188 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
11190 if (strict_overflow_p
)
11191 fold_overflow_warning (("assuming signed overflow does not occur "
11192 "when simplifying comparison of "
11193 "absolute value and zero"),
11194 WARN_STRICT_OVERFLOW_CONDITIONAL
);
11195 return omit_one_operand_loc (loc
, type
,
11196 constant_boolean_node (true, type
),
11200 /* Convert ABS_EXPR<x> < 0 to false. */
11201 strict_overflow_p
= false;
11202 if (code
== LT_EXPR
11203 && (integer_zerop (arg1
) || real_zerop (arg1
))
11204 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
11206 if (strict_overflow_p
)
11207 fold_overflow_warning (("assuming signed overflow does not occur "
11208 "when simplifying comparison of "
11209 "absolute value and zero"),
11210 WARN_STRICT_OVERFLOW_CONDITIONAL
);
11211 return omit_one_operand_loc (loc
, type
,
11212 constant_boolean_node (false, type
),
11216 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11217 and similarly for >= into !=. */
11218 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
11219 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
11220 && TREE_CODE (arg1
) == LSHIFT_EXPR
11221 && integer_onep (TREE_OPERAND (arg1
, 0)))
11222 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
11223 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
11224 TREE_OPERAND (arg1
, 1)),
11225 build_zero_cst (TREE_TYPE (arg0
)));
11227 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11228 otherwise Y might be >= # of bits in X's type and thus e.g.
11229 (unsigned char) (1 << Y) for Y 15 might be 0.
11230 If the cast is widening, then 1 << Y should have unsigned type,
11231 otherwise if Y is number of bits in the signed shift type minus 1,
11232 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11233 31 might be 0xffffffff80000000. */
11234 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
11235 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
11236 && CONVERT_EXPR_P (arg1
)
11237 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
11238 && (element_precision (TREE_TYPE (arg1
))
11239 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
11240 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
11241 || (element_precision (TREE_TYPE (arg1
))
11242 == element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0)))))
11243 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
11245 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
11246 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
11247 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
11248 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
11249 build_zero_cst (TREE_TYPE (arg0
)));
11254 case UNORDERED_EXPR
:
11262 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11264 tree targ0
= strip_float_extensions (arg0
);
11265 tree targ1
= strip_float_extensions (arg1
);
11266 tree newtype
= TREE_TYPE (targ0
);
11268 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
11269 newtype
= TREE_TYPE (targ1
);
11271 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
11272 return fold_build2_loc (loc
, code
, type
,
11273 fold_convert_loc (loc
, newtype
, targ0
),
11274 fold_convert_loc (loc
, newtype
, targ1
));
11279 case COMPOUND_EXPR
:
11280 /* When pedantic, a compound expression can be neither an lvalue
11281 nor an integer constant expression. */
11282 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
11284 /* Don't let (0, 0) be null pointer constant. */
11285 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
11286 : fold_convert_loc (loc
, type
, arg1
);
11287 return pedantic_non_lvalue_loc (loc
, tem
);
11290 /* An ASSERT_EXPR should never be passed to fold_binary. */
11291 gcc_unreachable ();
11295 } /* switch (code) */
11298 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11299 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11303 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
/* walk_tree callback: dispatch on the code of *TP.  Per the header
   comment above, a LABEL_EXPR is what the walk reports back to the
   caller.  NOTE(review): the case labels and return statements of
   this switch appear to have been elided by extraction -- confirm
   against the original fold-const.c before relying on this text.  */
11305 switch (TREE_CODE (*tp
))
/* Stop the walk from descending into this subtree (see the "Do not
   check the subtrees" note in the header comment above).  */
11311 *walk_subtrees
= 0;
11313 /* ... fall through ... */
11320 /* Return whether the sub-tree ST contains a label which is accessible from
11321 outside the sub-tree. */
11324 contains_label_p (tree st
)
/* Walk ST once per node (duplicates skipped) with contains_label_1;
   a non-NULL_TREE walk result means a label was found, so the
   comparison below yields the boolean answer.  NOTE(review): the
   'return' keyword and braces seem to have been elided by
   extraction -- verify against the original source.  */
11327 (walk_tree_without_duplicates (&st
, contains_label_1
, NULL
) != NULL_TREE
);
11330 /* Fold a ternary expression of code CODE and type TYPE with operands
11331 OP0, OP1, and OP2. Return the folded expression if folding is
11332 successful. Otherwise, return NULL_TREE. */
11335 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
11336 tree op0
, tree op1
, tree op2
)
11339 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
11340 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
11342 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
11343 && TREE_CODE_LENGTH (code
) == 3);
11345 /* If this is a commutative operation, and OP0 is a constant, move it
11346 to OP1 to reduce the number of tests below. */
11347 if (commutative_ternary_tree_code (code
)
11348 && tree_swap_operands_p (op0
, op1
, true))
11349 return fold_build3_loc (loc
, code
, type
, op1
, op0
, op2
);
11351 tem
= generic_simplify (loc
, code
, type
, op0
, op1
, op2
);
11355 /* Strip any conversions that don't change the mode. This is safe
11356 for every expression, except for a comparison expression because
11357 its signedness is derived from its operands. So, in the latter
11358 case, only strip conversions that don't change the signedness.
11360 Note that this is done as an internal manipulation within the
11361 constant folder, in order to find the simplest representation of
11362 the arguments so that their form can be studied. In any cases,
11363 the appropriate type conversions should be put back in the tree
11364 that will get out of the constant folder. */
11385 case COMPONENT_REF
:
11386 if (TREE_CODE (arg0
) == CONSTRUCTOR
11387 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
11389 unsigned HOST_WIDE_INT idx
;
11391 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
11398 case VEC_COND_EXPR
:
11399 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11400 so all simple results must be passed through pedantic_non_lvalue. */
11401 if (TREE_CODE (arg0
) == INTEGER_CST
)
11403 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
11404 tem
= integer_zerop (arg0
) ? op2
: op1
;
11405 /* Only optimize constant conditions when the selected branch
11406 has the same type as the COND_EXPR. This avoids optimizing
11407 away "c ? x : throw", where the throw has a void type.
11408 Avoid throwing away that operand which contains label. */
11409 if ((!TREE_SIDE_EFFECTS (unused_op
)
11410 || !contains_label_p (unused_op
))
11411 && (! VOID_TYPE_P (TREE_TYPE (tem
))
11412 || VOID_TYPE_P (type
)))
11413 return pedantic_non_lvalue_loc (loc
, tem
);
11416 else if (TREE_CODE (arg0
) == VECTOR_CST
)
11418 if ((TREE_CODE (arg1
) == VECTOR_CST
11419 || TREE_CODE (arg1
) == CONSTRUCTOR
)
11420 && (TREE_CODE (arg2
) == VECTOR_CST
11421 || TREE_CODE (arg2
) == CONSTRUCTOR
))
11423 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
11424 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
11425 gcc_assert (nelts
== VECTOR_CST_NELTS (arg0
));
11426 for (i
= 0; i
< nelts
; i
++)
11428 tree val
= VECTOR_CST_ELT (arg0
, i
);
11429 if (integer_all_onesp (val
))
11431 else if (integer_zerop (val
))
11432 sel
[i
] = nelts
+ i
;
11433 else /* Currently unreachable. */
11436 tree t
= fold_vec_perm (type
, arg1
, arg2
, sel
);
11437 if (t
!= NULL_TREE
)
11442 /* If we have A op B ? A : C, we may be able to convert this to a
11443 simpler expression, depending on the operation and the values
11444 of B and C. Signed zeros prevent all of these transformations,
11445 for reasons given above each one.
11447 Also try swapping the arguments and inverting the conditional. */
11448 if (COMPARISON_CLASS_P (arg0
)
11449 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
11450 arg1
, TREE_OPERAND (arg0
, 1))
11451 && !HONOR_SIGNED_ZEROS (element_mode (arg1
)))
11453 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
11458 if (COMPARISON_CLASS_P (arg0
)
11459 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
11461 TREE_OPERAND (arg0
, 1))
11462 && !HONOR_SIGNED_ZEROS (element_mode (op2
)))
11464 location_t loc0
= expr_location_or (arg0
, loc
);
11465 tem
= fold_invert_truthvalue (loc0
, arg0
);
11466 if (tem
&& COMPARISON_CLASS_P (tem
))
11468 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
11474 /* If the second operand is simpler than the third, swap them
11475 since that produces better jump optimization results. */
11476 if (truth_value_p (TREE_CODE (arg0
))
11477 && tree_swap_operands_p (op1
, op2
, false))
11479 location_t loc0
= expr_location_or (arg0
, loc
);
11480 /* See if this can be inverted. If it can't, possibly because
11481 it was a floating-point inequality comparison, don't do
11483 tem
= fold_invert_truthvalue (loc0
, arg0
);
11485 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
11488 /* Convert A ? 1 : 0 to simply A. */
11489 if ((code
== VEC_COND_EXPR
? integer_all_onesp (op1
)
11490 : (integer_onep (op1
)
11491 && !VECTOR_TYPE_P (type
)))
11492 && integer_zerop (op2
)
11493 /* If we try to convert OP0 to our type, the
11494 call to fold will try to move the conversion inside
11495 a COND, which will recurse. In that case, the COND_EXPR
11496 is probably the best choice, so leave it alone. */
11497 && type
== TREE_TYPE (arg0
))
11498 return pedantic_non_lvalue_loc (loc
, arg0
);
11500 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11501 over COND_EXPR in cases such as floating point comparisons. */
11502 if (integer_zerop (op1
)
11503 && (code
== VEC_COND_EXPR
? integer_all_onesp (op2
)
11504 : (integer_onep (op2
)
11505 && !VECTOR_TYPE_P (type
)))
11506 && truth_value_p (TREE_CODE (arg0
)))
11507 return pedantic_non_lvalue_loc (loc
,
11508 fold_convert_loc (loc
, type
,
11509 invert_truthvalue_loc (loc
,
11512 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11513 if (TREE_CODE (arg0
) == LT_EXPR
11514 && integer_zerop (TREE_OPERAND (arg0
, 1))
11515 && integer_zerop (op2
)
11516 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
11518 /* sign_bit_p looks through both zero and sign extensions,
11519 but for this optimization only sign extensions are
11521 tree tem2
= TREE_OPERAND (arg0
, 0);
11522 while (tem
!= tem2
)
11524 if (TREE_CODE (tem2
) != NOP_EXPR
11525 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
11530 tem2
= TREE_OPERAND (tem2
, 0);
11532 /* sign_bit_p only checks ARG1 bits within A's precision.
11533 If <sign bit of A> has wider type than A, bits outside
11534 of A's precision in <sign bit of A> need to be checked.
11535 If they are all 0, this optimization needs to be done
11536 in unsigned A's type, if they are all 1 in signed A's type,
11537 otherwise this can't be done. */
11539 && TYPE_PRECISION (TREE_TYPE (tem
))
11540 < TYPE_PRECISION (TREE_TYPE (arg1
))
11541 && TYPE_PRECISION (TREE_TYPE (tem
))
11542 < TYPE_PRECISION (type
))
11544 int inner_width
, outer_width
;
11547 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
11548 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
11549 if (outer_width
> TYPE_PRECISION (type
))
11550 outer_width
= TYPE_PRECISION (type
);
11552 wide_int mask
= wi::shifted_mask
11553 (inner_width
, outer_width
- inner_width
, false,
11554 TYPE_PRECISION (TREE_TYPE (arg1
)));
11556 wide_int common
= mask
& arg1
;
11557 if (common
== mask
)
11559 tem_type
= signed_type_for (TREE_TYPE (tem
));
11560 tem
= fold_convert_loc (loc
, tem_type
, tem
);
11562 else if (common
== 0)
11564 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
11565 tem
= fold_convert_loc (loc
, tem_type
, tem
);
11573 fold_convert_loc (loc
, type
,
11574 fold_build2_loc (loc
, BIT_AND_EXPR
,
11575 TREE_TYPE (tem
), tem
,
11576 fold_convert_loc (loc
,
11581 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11582 already handled above. */
11583 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11584 && integer_onep (TREE_OPERAND (arg0
, 1))
11585 && integer_zerop (op2
)
11586 && integer_pow2p (arg1
))
11588 tree tem
= TREE_OPERAND (arg0
, 0);
11590 if (TREE_CODE (tem
) == RSHIFT_EXPR
11591 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1))
11592 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
11593 tree_to_uhwi (TREE_OPERAND (tem
, 1)))
11594 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11595 TREE_OPERAND (tem
, 0), arg1
);
11598 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11599 is probably obsolete because the first operand should be a
11600 truth value (that's why we have the two cases above), but let's
11601 leave it in until we can confirm this for all front-ends. */
11602 if (integer_zerop (op2
)
11603 && TREE_CODE (arg0
) == NE_EXPR
11604 && integer_zerop (TREE_OPERAND (arg0
, 1))
11605 && integer_pow2p (arg1
)
11606 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
11607 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11608 arg1
, OEP_ONLY_CONST
))
11609 return pedantic_non_lvalue_loc (loc
,
11610 fold_convert_loc (loc
, type
,
11611 TREE_OPERAND (arg0
, 0)));
11613 /* Disable the transformations below for vectors, since
11614 fold_binary_op_with_conditional_arg may undo them immediately,
11615 yielding an infinite loop. */
11616 if (code
== VEC_COND_EXPR
)
11619 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11620 if (integer_zerop (op2
)
11621 && truth_value_p (TREE_CODE (arg0
))
11622 && truth_value_p (TREE_CODE (arg1
))
11623 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
11624 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
? BIT_AND_EXPR
11625 : TRUTH_ANDIF_EXPR
,
11626 type
, fold_convert_loc (loc
, type
, arg0
), arg1
);
11628 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11629 if (code
== VEC_COND_EXPR
? integer_all_onesp (op2
) : integer_onep (op2
)
11630 && truth_value_p (TREE_CODE (arg0
))
11631 && truth_value_p (TREE_CODE (arg1
))
11632 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
11634 location_t loc0
= expr_location_or (arg0
, loc
);
11635 /* Only perform transformation if ARG0 is easily inverted. */
11636 tem
= fold_invert_truthvalue (loc0
, arg0
);
11638 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
11641 type
, fold_convert_loc (loc
, type
, tem
),
11645 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11646 if (integer_zerop (arg1
)
11647 && truth_value_p (TREE_CODE (arg0
))
11648 && truth_value_p (TREE_CODE (op2
))
11649 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
11651 location_t loc0
= expr_location_or (arg0
, loc
);
11652 /* Only perform transformation if ARG0 is easily inverted. */
11653 tem
= fold_invert_truthvalue (loc0
, arg0
);
11655 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
11656 ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR
,
11657 type
, fold_convert_loc (loc
, type
, tem
),
11661 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11662 if (code
== VEC_COND_EXPR
? integer_all_onesp (arg1
) : integer_onep (arg1
)
11663 && truth_value_p (TREE_CODE (arg0
))
11664 && truth_value_p (TREE_CODE (op2
))
11665 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
11666 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
11667 ? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR
,
11668 type
, fold_convert_loc (loc
, type
, arg0
), op2
);
11673 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11674 of fold_ternary on them. */
11675 gcc_unreachable ();
11677 case BIT_FIELD_REF
:
11678 if ((TREE_CODE (arg0
) == VECTOR_CST
11679 || (TREE_CODE (arg0
) == CONSTRUCTOR
11680 && TREE_CODE (TREE_TYPE (arg0
)) == VECTOR_TYPE
))
11681 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
11682 || (TREE_CODE (type
) == VECTOR_TYPE
11683 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
)))))
11685 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
11686 unsigned HOST_WIDE_INT width
= tree_to_uhwi (TYPE_SIZE (eltype
));
11687 unsigned HOST_WIDE_INT n
= tree_to_uhwi (arg1
);
11688 unsigned HOST_WIDE_INT idx
= tree_to_uhwi (op2
);
11691 && (idx
% width
) == 0
11692 && (n
% width
) == 0
11693 && ((idx
+ n
) / width
) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
11698 if (TREE_CODE (arg0
) == VECTOR_CST
)
11701 return VECTOR_CST_ELT (arg0
, idx
);
11703 tree
*vals
= XALLOCAVEC (tree
, n
);
11704 for (unsigned i
= 0; i
< n
; ++i
)
11705 vals
[i
] = VECTOR_CST_ELT (arg0
, idx
+ i
);
11706 return build_vector (type
, vals
);
11709 /* Constructor elements can be subvectors. */
11710 unsigned HOST_WIDE_INT k
= 1;
11711 if (CONSTRUCTOR_NELTS (arg0
) != 0)
11713 tree cons_elem
= TREE_TYPE (CONSTRUCTOR_ELT (arg0
, 0)->value
);
11714 if (TREE_CODE (cons_elem
) == VECTOR_TYPE
)
11715 k
= TYPE_VECTOR_SUBPARTS (cons_elem
);
11718 /* We keep an exact subset of the constructor elements. */
11719 if ((idx
% k
) == 0 && (n
% k
) == 0)
11721 if (CONSTRUCTOR_NELTS (arg0
) == 0)
11722 return build_constructor (type
, NULL
);
11727 if (idx
< CONSTRUCTOR_NELTS (arg0
))
11728 return CONSTRUCTOR_ELT (arg0
, idx
)->value
;
11729 return build_zero_cst (type
);
11732 vec
<constructor_elt
, va_gc
> *vals
;
11733 vec_alloc (vals
, n
);
11734 for (unsigned i
= 0;
11735 i
< n
&& idx
+ i
< CONSTRUCTOR_NELTS (arg0
);
11737 CONSTRUCTOR_APPEND_ELT (vals
, NULL_TREE
,
11739 (arg0
, idx
+ i
)->value
);
11740 return build_constructor (type
, vals
);
11742 /* The bitfield references a single constructor element. */
11743 else if (idx
+ n
<= (idx
/ k
+ 1) * k
)
11745 if (CONSTRUCTOR_NELTS (arg0
) <= idx
/ k
)
11746 return build_zero_cst (type
);
11748 return CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
;
11750 return fold_build3_loc (loc
, code
, type
,
11751 CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
, op1
,
11752 build_int_cst (TREE_TYPE (op2
), (idx
% k
) * width
));
11757 /* A bit-field-ref that referenced the full argument can be stripped. */
11758 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11759 && TYPE_PRECISION (TREE_TYPE (arg0
)) == tree_to_uhwi (arg1
)
11760 && integer_zerop (op2
))
11761 return fold_convert_loc (loc
, type
, arg0
);
11763 /* On constants we can use native encode/interpret to constant
11764 fold (nearly) all BIT_FIELD_REFs. */
11765 if (CONSTANT_CLASS_P (arg0
)
11766 && can_native_interpret_type_p (type
)
11767 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)))
11768 /* This limitation should not be necessary, we just need to
11769 round this up to mode size. */
11770 && tree_to_uhwi (op1
) % BITS_PER_UNIT
== 0
11771 /* Need bit-shifting of the buffer to relax the following. */
11772 && tree_to_uhwi (op2
) % BITS_PER_UNIT
== 0)
11774 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
11775 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (op1
);
11776 unsigned HOST_WIDE_INT clen
;
11777 clen
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)));
11778 /* ??? We cannot tell native_encode_expr to start at
11779 some random byte only. So limit us to a reasonable amount
11783 unsigned char *b
= XALLOCAVEC (unsigned char, clen
);
11784 unsigned HOST_WIDE_INT len
= native_encode_expr (arg0
, b
, clen
);
11786 && len
* BITS_PER_UNIT
>= bitpos
+ bitsize
)
11788 tree v
= native_interpret_expr (type
,
11789 b
+ bitpos
/ BITS_PER_UNIT
,
11790 bitsize
/ BITS_PER_UNIT
);
11800 /* For integers we can decompose the FMA if possible. */
11801 if (TREE_CODE (arg0
) == INTEGER_CST
11802 && TREE_CODE (arg1
) == INTEGER_CST
)
11803 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
11804 const_binop (MULT_EXPR
, arg0
, arg1
), arg2
);
11805 if (integer_zerop (arg2
))
11806 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11808 return fold_fma (loc
, type
, arg0
, arg1
, arg2
);
11810 case VEC_PERM_EXPR
:
11811 if (TREE_CODE (arg2
) == VECTOR_CST
)
11813 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
, mask
, mask2
;
11814 unsigned char *sel
= XALLOCAVEC (unsigned char, 2 * nelts
);
11815 unsigned char *sel2
= sel
+ nelts
;
11816 bool need_mask_canon
= false;
11817 bool need_mask_canon2
= false;
11818 bool all_in_vec0
= true;
11819 bool all_in_vec1
= true;
11820 bool maybe_identity
= true;
11821 bool single_arg
= (op0
== op1
);
11822 bool changed
= false;
11824 mask2
= 2 * nelts
- 1;
11825 mask
= single_arg
? (nelts
- 1) : mask2
;
11826 gcc_assert (nelts
== VECTOR_CST_NELTS (arg2
));
11827 for (i
= 0; i
< nelts
; i
++)
11829 tree val
= VECTOR_CST_ELT (arg2
, i
);
11830 if (TREE_CODE (val
) != INTEGER_CST
)
11833 /* Make sure that the perm value is in an acceptable
11836 need_mask_canon
|= wi::gtu_p (t
, mask
);
11837 need_mask_canon2
|= wi::gtu_p (t
, mask2
);
11838 sel
[i
] = t
.to_uhwi () & mask
;
11839 sel2
[i
] = t
.to_uhwi () & mask2
;
11841 if (sel
[i
] < nelts
)
11842 all_in_vec1
= false;
11844 all_in_vec0
= false;
11846 if ((sel
[i
] & (nelts
-1)) != i
)
11847 maybe_identity
= false;
11850 if (maybe_identity
)
11860 else if (all_in_vec1
)
11863 for (i
= 0; i
< nelts
; i
++)
11865 need_mask_canon
= true;
11868 if ((TREE_CODE (op0
) == VECTOR_CST
11869 || TREE_CODE (op0
) == CONSTRUCTOR
)
11870 && (TREE_CODE (op1
) == VECTOR_CST
11871 || TREE_CODE (op1
) == CONSTRUCTOR
))
11873 tree t
= fold_vec_perm (type
, op0
, op1
, sel
);
11874 if (t
!= NULL_TREE
)
11878 if (op0
== op1
&& !single_arg
)
11881 /* Some targets are deficient and fail to expand a single
11882 argument permutation while still allowing an equivalent
11883 2-argument version. */
11884 if (need_mask_canon
&& arg2
== op2
11885 && !can_vec_perm_p (TYPE_MODE (type
), false, sel
)
11886 && can_vec_perm_p (TYPE_MODE (type
), false, sel2
))
11888 need_mask_canon
= need_mask_canon2
;
11892 if (need_mask_canon
&& arg2
== op2
)
11894 tree
*tsel
= XALLOCAVEC (tree
, nelts
);
11895 tree eltype
= TREE_TYPE (TREE_TYPE (arg2
));
11896 for (i
= 0; i
< nelts
; i
++)
11897 tsel
[i
] = build_int_cst (eltype
, sel
[i
]);
11898 op2
= build_vector (TREE_TYPE (arg2
), tsel
);
11903 return build3_loc (loc
, VEC_PERM_EXPR
, type
, op0
, op1
, op2
);
11909 } /* switch (code) */
11912 /* Perform constant folding and related simplification of EXPR.
11913 The related simplifications include x*1 => x, x*0 => 0, etc.,
11914 and application of the associative law.
11915 NOP_EXPR conversions may be removed freely (as long as we
11916 are careful not to change the type of the overall expression).
11917 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11918 but we can constant-fold them if they have constant operands. */
11920 #ifdef ENABLE_FOLD_CHECKING
11921 # define fold(x) fold_1 (x)
11922 static tree
fold_1 (tree
);
11928 const tree t
= expr
;
11929 enum tree_code code
= TREE_CODE (t
);
11930 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
11932 location_t loc
= EXPR_LOCATION (expr
);
11934 /* Return right away if a constant. */
11935 if (kind
== tcc_constant
)
11938 /* CALL_EXPR-like objects with variable numbers of operands are
11939 treated specially. */
11940 if (kind
== tcc_vl_exp
)
11942 if (code
== CALL_EXPR
)
11944 tem
= fold_call_expr (loc
, expr
, false);
11945 return tem
? tem
: expr
;
11950 if (IS_EXPR_CODE_CLASS (kind
))
11952 tree type
= TREE_TYPE (t
);
11953 tree op0
, op1
, op2
;
11955 switch (TREE_CODE_LENGTH (code
))
11958 op0
= TREE_OPERAND (t
, 0);
11959 tem
= fold_unary_loc (loc
, code
, type
, op0
);
11960 return tem
? tem
: expr
;
11962 op0
= TREE_OPERAND (t
, 0);
11963 op1
= TREE_OPERAND (t
, 1);
11964 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
11965 return tem
? tem
: expr
;
11967 op0
= TREE_OPERAND (t
, 0);
11968 op1
= TREE_OPERAND (t
, 1);
11969 op2
= TREE_OPERAND (t
, 2);
11970 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
11971 return tem
? tem
: expr
;
11981 tree op0
= TREE_OPERAND (t
, 0);
11982 tree op1
= TREE_OPERAND (t
, 1);
11984 if (TREE_CODE (op1
) == INTEGER_CST
11985 && TREE_CODE (op0
) == CONSTRUCTOR
11986 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
11988 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (op0
);
11989 unsigned HOST_WIDE_INT end
= vec_safe_length (elts
);
11990 unsigned HOST_WIDE_INT begin
= 0;
11992 /* Find a matching index by means of a binary search. */
11993 while (begin
!= end
)
11995 unsigned HOST_WIDE_INT middle
= (begin
+ end
) / 2;
11996 tree index
= (*elts
)[middle
].index
;
11998 if (TREE_CODE (index
) == INTEGER_CST
11999 && tree_int_cst_lt (index
, op1
))
12000 begin
= middle
+ 1;
12001 else if (TREE_CODE (index
) == INTEGER_CST
12002 && tree_int_cst_lt (op1
, index
))
12004 else if (TREE_CODE (index
) == RANGE_EXPR
12005 && tree_int_cst_lt (TREE_OPERAND (index
, 1), op1
))
12006 begin
= middle
+ 1;
12007 else if (TREE_CODE (index
) == RANGE_EXPR
12008 && tree_int_cst_lt (op1
, TREE_OPERAND (index
, 0)))
12011 return (*elts
)[middle
].value
;
12018 /* Return a VECTOR_CST if possible. */
12021 tree type
= TREE_TYPE (t
);
12022 if (TREE_CODE (type
) != VECTOR_TYPE
)
12025 tree
*vec
= XALLOCAVEC (tree
, TYPE_VECTOR_SUBPARTS (type
));
12026 unsigned HOST_WIDE_INT idx
, pos
= 0;
12029 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), idx
, value
)
12031 if (!CONSTANT_CLASS_P (value
))
12033 if (TREE_CODE (value
) == VECTOR_CST
)
12035 for (unsigned i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
12036 vec
[pos
++] = VECTOR_CST_ELT (value
, i
);
12039 vec
[pos
++] = value
;
12041 for (; pos
< TYPE_VECTOR_SUBPARTS (type
); ++pos
)
12042 vec
[pos
] = build_zero_cst (TREE_TYPE (type
));
12044 return build_vector (type
, vec
);
12048 return fold (DECL_INITIAL (t
));
12052 } /* switch (code) */
12055 #ifdef ENABLE_FOLD_CHECKING
12058 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
12059 hash_table
<nofree_ptr_hash
<const tree_node
> > *);
12060 static void fold_check_failed (const_tree
, const_tree
);
12061 void print_fold_checksum (const_tree
);
12063 /* When --enable-checking=fold, compute a digest of expr before
12064 and after actual fold call to see if fold did not accidentally
12065 change original expr. */
/* Body of the ENABLE_FOLD_CHECKING wrapper around fold_1: hash EXPR
   before and after folding and fail hard if fold_1 mutated its input.
   NOTE(review): the enclosing function signature and braces appear to
   have been elided by extraction.  */
12071 struct md5_ctx ctx
;
12072 unsigned char checksum_before
[16], checksum_after
[16];
12073 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
/* Digest of EXPR before calling fold_1.  The hash table HT tracks
   already-visited nodes so shared subtrees are processed once.  */
12075 md5_init_ctx (&ctx
);
12076 fold_checksum_tree (expr
, &ctx
, &ht
);
12077 md5_finish_ctx (&ctx
, checksum_before
);
12080 ret
= fold_1 (expr
);
/* Digest of the (supposedly untouched) original EXPR after folding.  */
12082 md5_init_ctx (&ctx
);
12083 fold_checksum_tree (expr
, &ctx
, &ht
);
12084 md5_finish_ctx (&ctx
, checksum_after
);
/* Any difference means fold_1 modified EXPR in place -- a bug, since
   fold must return a new/simplified tree without altering its input.  */
12086 if (memcmp (checksum_before
, checksum_after
, 16))
12087 fold_check_failed (expr
, ret
);
12093 print_fold_checksum (const_tree expr
)
12095 struct md5_ctx ctx
;
12096 unsigned char checksum
[16], cnt
;
12097 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12099 md5_init_ctx (&ctx
);
12100 fold_checksum_tree (expr
, &ctx
, &ht
);
12101 md5_finish_ctx (&ctx
, checksum
);
12102 for (cnt
= 0; cnt
< 16; ++cnt
)
12103 fprintf (stderr
, "%02x", checksum
[cnt
]);
12104 putc ('\n', stderr
);
12108 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
12110 internal_error ("fold check: original tree changed by fold");
12114 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
12115 hash_table
<nofree_ptr_hash
<const tree_node
> > *ht
)
12117 const tree_node
**slot
;
12118 enum tree_code code
;
12119 union tree_node buf
;
12125 slot
= ht
->find_slot (expr
, INSERT
);
12129 code
= TREE_CODE (expr
);
12130 if (TREE_CODE_CLASS (code
) == tcc_declaration
12131 && HAS_DECL_ASSEMBLER_NAME_P (expr
))
12133 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12134 memcpy ((char *) &buf
, expr
, tree_size (expr
));
12135 SET_DECL_ASSEMBLER_NAME ((tree
)&buf
, NULL
);
12136 buf
.decl_with_vis
.symtab_node
= NULL
;
12137 expr
= (tree
) &buf
;
12139 else if (TREE_CODE_CLASS (code
) == tcc_type
12140 && (TYPE_POINTER_TO (expr
)
12141 || TYPE_REFERENCE_TO (expr
)
12142 || TYPE_CACHED_VALUES_P (expr
)
12143 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
12144 || TYPE_NEXT_VARIANT (expr
)))
12146 /* Allow these fields to be modified. */
12148 memcpy ((char *) &buf
, expr
, tree_size (expr
));
12149 expr
= tmp
= (tree
) &buf
;
12150 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
12151 TYPE_POINTER_TO (tmp
) = NULL
;
12152 TYPE_REFERENCE_TO (tmp
) = NULL
;
12153 TYPE_NEXT_VARIANT (tmp
) = NULL
;
12154 if (TYPE_CACHED_VALUES_P (tmp
))
12156 TYPE_CACHED_VALUES_P (tmp
) = 0;
12157 TYPE_CACHED_VALUES (tmp
) = NULL
;
12160 md5_process_bytes (expr
, tree_size (expr
), ctx
);
12161 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
12162 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
12163 if (TREE_CODE_CLASS (code
) != tcc_type
12164 && TREE_CODE_CLASS (code
) != tcc_declaration
12165 && code
!= TREE_LIST
12166 && code
!= SSA_NAME
12167 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
12168 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
12169 switch (TREE_CODE_CLASS (code
))
12175 md5_process_bytes (TREE_STRING_POINTER (expr
),
12176 TREE_STRING_LENGTH (expr
), ctx
);
12179 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
12180 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
12183 for (i
= 0; i
< (int) VECTOR_CST_NELTS (expr
); ++i
)
12184 fold_checksum_tree (VECTOR_CST_ELT (expr
, i
), ctx
, ht
);
12190 case tcc_exceptional
:
12194 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
12195 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
12196 expr
= TREE_CHAIN (expr
);
12197 goto recursive_label
;
12200 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
12201 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
12207 case tcc_expression
:
12208 case tcc_reference
:
12209 case tcc_comparison
:
12212 case tcc_statement
:
12214 len
= TREE_OPERAND_LENGTH (expr
);
12215 for (i
= 0; i
< len
; ++i
)
12216 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
12218 case tcc_declaration
:
12219 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
12220 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
12221 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
12223 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
12224 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
12225 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
12226 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
12227 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
12230 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
12232 if (TREE_CODE (expr
) == FUNCTION_DECL
)
12234 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
12235 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
12237 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
12241 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
12242 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
12243 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
12244 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
12245 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
12246 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
12247 if (INTEGRAL_TYPE_P (expr
)
12248 || SCALAR_FLOAT_TYPE_P (expr
))
12250 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
12251 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
12253 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
12254 if (TREE_CODE (expr
) == RECORD_TYPE
12255 || TREE_CODE (expr
) == UNION_TYPE
12256 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
12257 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
12258 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
12265 /* Helper function for outputting the checksum of a tree T. When
12266 debugging with gdb, you can "define mynext" to be "next" followed
12267 by "call debug_fold_checksum (op0)", then just trace down till the
12270 DEBUG_FUNCTION
void
12271 debug_fold_checksum (const_tree t
)
12274 unsigned char checksum
[16];
12275 struct md5_ctx ctx
;
12276 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12278 md5_init_ctx (&ctx
);
12279 fold_checksum_tree (t
, &ctx
, &ht
);
12280 md5_finish_ctx (&ctx
, checksum
);
12283 for (i
= 0; i
< 16; i
++)
12284 fprintf (stderr
, "%d ", checksum
[i
]);
12286 fprintf (stderr
, "\n");
12291 /* Fold a unary tree expression with code CODE of type TYPE with an
12292 operand OP0. LOC is the location of the resulting expression.
12293 Return a folded expression if successful. Otherwise, return a tree
12294 expression with code CODE of type TYPE with an operand OP0. */
12297 fold_build1_stat_loc (location_t loc
,
12298 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
12301 #ifdef ENABLE_FOLD_CHECKING
12302 unsigned char checksum_before
[16], checksum_after
[16];
12303 struct md5_ctx ctx
;
12304 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12306 md5_init_ctx (&ctx
);
12307 fold_checksum_tree (op0
, &ctx
, &ht
);
12308 md5_finish_ctx (&ctx
, checksum_before
);
12312 tem
= fold_unary_loc (loc
, code
, type
, op0
);
12314 tem
= build1_stat_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
12316 #ifdef ENABLE_FOLD_CHECKING
12317 md5_init_ctx (&ctx
);
12318 fold_checksum_tree (op0
, &ctx
, &ht
);
12319 md5_finish_ctx (&ctx
, checksum_after
);
12321 if (memcmp (checksum_before
, checksum_after
, 16))
12322 fold_check_failed (op0
, tem
);
12327 /* Fold a binary tree expression with code CODE of type TYPE with
12328 operands OP0 and OP1. LOC is the location of the resulting
12329 expression. Return a folded expression if successful. Otherwise,
12330 return a tree expression with code CODE of type TYPE with operands
12334 fold_build2_stat_loc (location_t loc
,
12335 enum tree_code code
, tree type
, tree op0
, tree op1
12339 #ifdef ENABLE_FOLD_CHECKING
12340 unsigned char checksum_before_op0
[16],
12341 checksum_before_op1
[16],
12342 checksum_after_op0
[16],
12343 checksum_after_op1
[16];
12344 struct md5_ctx ctx
;
12345 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12347 md5_init_ctx (&ctx
);
12348 fold_checksum_tree (op0
, &ctx
, &ht
);
12349 md5_finish_ctx (&ctx
, checksum_before_op0
);
12352 md5_init_ctx (&ctx
);
12353 fold_checksum_tree (op1
, &ctx
, &ht
);
12354 md5_finish_ctx (&ctx
, checksum_before_op1
);
12358 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
12360 tem
= build2_stat_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
12362 #ifdef ENABLE_FOLD_CHECKING
12363 md5_init_ctx (&ctx
);
12364 fold_checksum_tree (op0
, &ctx
, &ht
);
12365 md5_finish_ctx (&ctx
, checksum_after_op0
);
12368 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
12369 fold_check_failed (op0
, tem
);
12371 md5_init_ctx (&ctx
);
12372 fold_checksum_tree (op1
, &ctx
, &ht
);
12373 md5_finish_ctx (&ctx
, checksum_after_op1
);
12375 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
12376 fold_check_failed (op1
, tem
);
12381 /* Fold a ternary tree expression with code CODE of type TYPE with
12382 operands OP0, OP1, and OP2. Return a folded expression if
12383 successful. Otherwise, return a tree expression with code CODE of
12384 type TYPE with operands OP0, OP1, and OP2. */
12387 fold_build3_stat_loc (location_t loc
, enum tree_code code
, tree type
,
12388 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
12391 #ifdef ENABLE_FOLD_CHECKING
12392 unsigned char checksum_before_op0
[16],
12393 checksum_before_op1
[16],
12394 checksum_before_op2
[16],
12395 checksum_after_op0
[16],
12396 checksum_after_op1
[16],
12397 checksum_after_op2
[16];
12398 struct md5_ctx ctx
;
12399 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12401 md5_init_ctx (&ctx
);
12402 fold_checksum_tree (op0
, &ctx
, &ht
);
12403 md5_finish_ctx (&ctx
, checksum_before_op0
);
12406 md5_init_ctx (&ctx
);
12407 fold_checksum_tree (op1
, &ctx
, &ht
);
12408 md5_finish_ctx (&ctx
, checksum_before_op1
);
12411 md5_init_ctx (&ctx
);
12412 fold_checksum_tree (op2
, &ctx
, &ht
);
12413 md5_finish_ctx (&ctx
, checksum_before_op2
);
12417 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
12418 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
12420 tem
= build3_stat_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
12422 #ifdef ENABLE_FOLD_CHECKING
12423 md5_init_ctx (&ctx
);
12424 fold_checksum_tree (op0
, &ctx
, &ht
);
12425 md5_finish_ctx (&ctx
, checksum_after_op0
);
12428 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
12429 fold_check_failed (op0
, tem
);
12431 md5_init_ctx (&ctx
);
12432 fold_checksum_tree (op1
, &ctx
, &ht
);
12433 md5_finish_ctx (&ctx
, checksum_after_op1
);
12436 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
12437 fold_check_failed (op1
, tem
);
12439 md5_init_ctx (&ctx
);
12440 fold_checksum_tree (op2
, &ctx
, &ht
);
12441 md5_finish_ctx (&ctx
, checksum_after_op2
);
12443 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
12444 fold_check_failed (op2
, tem
);
12449 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12450 arguments in ARGARRAY, and a null static chain.
12451 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12452 of type TYPE from the given operands as constructed by build_call_array. */
12455 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
12456 int nargs
, tree
*argarray
)
12459 #ifdef ENABLE_FOLD_CHECKING
12460 unsigned char checksum_before_fn
[16],
12461 checksum_before_arglist
[16],
12462 checksum_after_fn
[16],
12463 checksum_after_arglist
[16];
12464 struct md5_ctx ctx
;
12465 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12468 md5_init_ctx (&ctx
);
12469 fold_checksum_tree (fn
, &ctx
, &ht
);
12470 md5_finish_ctx (&ctx
, checksum_before_fn
);
12473 md5_init_ctx (&ctx
);
12474 for (i
= 0; i
< nargs
; i
++)
12475 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
12476 md5_finish_ctx (&ctx
, checksum_before_arglist
);
12480 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
12482 tem
= build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
12484 #ifdef ENABLE_FOLD_CHECKING
12485 md5_init_ctx (&ctx
);
12486 fold_checksum_tree (fn
, &ctx
, &ht
);
12487 md5_finish_ctx (&ctx
, checksum_after_fn
);
12490 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
12491 fold_check_failed (fn
, tem
);
12493 md5_init_ctx (&ctx
);
12494 for (i
= 0; i
< nargs
; i
++)
12495 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
12496 md5_finish_ctx (&ctx
, checksum_after_arglist
);
12498 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
12499 fold_check_failed (NULL_TREE
, tem
);
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

/* Save the trap-related flags and disable them for the duration of
   initializer folding; END_FOLD_INIT restores every one.  Note that
   flag_trapv must be cleared here too — it is saved and restored, and
   leaving it set would defeat the "ignore run-time traps" contract.  */
#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
12528 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
12529 tree type
, tree op
)
12534 result
= fold_build1_loc (loc
, code
, type
, op
);
12541 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
12542 tree type
, tree op0
, tree op1
)
12547 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
12554 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
12555 int nargs
, tree
*argarray
)
12560 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
12566 #undef START_FOLD_INIT
12567 #undef END_FOLD_INIT
12569 /* Determine if first argument is a multiple of second argument. Return 0 if
12570 it is not, or we cannot easily determined it to be.
12572 An example of the sort of thing we care about (at this point; this routine
12573 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12574 fold cases do now) is discovering that
12576 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12582 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12584 This code also handles discovering that
12586 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12588 is a multiple of 8 so we don't have to worry about dealing with a
12589 possible remainder.
12591 Note that we *look* inside a SAVE_EXPR only to determine how it was
12592 calculated; it is not safe for fold to do much of anything else with the
12593 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12594 at run time. For example, the latter example above *cannot* be implemented
12595 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12596 evaluation time of the original SAVE_EXPR is not necessarily the same at
12597 the time the new expression is evaluated. The only optimization of this
12598 sort that would be valid is changing
12600 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12604 SAVE_EXPR (I) * SAVE_EXPR (J)
12606 (where the same SAVE_EXPR (J) is used in the original and the
12607 transformed version). */
12610 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
12612 if (operand_equal_p (top
, bottom
, 0))
12615 if (TREE_CODE (type
) != INTEGER_TYPE
)
12618 switch (TREE_CODE (top
))
12621 /* Bitwise and provides a power of two multiple. If the mask is
12622 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12623 if (!integer_pow2p (bottom
))
12628 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
12629 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
12633 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
12634 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
12637 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
12641 op1
= TREE_OPERAND (top
, 1);
12642 /* const_binop may not detect overflow correctly,
12643 so check for it explicitly here. */
12644 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
12645 && 0 != (t1
= fold_convert (type
,
12646 const_binop (LSHIFT_EXPR
,
12649 && !TREE_OVERFLOW (t1
))
12650 return multiple_of_p (type
, t1
, bottom
);
12655 /* Can't handle conversions from non-integral or wider integral type. */
12656 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
12657 || (TYPE_PRECISION (type
)
12658 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
12661 /* .. fall through ... */
12664 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
12667 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
12668 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
12671 if (TREE_CODE (bottom
) != INTEGER_CST
12672 || integer_zerop (bottom
)
12673 || (TYPE_UNSIGNED (type
)
12674 && (tree_int_cst_sgn (top
) < 0
12675 || tree_int_cst_sgn (bottom
) < 0)))
12677 return wi::multiple_of_p (wi::to_widest (top
), wi::to_widest (bottom
),
12685 #define tree_expr_nonnegative_warnv_p(X, Y) \
12686 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12688 #define RECURSE(X) \
12689 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12691 /* Return true if CODE or TYPE is known to be non-negative. */
12694 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
12696 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
12697 && truth_value_p (code
))
12698 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12699 have a signed:1 type (where the value is -1 and 0). */
12704 /* Return true if (CODE OP0) is known to be non-negative. If the return
12705 value is based on the assumption that signed overflow is undefined,
12706 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12707 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12710 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
12711 bool *strict_overflow_p
, int depth
)
12713 if (TYPE_UNSIGNED (type
))
12719 /* We can't return 1 if flag_wrapv is set because
12720 ABS_EXPR<INT_MIN> = INT_MIN. */
12721 if (!ANY_INTEGRAL_TYPE_P (type
))
12723 if (TYPE_OVERFLOW_UNDEFINED (type
))
12725 *strict_overflow_p
= true;
12730 case NON_LVALUE_EXPR
:
12732 case FIX_TRUNC_EXPR
:
12733 return RECURSE (op0
);
12737 tree inner_type
= TREE_TYPE (op0
);
12738 tree outer_type
= type
;
12740 if (TREE_CODE (outer_type
) == REAL_TYPE
)
12742 if (TREE_CODE (inner_type
) == REAL_TYPE
)
12743 return RECURSE (op0
);
12744 if (INTEGRAL_TYPE_P (inner_type
))
12746 if (TYPE_UNSIGNED (inner_type
))
12748 return RECURSE (op0
);
12751 else if (INTEGRAL_TYPE_P (outer_type
))
12753 if (TREE_CODE (inner_type
) == REAL_TYPE
)
12754 return RECURSE (op0
);
12755 if (INTEGRAL_TYPE_P (inner_type
))
12756 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
12757 && TYPE_UNSIGNED (inner_type
);
12763 return tree_simple_nonnegative_warnv_p (code
, type
);
12766 /* We don't know sign of `t', so be conservative and return false. */
12770 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12771 value is based on the assumption that signed overflow is undefined,
12772 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12773 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12776 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
12777 tree op1
, bool *strict_overflow_p
,
12780 if (TYPE_UNSIGNED (type
))
12785 case POINTER_PLUS_EXPR
:
12787 if (FLOAT_TYPE_P (type
))
12788 return RECURSE (op0
) && RECURSE (op1
);
12790 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12791 both unsigned and at least 2 bits shorter than the result. */
12792 if (TREE_CODE (type
) == INTEGER_TYPE
12793 && TREE_CODE (op0
) == NOP_EXPR
12794 && TREE_CODE (op1
) == NOP_EXPR
)
12796 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
12797 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
12798 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
12799 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
12801 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
12802 TYPE_PRECISION (inner2
)) + 1;
12803 return prec
< TYPE_PRECISION (type
);
12809 if (FLOAT_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
12811 /* x * x is always non-negative for floating point x
12812 or without overflow. */
12813 if (operand_equal_p (op0
, op1
, 0)
12814 || (RECURSE (op0
) && RECURSE (op1
)))
12816 if (ANY_INTEGRAL_TYPE_P (type
)
12817 && TYPE_OVERFLOW_UNDEFINED (type
))
12818 *strict_overflow_p
= true;
12823 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12824 both unsigned and their total bits is shorter than the result. */
12825 if (TREE_CODE (type
) == INTEGER_TYPE
12826 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
12827 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
12829 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
12830 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
12832 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
12833 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
12836 bool unsigned0
= TYPE_UNSIGNED (inner0
);
12837 bool unsigned1
= TYPE_UNSIGNED (inner1
);
12839 if (TREE_CODE (op0
) == INTEGER_CST
)
12840 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
12842 if (TREE_CODE (op1
) == INTEGER_CST
)
12843 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
12845 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
12846 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
12848 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
12849 ? tree_int_cst_min_precision (op0
, UNSIGNED
)
12850 : TYPE_PRECISION (inner0
);
12852 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
12853 ? tree_int_cst_min_precision (op1
, UNSIGNED
)
12854 : TYPE_PRECISION (inner1
);
12856 return precision0
+ precision1
< TYPE_PRECISION (type
);
12863 return RECURSE (op0
) || RECURSE (op1
);
12869 case TRUNC_DIV_EXPR
:
12870 case CEIL_DIV_EXPR
:
12871 case FLOOR_DIV_EXPR
:
12872 case ROUND_DIV_EXPR
:
12873 return RECURSE (op0
) && RECURSE (op1
);
12875 case TRUNC_MOD_EXPR
:
12876 return RECURSE (op0
);
12878 case FLOOR_MOD_EXPR
:
12879 return RECURSE (op1
);
12881 case CEIL_MOD_EXPR
:
12882 case ROUND_MOD_EXPR
:
12884 return tree_simple_nonnegative_warnv_p (code
, type
);
12887 /* We don't know sign of `t', so be conservative and return false. */
12891 /* Return true if T is known to be non-negative. If the return
12892 value is based on the assumption that signed overflow is undefined,
12893 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12894 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12897 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
12899 if (TREE_CODE (t
) == SSA_NAME
12900 && name_registered_for_update_p (t
))
12903 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
12906 switch (TREE_CODE (t
))
12909 return tree_int_cst_sgn (t
) >= 0;
12912 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
12915 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
12918 return RECURSE (TREE_OPERAND (t
, 1)) && RECURSE (TREE_OPERAND (t
, 2));
12921 /* Limit the depth of recursion to avoid quadratic behavior.
12922 This is expected to catch almost all occurrences in practice.
12923 If this code misses important cases that unbounded recursion
12924 would not, passes that need this information could be revised
12925 to provide it through dataflow propagation. */
12926 if (depth
< PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH
))
12927 return gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t
),
12928 strict_overflow_p
, depth
);
12932 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
), TREE_TYPE (t
));
12936 /* Return true if T is known to be non-negative. If the return
12937 value is based on the assumption that signed overflow is undefined,
12938 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12939 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12942 tree_call_nonnegative_warnv_p (tree type
, tree fndecl
, tree arg0
, tree arg1
,
12943 bool *strict_overflow_p
, int depth
)
12945 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
12946 switch (DECL_FUNCTION_CODE (fndecl
))
12948 CASE_FLT_FN (BUILT_IN_ACOS
):
12949 CASE_FLT_FN (BUILT_IN_ACOSH
):
12950 CASE_FLT_FN (BUILT_IN_CABS
):
12951 CASE_FLT_FN (BUILT_IN_COSH
):
12952 CASE_FLT_FN (BUILT_IN_ERFC
):
12953 CASE_FLT_FN (BUILT_IN_EXP
):
12954 CASE_FLT_FN (BUILT_IN_EXP10
):
12955 CASE_FLT_FN (BUILT_IN_EXP2
):
12956 CASE_FLT_FN (BUILT_IN_FABS
):
12957 CASE_FLT_FN (BUILT_IN_FDIM
):
12958 CASE_FLT_FN (BUILT_IN_HYPOT
):
12959 CASE_FLT_FN (BUILT_IN_POW10
):
12960 CASE_INT_FN (BUILT_IN_FFS
):
12961 CASE_INT_FN (BUILT_IN_PARITY
):
12962 CASE_INT_FN (BUILT_IN_POPCOUNT
):
12963 CASE_INT_FN (BUILT_IN_CLZ
):
12964 CASE_INT_FN (BUILT_IN_CLRSB
):
12965 case BUILT_IN_BSWAP32
:
12966 case BUILT_IN_BSWAP64
:
12970 CASE_FLT_FN (BUILT_IN_SQRT
):
12971 /* sqrt(-0.0) is -0.0. */
12972 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
12974 return RECURSE (arg0
);
12976 CASE_FLT_FN (BUILT_IN_ASINH
):
12977 CASE_FLT_FN (BUILT_IN_ATAN
):
12978 CASE_FLT_FN (BUILT_IN_ATANH
):
12979 CASE_FLT_FN (BUILT_IN_CBRT
):
12980 CASE_FLT_FN (BUILT_IN_CEIL
):
12981 CASE_FLT_FN (BUILT_IN_ERF
):
12982 CASE_FLT_FN (BUILT_IN_EXPM1
):
12983 CASE_FLT_FN (BUILT_IN_FLOOR
):
12984 CASE_FLT_FN (BUILT_IN_FMOD
):
12985 CASE_FLT_FN (BUILT_IN_FREXP
):
12986 CASE_FLT_FN (BUILT_IN_ICEIL
):
12987 CASE_FLT_FN (BUILT_IN_IFLOOR
):
12988 CASE_FLT_FN (BUILT_IN_IRINT
):
12989 CASE_FLT_FN (BUILT_IN_IROUND
):
12990 CASE_FLT_FN (BUILT_IN_LCEIL
):
12991 CASE_FLT_FN (BUILT_IN_LDEXP
):
12992 CASE_FLT_FN (BUILT_IN_LFLOOR
):
12993 CASE_FLT_FN (BUILT_IN_LLCEIL
):
12994 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
12995 CASE_FLT_FN (BUILT_IN_LLRINT
):
12996 CASE_FLT_FN (BUILT_IN_LLROUND
):
12997 CASE_FLT_FN (BUILT_IN_LRINT
):
12998 CASE_FLT_FN (BUILT_IN_LROUND
):
12999 CASE_FLT_FN (BUILT_IN_MODF
):
13000 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
13001 CASE_FLT_FN (BUILT_IN_RINT
):
13002 CASE_FLT_FN (BUILT_IN_ROUND
):
13003 CASE_FLT_FN (BUILT_IN_SCALB
):
13004 CASE_FLT_FN (BUILT_IN_SCALBLN
):
13005 CASE_FLT_FN (BUILT_IN_SCALBN
):
13006 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
13007 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
13008 CASE_FLT_FN (BUILT_IN_SINH
):
13009 CASE_FLT_FN (BUILT_IN_TANH
):
13010 CASE_FLT_FN (BUILT_IN_TRUNC
):
13011 /* True if the 1st argument is nonnegative. */
13012 return RECURSE (arg0
);
13014 CASE_FLT_FN (BUILT_IN_FMAX
):
13015 /* True if the 1st OR 2nd arguments are nonnegative. */
13016 return RECURSE (arg0
) || RECURSE (arg1
);
13018 CASE_FLT_FN (BUILT_IN_FMIN
):
13019 /* True if the 1st AND 2nd arguments are nonnegative. */
13020 return RECURSE (arg0
) && RECURSE (arg1
);
13022 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
13023 /* True if the 2nd argument is nonnegative. */
13024 return RECURSE (arg1
);
13026 CASE_FLT_FN (BUILT_IN_POWI
):
13027 /* True if the 1st argument is nonnegative or the second
13028 argument is an even integer. */
13029 if (TREE_CODE (arg1
) == INTEGER_CST
13030 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
13032 return RECURSE (arg0
);
13034 CASE_FLT_FN (BUILT_IN_POW
):
13035 /* True if the 1st argument is nonnegative or the second
13036 argument is an even integer valued real. */
13037 if (TREE_CODE (arg1
) == REAL_CST
)
13042 c
= TREE_REAL_CST (arg1
);
13043 n
= real_to_integer (&c
);
13046 REAL_VALUE_TYPE cint
;
13047 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
13048 if (real_identical (&c
, &cint
))
13052 return RECURSE (arg0
);
13057 return tree_simple_nonnegative_warnv_p (CALL_EXPR
, type
);
13060 /* Return true if T is known to be non-negative. If the return
13061 value is based on the assumption that signed overflow is undefined,
13062 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13063 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13066 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
13068 enum tree_code code
= TREE_CODE (t
);
13069 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
13076 tree temp
= TARGET_EXPR_SLOT (t
);
13077 t
= TARGET_EXPR_INITIAL (t
);
13079 /* If the initializer is non-void, then it's a normal expression
13080 that will be assigned to the slot. */
13081 if (!VOID_TYPE_P (t
))
13082 return RECURSE (t
);
13084 /* Otherwise, the initializer sets the slot in some way. One common
13085 way is an assignment statement at the end of the initializer. */
13088 if (TREE_CODE (t
) == BIND_EXPR
)
13089 t
= expr_last (BIND_EXPR_BODY (t
));
13090 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
13091 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
13092 t
= expr_last (TREE_OPERAND (t
, 0));
13093 else if (TREE_CODE (t
) == STATEMENT_LIST
)
13098 if (TREE_CODE (t
) == MODIFY_EXPR
13099 && TREE_OPERAND (t
, 0) == temp
)
13100 return RECURSE (TREE_OPERAND (t
, 1));
13107 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
13108 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
13110 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
13111 get_callee_fndecl (t
),
13114 strict_overflow_p
, depth
);
13116 case COMPOUND_EXPR
:
13118 return RECURSE (TREE_OPERAND (t
, 1));
13121 return RECURSE (expr_last (TREE_OPERAND (t
, 1)));
13124 return RECURSE (TREE_OPERAND (t
, 0));
13127 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
), TREE_TYPE (t
));
13132 #undef tree_expr_nonnegative_warnv_p
13134 /* Return true if T is known to be non-negative. If the return
13135 value is based on the assumption that signed overflow is undefined,
13136 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13137 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13140 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
13142 enum tree_code code
;
13143 if (t
== error_mark_node
)
13146 code
= TREE_CODE (t
);
13147 switch (TREE_CODE_CLASS (code
))
13150 case tcc_comparison
:
13151 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
13153 TREE_OPERAND (t
, 0),
13154 TREE_OPERAND (t
, 1),
13155 strict_overflow_p
, depth
);
13158 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
13160 TREE_OPERAND (t
, 0),
13161 strict_overflow_p
, depth
);
13164 case tcc_declaration
:
13165 case tcc_reference
:
13166 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
13174 case TRUTH_AND_EXPR
:
13175 case TRUTH_OR_EXPR
:
13176 case TRUTH_XOR_EXPR
:
13177 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
13179 TREE_OPERAND (t
, 0),
13180 TREE_OPERAND (t
, 1),
13181 strict_overflow_p
, depth
);
13182 case TRUTH_NOT_EXPR
:
13183 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
13185 TREE_OPERAND (t
, 0),
13186 strict_overflow_p
, depth
);
13193 case WITH_SIZE_EXPR
:
13195 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
13198 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
13202 /* Return true if `t' is known to be non-negative. Handle warnings
13203 about undefined signed overflow. */
13206 tree_expr_nonnegative_p (tree t
)
13208 bool ret
, strict_overflow_p
;
13210 strict_overflow_p
= false;
13211 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
13212 if (strict_overflow_p
)
13213 fold_overflow_warning (("assuming signed overflow does not occur when "
13214 "determining that expression is always "
13216 WARN_STRICT_OVERFLOW_MISC
);
13221 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13222 For floating point we further ensure that T is not denormal.
13223 Similar logic is present in nonzero_address in rtlanal.h.
13225 If the return value is based on the assumption that signed overflow
13226 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13227 change *STRICT_OVERFLOW_P. */
13230 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
13231 bool *strict_overflow_p
)
13236 return tree_expr_nonzero_warnv_p (op0
,
13237 strict_overflow_p
);
13241 tree inner_type
= TREE_TYPE (op0
);
13242 tree outer_type
= type
;
13244 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
13245 && tree_expr_nonzero_warnv_p (op0
,
13246 strict_overflow_p
));
13250 case NON_LVALUE_EXPR
:
13251 return tree_expr_nonzero_warnv_p (op0
,
13252 strict_overflow_p
);
13261 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13262 For floating point we further ensure that T is not denormal.
13263 Similar logic is present in nonzero_address in rtlanal.h.
13265 If the return value is based on the assumption that signed overflow
13266 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13267 change *STRICT_OVERFLOW_P. */
13270 tree_binary_nonzero_warnv_p (enum tree_code code
,
13273 tree op1
, bool *strict_overflow_p
)
13275 bool sub_strict_overflow_p
;
13278 case POINTER_PLUS_EXPR
:
13280 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_UNDEFINED (type
))
13282 /* With the presence of negative values it is hard
13283 to say something. */
13284 sub_strict_overflow_p
= false;
13285 if (!tree_expr_nonnegative_warnv_p (op0
,
13286 &sub_strict_overflow_p
)
13287 || !tree_expr_nonnegative_warnv_p (op1
,
13288 &sub_strict_overflow_p
))
13290 /* One of operands must be positive and the other non-negative. */
13291 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13292 overflows, on a twos-complement machine the sum of two
13293 nonnegative numbers can never be zero. */
13294 return (tree_expr_nonzero_warnv_p (op0
,
13296 || tree_expr_nonzero_warnv_p (op1
,
13297 strict_overflow_p
));
13302 if (TYPE_OVERFLOW_UNDEFINED (type
))
13304 if (tree_expr_nonzero_warnv_p (op0
,
13306 && tree_expr_nonzero_warnv_p (op1
,
13307 strict_overflow_p
))
13309 *strict_overflow_p
= true;
13316 sub_strict_overflow_p
= false;
13317 if (tree_expr_nonzero_warnv_p (op0
,
13318 &sub_strict_overflow_p
)
13319 && tree_expr_nonzero_warnv_p (op1
,
13320 &sub_strict_overflow_p
))
13322 if (sub_strict_overflow_p
)
13323 *strict_overflow_p
= true;
13328 sub_strict_overflow_p
= false;
13329 if (tree_expr_nonzero_warnv_p (op0
,
13330 &sub_strict_overflow_p
))
13332 if (sub_strict_overflow_p
)
13333 *strict_overflow_p
= true;
13335 /* When both operands are nonzero, then MAX must be too. */
13336 if (tree_expr_nonzero_warnv_p (op1
,
13337 strict_overflow_p
))
13340 /* MAX where operand 0 is positive is positive. */
13341 return tree_expr_nonnegative_warnv_p (op0
,
13342 strict_overflow_p
);
13344 /* MAX where operand 1 is positive is positive. */
13345 else if (tree_expr_nonzero_warnv_p (op1
,
13346 &sub_strict_overflow_p
)
13347 && tree_expr_nonnegative_warnv_p (op1
,
13348 &sub_strict_overflow_p
))
13350 if (sub_strict_overflow_p
)
13351 *strict_overflow_p
= true;
13357 return (tree_expr_nonzero_warnv_p (op1
,
13359 || tree_expr_nonzero_warnv_p (op0
,
13360 strict_overflow_p
));
13369 /* Return true when T is an address and is known to be nonzero.
13370 For floating point we further ensure that T is not denormal.
13371 Similar logic is present in nonzero_address in rtlanal.h.
13373 If the return value is based on the assumption that signed overflow
13374 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13375 change *STRICT_OVERFLOW_P. */
13378 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
13380 bool sub_strict_overflow_p
;
13381 switch (TREE_CODE (t
))
13384 return !integer_zerop (t
);
13388 tree base
= TREE_OPERAND (t
, 0);
13390 if (!DECL_P (base
))
13391 base
= get_base_address (base
);
13396 /* For objects in symbol table check if we know they are non-zero.
13397 Don't do anything for variables and functions before symtab is built;
13398 it is quite possible that they will be declared weak later. */
13399 if (DECL_P (base
) && decl_in_symtab_p (base
))
13401 struct symtab_node
*symbol
;
13403 symbol
= symtab_node::get_create (base
);
13405 return symbol
->nonzero_address ();
13410 /* Function local objects are never NULL. */
13412 && (DECL_CONTEXT (base
)
13413 && TREE_CODE (DECL_CONTEXT (base
)) == FUNCTION_DECL
13414 && auto_var_in_fn_p (base
, DECL_CONTEXT (base
))))
13417 /* Constants are never weak. */
13418 if (CONSTANT_CLASS_P (base
))
13425 sub_strict_overflow_p
= false;
13426 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
13427 &sub_strict_overflow_p
)
13428 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
13429 &sub_strict_overflow_p
))
13431 if (sub_strict_overflow_p
)
13432 *strict_overflow_p
= true;
13443 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13444 attempt to fold the expression to a constant without modifying TYPE,
13447 If the expression could be simplified to a constant, then return
13448 the constant. If the expression would not be simplified to a
13449 constant, then return NULL_TREE. */
13452 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
13454 tree tem
= fold_binary (code
, type
, op0
, op1
);
13455 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
13458 /* Given the components of a unary expression CODE, TYPE and OP0,
13459 attempt to fold the expression to a constant without modifying
13462 If the expression could be simplified to a constant, then return
13463 the constant. If the expression would not be simplified to a
13464 constant, then return NULL_TREE. */
13467 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
13469 tree tem
= fold_unary (code
, type
, op0
);
13470 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
13473 /* If EXP represents referencing an element in a constant string
13474 (either via pointer arithmetic or array indexing), return the
13475 tree representing the value accessed, otherwise return NULL. */
13478 fold_read_from_constant_string (tree exp
)
13480 if ((TREE_CODE (exp
) == INDIRECT_REF
13481 || TREE_CODE (exp
) == ARRAY_REF
)
13482 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
13484 tree exp1
= TREE_OPERAND (exp
, 0);
13487 location_t loc
= EXPR_LOCATION (exp
);
13489 if (TREE_CODE (exp
) == INDIRECT_REF
)
13490 string
= string_constant (exp1
, &index
);
13493 tree low_bound
= array_ref_low_bound (exp
);
13494 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
13496 /* Optimize the special-case of a zero lower bound.
13498 We convert the low_bound to sizetype to avoid some problems
13499 with constant folding. (E.g. suppose the lower bound is 1,
13500 and its mode is QI. Without the conversion,l (ARRAY
13501 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13502 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13503 if (! integer_zerop (low_bound
))
13504 index
= size_diffop_loc (loc
, index
,
13505 fold_convert_loc (loc
, sizetype
, low_bound
));
13511 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
13512 && TREE_CODE (string
) == STRING_CST
13513 && TREE_CODE (index
) == INTEGER_CST
13514 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
13515 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
13517 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
13518 return build_int_cst_type (TREE_TYPE (exp
),
13519 (TREE_STRING_POINTER (string
)
13520 [TREE_INT_CST_LOW (index
)]));
13525 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13526 an integer constant, real, or fixed-point constant.
13528 TYPE is the type of the result. */
13531 fold_negate_const (tree arg0
, tree type
)
13533 tree t
= NULL_TREE
;
13535 switch (TREE_CODE (arg0
))
13540 wide_int val
= wi::neg (arg0
, &overflow
);
13541 t
= force_fit_type (type
, val
, 1,
13542 (overflow
| TREE_OVERFLOW (arg0
))
13543 && !TYPE_UNSIGNED (type
));
13548 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
13553 FIXED_VALUE_TYPE f
;
13554 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
13555 &(TREE_FIXED_CST (arg0
)), NULL
,
13556 TYPE_SATURATING (type
));
13557 t
= build_fixed (type
, f
);
13558 /* Propagate overflow flags. */
13559 if (overflow_p
| TREE_OVERFLOW (arg0
))
13560 TREE_OVERFLOW (t
) = 1;
13565 gcc_unreachable ();
13571 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13572 an integer constant or real constant.
13574 TYPE is the type of the result. */
13577 fold_abs_const (tree arg0
, tree type
)
13579 tree t
= NULL_TREE
;
13581 switch (TREE_CODE (arg0
))
13585 /* If the value is unsigned or non-negative, then the absolute value
13586 is the same as the ordinary value. */
13587 if (!wi::neg_p (arg0
, TYPE_SIGN (type
)))
13590 /* If the value is negative, then the absolute value is
13595 wide_int val
= wi::neg (arg0
, &overflow
);
13596 t
= force_fit_type (type
, val
, -1,
13597 overflow
| TREE_OVERFLOW (arg0
));
13603 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
13604 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
13610 gcc_unreachable ();
13616 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13617 constant. TYPE is the type of the result. */
13620 fold_not_const (const_tree arg0
, tree type
)
13622 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
13624 return force_fit_type (type
, wi::bit_not (arg0
), 0, TREE_OVERFLOW (arg0
));
13627 /* Given CODE, a relational operator, the target type, TYPE and two
13628 constant operands OP0 and OP1, return the result of the
13629 relational operation. If the result is not a compile time
13630 constant, then return NULL_TREE. */
13633 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
13635 int result
, invert
;
13637 /* From here on, the only cases we handle are when the result is
13638 known to be a constant. */
13640 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
13642 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
13643 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
13645 /* Handle the cases where either operand is a NaN. */
13646 if (real_isnan (c0
) || real_isnan (c1
))
13656 case UNORDERED_EXPR
:
13670 if (flag_trapping_math
)
13676 gcc_unreachable ();
13679 return constant_boolean_node (result
, type
);
13682 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
13685 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
13687 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
13688 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
13689 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
13692 /* Handle equality/inequality of complex constants. */
13693 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
13695 tree rcond
= fold_relational_const (code
, type
,
13696 TREE_REALPART (op0
),
13697 TREE_REALPART (op1
));
13698 tree icond
= fold_relational_const (code
, type
,
13699 TREE_IMAGPART (op0
),
13700 TREE_IMAGPART (op1
));
13701 if (code
== EQ_EXPR
)
13702 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
13703 else if (code
== NE_EXPR
)
13704 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
13709 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
13711 unsigned count
= VECTOR_CST_NELTS (op0
);
13712 tree
*elts
= XALLOCAVEC (tree
, count
);
13713 gcc_assert (VECTOR_CST_NELTS (op1
) == count
13714 && TYPE_VECTOR_SUBPARTS (type
) == count
);
13716 for (unsigned i
= 0; i
< count
; i
++)
13718 tree elem_type
= TREE_TYPE (type
);
13719 tree elem0
= VECTOR_CST_ELT (op0
, i
);
13720 tree elem1
= VECTOR_CST_ELT (op1
, i
);
13722 tree tem
= fold_relational_const (code
, elem_type
,
13725 if (tem
== NULL_TREE
)
13728 elts
[i
] = build_int_cst (elem_type
, integer_zerop (tem
) ? 0 : -1);
13731 return build_vector (type
, elts
);
13734 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13736 To compute GT, swap the arguments and do LT.
13737 To compute GE, do LT and invert the result.
13738 To compute LE, swap the arguments, do LT and invert the result.
13739 To compute NE, do EQ and invert the result.
13741 Therefore, the code below must handle only EQ and LT. */
13743 if (code
== LE_EXPR
|| code
== GT_EXPR
)
13745 std::swap (op0
, op1
);
13746 code
= swap_tree_comparison (code
);
13749 /* Note that it is safe to invert for real values here because we
13750 have already handled the one case that it matters. */
13753 if (code
== NE_EXPR
|| code
== GE_EXPR
)
13756 code
= invert_tree_comparison (code
, false);
13759 /* Compute a result for LT or EQ if args permit;
13760 Otherwise return T. */
13761 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
13763 if (code
== EQ_EXPR
)
13764 result
= tree_int_cst_equal (op0
, op1
);
13766 result
= tree_int_cst_lt (op0
, op1
);
13773 return constant_boolean_node (result
, type
);
13776 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13777 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13781 fold_build_cleanup_point_expr (tree type
, tree expr
)
13783 /* If the expression does not have side effects then we don't have to wrap
13784 it with a cleanup point expression. */
13785 if (!TREE_SIDE_EFFECTS (expr
))
13788 /* If the expression is a return, check to see if the expression inside the
13789 return has no side effects or the right hand side of the modify expression
13790 inside the return. If either don't have side effects set we don't need to
13791 wrap the expression in a cleanup point expression. Note we don't check the
13792 left hand side of the modify because it should always be a return decl. */
13793 if (TREE_CODE (expr
) == RETURN_EXPR
)
13795 tree op
= TREE_OPERAND (expr
, 0);
13796 if (!op
|| !TREE_SIDE_EFFECTS (op
))
13798 op
= TREE_OPERAND (op
, 1);
13799 if (!TREE_SIDE_EFFECTS (op
))
13803 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
13806 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13807 of an indirection through OP0, or NULL_TREE if no simplification is
13811 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
13817 subtype
= TREE_TYPE (sub
);
13818 if (!POINTER_TYPE_P (subtype
))
13821 if (TREE_CODE (sub
) == ADDR_EXPR
)
13823 tree op
= TREE_OPERAND (sub
, 0);
13824 tree optype
= TREE_TYPE (op
);
13825 /* *&CONST_DECL -> to the value of the const decl. */
13826 if (TREE_CODE (op
) == CONST_DECL
)
13827 return DECL_INITIAL (op
);
13828 /* *&p => p; make sure to handle *&"str"[cst] here. */
13829 if (type
== optype
)
13831 tree fop
= fold_read_from_constant_string (op
);
13837 /* *(foo *)&fooarray => fooarray[0] */
13838 else if (TREE_CODE (optype
) == ARRAY_TYPE
13839 && type
== TREE_TYPE (optype
)
13840 && (!in_gimple_form
13841 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
13843 tree type_domain
= TYPE_DOMAIN (optype
);
13844 tree min_val
= size_zero_node
;
13845 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
13846 min_val
= TYPE_MIN_VALUE (type_domain
);
13848 && TREE_CODE (min_val
) != INTEGER_CST
)
13850 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
13851 NULL_TREE
, NULL_TREE
);
13853 /* *(foo *)&complexfoo => __real__ complexfoo */
13854 else if (TREE_CODE (optype
) == COMPLEX_TYPE
13855 && type
== TREE_TYPE (optype
))
13856 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
13857 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13858 else if (TREE_CODE (optype
) == VECTOR_TYPE
13859 && type
== TREE_TYPE (optype
))
13861 tree part_width
= TYPE_SIZE (type
);
13862 tree index
= bitsize_int (0);
13863 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
, index
);
13867 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
13868 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
13870 tree op00
= TREE_OPERAND (sub
, 0);
13871 tree op01
= TREE_OPERAND (sub
, 1);
13874 if (TREE_CODE (op00
) == ADDR_EXPR
)
13877 op00
= TREE_OPERAND (op00
, 0);
13878 op00type
= TREE_TYPE (op00
);
13880 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
13881 if (TREE_CODE (op00type
) == VECTOR_TYPE
13882 && type
== TREE_TYPE (op00type
))
13884 HOST_WIDE_INT offset
= tree_to_shwi (op01
);
13885 tree part_width
= TYPE_SIZE (type
);
13886 unsigned HOST_WIDE_INT part_widthi
= tree_to_shwi (part_width
)/BITS_PER_UNIT
;
13887 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
13888 tree index
= bitsize_int (indexi
);
13890 if (offset
/ part_widthi
< TYPE_VECTOR_SUBPARTS (op00type
))
13891 return fold_build3_loc (loc
,
13892 BIT_FIELD_REF
, type
, op00
,
13893 part_width
, index
);
13896 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13897 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
13898 && type
== TREE_TYPE (op00type
))
13900 tree size
= TYPE_SIZE_UNIT (type
);
13901 if (tree_int_cst_equal (size
, op01
))
13902 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
13904 /* ((foo *)&fooarray)[1] => fooarray[1] */
13905 else if (TREE_CODE (op00type
) == ARRAY_TYPE
13906 && type
== TREE_TYPE (op00type
))
13908 tree type_domain
= TYPE_DOMAIN (op00type
);
13909 tree min_val
= size_zero_node
;
13910 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
13911 min_val
= TYPE_MIN_VALUE (type_domain
);
13912 op01
= size_binop_loc (loc
, EXACT_DIV_EXPR
, op01
,
13913 TYPE_SIZE_UNIT (type
));
13914 op01
= size_binop_loc (loc
, PLUS_EXPR
, op01
, min_val
);
13915 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
13916 NULL_TREE
, NULL_TREE
);
13921 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13922 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
13923 && type
== TREE_TYPE (TREE_TYPE (subtype
))
13924 && (!in_gimple_form
13925 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
13928 tree min_val
= size_zero_node
;
13929 sub
= build_fold_indirect_ref_loc (loc
, sub
);
13930 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
13931 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
13932 min_val
= TYPE_MIN_VALUE (type_domain
);
13934 && TREE_CODE (min_val
) != INTEGER_CST
)
13936 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
13943 /* Builds an expression for an indirection through T, simplifying some
13947 build_fold_indirect_ref_loc (location_t loc
, tree t
)
13949 tree type
= TREE_TYPE (TREE_TYPE (t
));
13950 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
13955 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
13958 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13961 fold_indirect_ref_loc (location_t loc
, tree t
)
13963 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
13971 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13972 whose result is ignored. The type of the returned tree need not be
13973 the same as the original expression. */
13976 fold_ignored_result (tree t
)
13978 if (!TREE_SIDE_EFFECTS (t
))
13979 return integer_zero_node
;
13982 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
13985 t
= TREE_OPERAND (t
, 0);
13989 case tcc_comparison
:
13990 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
13991 t
= TREE_OPERAND (t
, 0);
13992 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
13993 t
= TREE_OPERAND (t
, 1);
13998 case tcc_expression
:
13999 switch (TREE_CODE (t
))
14001 case COMPOUND_EXPR
:
14002 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
14004 t
= TREE_OPERAND (t
, 0);
14008 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
14009 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
14011 t
= TREE_OPERAND (t
, 0);
14024 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14027 round_up_loc (location_t loc
, tree value
, unsigned int divisor
)
14029 tree div
= NULL_TREE
;
14034 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14035 have to do anything. Only do this when we are not given a const,
14036 because in that case, this check is more expensive than just
14038 if (TREE_CODE (value
) != INTEGER_CST
)
14040 div
= build_int_cst (TREE_TYPE (value
), divisor
);
14042 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
14046 /* If divisor is a power of two, simplify this to bit manipulation. */
14047 if (divisor
== (divisor
& -divisor
))
14049 if (TREE_CODE (value
) == INTEGER_CST
)
14051 wide_int val
= value
;
14054 if ((val
& (divisor
- 1)) == 0)
14057 overflow_p
= TREE_OVERFLOW (value
);
14058 val
+= divisor
- 1;
14059 val
&= - (int) divisor
;
14063 return force_fit_type (TREE_TYPE (value
), val
, -1, overflow_p
);
14069 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
14070 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
14071 t
= build_int_cst (TREE_TYPE (value
), - (int) divisor
);
14072 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
14078 div
= build_int_cst (TREE_TYPE (value
), divisor
);
14079 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
14080 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
14086 /* Likewise, but round down. */
14089 round_down_loc (location_t loc
, tree value
, int divisor
)
14091 tree div
= NULL_TREE
;
14093 gcc_assert (divisor
> 0);
14097 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14098 have to do anything. Only do this when we are not given a const,
14099 because in that case, this check is more expensive than just
14101 if (TREE_CODE (value
) != INTEGER_CST
)
14103 div
= build_int_cst (TREE_TYPE (value
), divisor
);
14105 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
14109 /* If divisor is a power of two, simplify this to bit manipulation. */
14110 if (divisor
== (divisor
& -divisor
))
14114 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
14115 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
14120 div
= build_int_cst (TREE_TYPE (value
), divisor
);
14121 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
14122 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
14128 /* Returns the pointer to the base of the object addressed by EXP and
14129 extracts the information about the offset of the access, storing it
14130 to PBITPOS and POFFSET. */
14133 split_address_to_core_and_offset (tree exp
,
14134 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
14138 int unsignedp
, volatilep
;
14139 HOST_WIDE_INT bitsize
;
14140 location_t loc
= EXPR_LOCATION (exp
);
14142 if (TREE_CODE (exp
) == ADDR_EXPR
)
14144 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
14145 poffset
, &mode
, &unsignedp
, &volatilep
,
14147 core
= build_fold_addr_expr_loc (loc
, core
);
14153 *poffset
= NULL_TREE
;
14159 /* Returns true if addresses of E1 and E2 differ by a constant, false
14160 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14163 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
14166 HOST_WIDE_INT bitpos1
, bitpos2
;
14167 tree toffset1
, toffset2
, tdiff
, type
;
14169 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
14170 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
14172 if (bitpos1
% BITS_PER_UNIT
!= 0
14173 || bitpos2
% BITS_PER_UNIT
!= 0
14174 || !operand_equal_p (core1
, core2
, 0))
14177 if (toffset1
&& toffset2
)
14179 type
= TREE_TYPE (toffset1
);
14180 if (type
!= TREE_TYPE (toffset2
))
14181 toffset2
= fold_convert (type
, toffset2
);
14183 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
14184 if (!cst_and_fits_in_hwi (tdiff
))
14187 *diff
= int_cst_value (tdiff
);
14189 else if (toffset1
|| toffset2
)
14191 /* If only one of the offsets is non-constant, the difference cannot
14198 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
14202 /* Return OFF converted to a pointer offset type suitable as offset for
14203 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14205 convert_to_ptrofftype_loc (location_t loc
, tree off
)
14207 return fold_convert_loc (loc
, sizetype
, off
);
14210 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14212 fold_build_pointer_plus_loc (location_t loc
, tree ptr
, tree off
)
14214 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
14215 ptr
, convert_to_ptrofftype_loc (loc
, off
));
14218 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14220 fold_build_pointer_plus_hwi_loc (location_t loc
, tree ptr
, HOST_WIDE_INT off
)
14222 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
14223 ptr
, size_int (off
));