1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
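/* A minimal usage sketch (illustrative only; `elt_type' and `offset'
   stand for trees a hypothetical caller already has):

     tree bytes = size_binop (MULT_EXPR, size_int (4),
                              TYPE_SIZE_UNIT (elt_type));
     tree total = size_binop (PLUS_EXPR, offset, bytes);

   computes "offset + 4 * sizeof (element)" in sizetype arithmetic and
   folds to a constant whenever both inputs are constant.  */
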
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "params.h"
75 #include "tree-into-ssa.h"
76 #include "md5.h"
77 #include "case-cfn-macros.h"
78 #include "stringpool.h"
79 #include "tree-vrp.h"
80 #include "tree-ssanames.h"
81 #include "selftest.h"
82 #include "stringpool.h"
83 #include "attribs.h"
84 #include "tree-vector-builder.h"
85 #include "vec-perm-indices.h"
86
87 /* Nonzero if we are folding constants inside an initializer; zero
88 otherwise. */
89 int folding_initializer = 0;
90
91 /* The following constants represent a bit based encoding of GCC's
92 comparison operators. This encoding simplifies transformations
93 on relational comparison operators, such as AND and OR. */
94 enum comparison_code {
95 COMPCODE_FALSE = 0,
96 COMPCODE_LT = 1,
97 COMPCODE_EQ = 2,
98 COMPCODE_LE = 3,
99 COMPCODE_GT = 4,
100 COMPCODE_LTGT = 5,
101 COMPCODE_GE = 6,
102 COMPCODE_ORD = 7,
103 COMPCODE_UNORD = 8,
104 COMPCODE_UNLT = 9,
105 COMPCODE_UNEQ = 10,
106 COMPCODE_UNLE = 11,
107 COMPCODE_UNGT = 12,
108 COMPCODE_NE = 13,
109 COMPCODE_UNGE = 14,
110 COMPCODE_TRUE = 15
111 };
112
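/* A note on the encoding above: LT, EQ, GT and UNORD occupy separate
   bits (1, 2, 4 and 8), so the compound codes are simply bitwise ORs
   of those, e.g.

     COMPCODE_LE  == COMPCODE_LT | COMPCODE_EQ
     COMPCODE_ORD == COMPCODE_LT | COMPCODE_EQ | COMPCODE_GT
     COMPCODE_NE  == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD

   which is what makes ANDing and ORing comparison codes tractable.  */
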
113 static bool negate_expr_p (tree);
114 static tree negate_expr (tree);
115 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
116 static enum comparison_code comparison_to_compcode (enum tree_code);
117 static enum tree_code compcode_to_comparison (enum comparison_code);
118 static bool twoval_comparison_p (tree, tree *, tree *);
119 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
120 static tree optimize_bit_field_compare (location_t, enum tree_code,
121 tree, tree, tree);
122 static bool simple_operand_p (const_tree);
123 static bool simple_operand_p_2 (tree);
124 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
125 static tree range_predecessor (tree);
126 static tree range_successor (tree);
127 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
128 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
129 static tree unextend (tree, int, int, tree);
130 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
131 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
132 static tree fold_binary_op_with_conditional_arg (location_t,
133 enum tree_code, tree,
134 tree, tree,
135 tree, tree, int);
136 static tree fold_negate_const (tree, tree);
137 static tree fold_not_const (const_tree, tree);
138 static tree fold_relational_const (enum tree_code, tree, tree, tree);
139 static tree fold_convert_const (enum tree_code, tree, tree);
140 static tree fold_view_convert_expr (tree, tree);
141 static tree fold_negate_expr (location_t, tree);
142
143
144 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
145 Otherwise, return LOC. */
146
147 static location_t
148 expr_location_or (tree t, location_t loc)
149 {
150 location_t tloc = EXPR_LOCATION (t);
151 return tloc == UNKNOWN_LOCATION ? loc : tloc;
152 }
153
154 /* Similar to protected_set_expr_location, but never modify x in place;
155 if the location can and needs to be set, unshare it. */
156
157 static inline tree
158 protected_set_expr_location_unshare (tree x, location_t loc)
159 {
160 if (CAN_HAVE_LOCATION_P (x)
161 && EXPR_LOCATION (x) != loc
162 && !(TREE_CODE (x) == SAVE_EXPR
163 || TREE_CODE (x) == TARGET_EXPR
164 || TREE_CODE (x) == BIND_EXPR))
165 {
166 x = copy_node (x);
167 SET_EXPR_LOCATION (x, loc);
168 }
169 return x;
170 }
171 \f
172 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
173 division and returns the quotient. Otherwise returns
174 NULL_TREE. */
175
176 tree
177 div_if_zero_remainder (const_tree arg1, const_tree arg2)
178 {
179 widest_int quo;
180
181 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
182 SIGNED, &quo))
183 return wide_int_to_tree (TREE_TYPE (arg1), quo);
184
185 return NULL_TREE;
186 }
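
/* For example (illustrative): with INTEGER_CST arguments 12 and 4 this
   returns the constant 3, whereas 13 and 4 yield NULL_TREE because the
   remainder is nonzero.  */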
187 \f
188 /* This is nonzero if we should defer warnings about undefined
189 overflow. This facility exists because these warnings are a
190 special case. The code to estimate loop iterations does not want
191 to issue any warnings, since it works with expressions which do not
192 occur in user code. Various bits of cleanup code call fold(), but
193 only use the result if it has certain characteristics (e.g., is a
194 constant); that code only wants to issue a warning if the result is
195 used. */
196
197 static int fold_deferring_overflow_warnings;
198
199 /* If a warning about undefined overflow is deferred, this is the
200 warning. Note that this may cause us to turn two warnings into
201 one, but that is fine since it is sufficient to only give one
202 warning per expression. */
203
204 static const char* fold_deferred_overflow_warning;
205
206 /* If a warning about undefined overflow is deferred, this is the
207 level at which the warning should be emitted. */
208
209 static enum warn_strict_overflow_code fold_deferred_overflow_code;
210
211 /* Start deferring overflow warnings. We could use a stack here to
212 permit nested calls, but at present it is not necessary. */
213
214 void
215 fold_defer_overflow_warnings (void)
216 {
217 ++fold_deferring_overflow_warnings;
218 }
219
220 /* Stop deferring overflow warnings. If there is a pending warning,
221 and ISSUE is true, then issue the warning if appropriate. STMT is
222 the statement with which the warning should be associated (used for
223 location information); STMT may be NULL. CODE is the level of the
224 warning--a warn_strict_overflow_code value. This function will use
225 the smaller of CODE and the deferred code when deciding whether to
226 issue the warning. CODE may be zero to mean to always use the
227 deferred code. */
228
229 void
230 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
231 {
232 const char *warnmsg;
233 location_t locus;
234
235 gcc_assert (fold_deferring_overflow_warnings > 0);
236 --fold_deferring_overflow_warnings;
237 if (fold_deferring_overflow_warnings > 0)
238 {
239 if (fold_deferred_overflow_warning != NULL
240 && code != 0
241 && code < (int) fold_deferred_overflow_code)
242 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
243 return;
244 }
245
246 warnmsg = fold_deferred_overflow_warning;
247 fold_deferred_overflow_warning = NULL;
248
249 if (!issue || warnmsg == NULL)
250 return;
251
252 if (gimple_no_warning_p (stmt))
253 return;
254
255 /* Use the smallest code level when deciding to issue the
256 warning. */
257 if (code == 0 || code > (int) fold_deferred_overflow_code)
258 code = fold_deferred_overflow_code;
259
260 if (!issue_strict_overflow_warning (code))
261 return;
262
263 if (stmt == NULL)
264 locus = input_location;
265 else
266 locus = gimple_location (stmt);
267 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
268 }
269
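/* A minimal usage sketch (hypothetical caller, illustrative only):

     fold_defer_overflow_warnings ();
     tree folded = fold_binary (PLUS_EXPR, type, op0, op1);
     fold_undefer_overflow_warnings (folded && TREE_CONSTANT (folded),
                                     stmt, WARN_STRICT_OVERFLOW_MISC);

   i.e. any deferred warning is issued only if the caller actually ends
   up using the folded result.  */
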
270 /* Stop deferring overflow warnings, ignoring any deferred
271 warnings. */
272
273 void
274 fold_undefer_and_ignore_overflow_warnings (void)
275 {
276 fold_undefer_overflow_warnings (false, NULL, 0);
277 }
278
279 /* Whether we are deferring overflow warnings. */
280
281 bool
282 fold_deferring_overflow_warnings_p (void)
283 {
284 return fold_deferring_overflow_warnings > 0;
285 }
286
287 /* This is called when we fold something based on the fact that signed
288 overflow is undefined. */
289
290 void
291 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
292 {
293 if (fold_deferring_overflow_warnings > 0)
294 {
295 if (fold_deferred_overflow_warning == NULL
296 || wc < fold_deferred_overflow_code)
297 {
298 fold_deferred_overflow_warning = gmsgid;
299 fold_deferred_overflow_code = wc;
300 }
301 }
302 else if (issue_strict_overflow_warning (wc))
303 warning (OPT_Wstrict_overflow, gmsgid);
304 }
305 \f
306 /* Return true if the built-in mathematical function specified by FN
307 is odd, i.e. -f(x) == f(-x). */
308
309 bool
310 negate_mathfn_p (combined_fn fn)
311 {
312 switch (fn)
313 {
314 CASE_CFN_ASIN:
315 CASE_CFN_ASINH:
316 CASE_CFN_ATAN:
317 CASE_CFN_ATANH:
318 CASE_CFN_CASIN:
319 CASE_CFN_CASINH:
320 CASE_CFN_CATAN:
321 CASE_CFN_CATANH:
322 CASE_CFN_CBRT:
323 CASE_CFN_CPROJ:
324 CASE_CFN_CSIN:
325 CASE_CFN_CSINH:
326 CASE_CFN_CTAN:
327 CASE_CFN_CTANH:
328 CASE_CFN_ERF:
329 CASE_CFN_LLROUND:
330 CASE_CFN_LROUND:
331 CASE_CFN_ROUND:
332 CASE_CFN_ROUNDEVEN:
333 CASE_CFN_ROUNDEVEN_FN:
334 CASE_CFN_SIN:
335 CASE_CFN_SINH:
336 CASE_CFN_TAN:
337 CASE_CFN_TANH:
338 CASE_CFN_TRUNC:
339 return true;
340
341 CASE_CFN_LLRINT:
342 CASE_CFN_LRINT:
343 CASE_CFN_NEARBYINT:
344 CASE_CFN_RINT:
345 return !flag_rounding_math;
346
347 default:
348 break;
349 }
350 return false;
351 }
352
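/* For instance, CASE_CFN_SIN appears above because sin is odd
   (-sin (x) == sin (-x) for all x), whereas an even function such as
   cos is absent, since -cos (x) != cos (-x) in general.  */
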
353 /* Check whether we may negate an integer constant T without causing
354 overflow. */
355
356 bool
357 may_negate_without_overflow_p (const_tree t)
358 {
359 tree type;
360
361 gcc_assert (TREE_CODE (t) == INTEGER_CST);
362
363 type = TREE_TYPE (t);
364 if (TYPE_UNSIGNED (type))
365 return false;
366
367 return !wi::only_sign_bit_p (wi::to_wide (t));
368 }
369
370 /* Determine whether an expression T can be cheaply negated using
371 the function negate_expr without introducing undefined overflow. */
372
373 static bool
374 negate_expr_p (tree t)
375 {
376 tree type;
377
378 if (t == 0)
379 return false;
380
381 type = TREE_TYPE (t);
382
383 STRIP_SIGN_NOPS (t);
384 switch (TREE_CODE (t))
385 {
386 case INTEGER_CST:
387 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
388 return true;
389
390 /* Check that -CST will not overflow type. */
391 return may_negate_without_overflow_p (t);
392 case BIT_NOT_EXPR:
393 return (INTEGRAL_TYPE_P (type)
394 && TYPE_OVERFLOW_WRAPS (type));
395
396 case FIXED_CST:
397 return true;
398
399 case NEGATE_EXPR:
400 return !TYPE_OVERFLOW_SANITIZED (type);
401
402 case REAL_CST:
403 /* We want to canonicalize to positive real constants. Pretend
404 that only negative ones can be easily negated. */
405 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
406
407 case COMPLEX_CST:
408 return negate_expr_p (TREE_REALPART (t))
409 && negate_expr_p (TREE_IMAGPART (t));
410
411 case VECTOR_CST:
412 {
413 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
414 return true;
415
416 /* Steps don't prevent negation. */
417 unsigned int count = vector_cst_encoded_nelts (t);
418 for (unsigned int i = 0; i < count; ++i)
419 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
420 return false;
421
422 return true;
423 }
424
425 case COMPLEX_EXPR:
426 return negate_expr_p (TREE_OPERAND (t, 0))
427 && negate_expr_p (TREE_OPERAND (t, 1));
428
429 case CONJ_EXPR:
430 return negate_expr_p (TREE_OPERAND (t, 0));
431
432 case PLUS_EXPR:
433 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
434 || HONOR_SIGNED_ZEROS (element_mode (type))
435 || (ANY_INTEGRAL_TYPE_P (type)
436 && ! TYPE_OVERFLOW_WRAPS (type)))
437 return false;
438 /* -(A + B) -> (-B) - A. */
439 if (negate_expr_p (TREE_OPERAND (t, 1)))
440 return true;
441 /* -(A + B) -> (-A) - B. */
442 return negate_expr_p (TREE_OPERAND (t, 0));
443
444 case MINUS_EXPR:
445 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
446 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
447 && !HONOR_SIGNED_ZEROS (element_mode (type))
448 && (! ANY_INTEGRAL_TYPE_P (type)
449 || TYPE_OVERFLOW_WRAPS (type));
450
451 case MULT_EXPR:
452 if (TYPE_UNSIGNED (type))
453 break;
454 /* INT_MIN/n * n doesn't overflow, while negating one of its operands
455 does overflow if n is a (negative) power of two. */
456 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
457 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
458 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
459 && (wi::popcount
460 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
461 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
462 && (wi::popcount
463 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
464 break;
465
466 /* Fall through. */
467
468 case RDIV_EXPR:
469 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
470 return negate_expr_p (TREE_OPERAND (t, 1))
471 || negate_expr_p (TREE_OPERAND (t, 0));
472 break;
473
474 case TRUNC_DIV_EXPR:
475 case ROUND_DIV_EXPR:
476 case EXACT_DIV_EXPR:
477 if (TYPE_UNSIGNED (type))
478 break;
479 /* In general we can't negate A in A / B, because if A is INT_MIN and
480 B is not 1 we change the sign of the result. */
481 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
482 && negate_expr_p (TREE_OPERAND (t, 0)))
483 return true;
484 /* In general we can't negate B in A / B, because if A is INT_MIN and
485 B is 1, we may turn this into INT_MIN / -1 which is undefined
486 and actually traps on some architectures. */
487 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
488 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
489 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
490 && ! integer_onep (TREE_OPERAND (t, 1))))
491 return negate_expr_p (TREE_OPERAND (t, 1));
492 break;
493
494 case NOP_EXPR:
495 /* Negate -((double)float) as (double)(-float). */
496 if (TREE_CODE (type) == REAL_TYPE)
497 {
498 tree tem = strip_float_extensions (t);
499 if (tem != t)
500 return negate_expr_p (tem);
501 }
502 break;
503
504 case CALL_EXPR:
505 /* Negate -f(x) as f(-x). */
506 if (negate_mathfn_p (get_call_combined_fn (t)))
507 return negate_expr_p (CALL_EXPR_ARG (t, 0));
508 break;
509
510 case RSHIFT_EXPR:
511 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
512 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
513 {
514 tree op1 = TREE_OPERAND (t, 1);
515 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
516 return true;
517 }
518 break;
519
520 default:
521 break;
522 }
523 return false;
524 }
525
526 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
527 simplification is possible.
528 If negate_expr_p would return true for T, NULL_TREE will never be
529 returned. */
530
531 static tree
532 fold_negate_expr_1 (location_t loc, tree t)
533 {
534 tree type = TREE_TYPE (t);
535 tree tem;
536
537 switch (TREE_CODE (t))
538 {
539 /* Convert - (~A) to A + 1. */
540 case BIT_NOT_EXPR:
541 if (INTEGRAL_TYPE_P (type))
542 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
543 build_one_cst (type));
544 break;
545
546 case INTEGER_CST:
547 tem = fold_negate_const (t, type);
548 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
549 || (ANY_INTEGRAL_TYPE_P (type)
550 && !TYPE_OVERFLOW_TRAPS (type)
551 && TYPE_OVERFLOW_WRAPS (type))
552 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
553 return tem;
554 break;
555
556 case POLY_INT_CST:
557 case REAL_CST:
558 case FIXED_CST:
559 tem = fold_negate_const (t, type);
560 return tem;
561
562 case COMPLEX_CST:
563 {
564 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
565 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
566 if (rpart && ipart)
567 return build_complex (type, rpart, ipart);
568 }
569 break;
570
571 case VECTOR_CST:
572 {
573 tree_vector_builder elts;
574 elts.new_unary_operation (type, t, true);
575 unsigned int count = elts.encoded_nelts ();
576 for (unsigned int i = 0; i < count; ++i)
577 {
578 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
579 if (elt == NULL_TREE)
580 return NULL_TREE;
581 elts.quick_push (elt);
582 }
583
584 return elts.build ();
585 }
586
587 case COMPLEX_EXPR:
588 if (negate_expr_p (t))
589 return fold_build2_loc (loc, COMPLEX_EXPR, type,
590 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
591 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
592 break;
593
594 case CONJ_EXPR:
595 if (negate_expr_p (t))
596 return fold_build1_loc (loc, CONJ_EXPR, type,
597 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
598 break;
599
600 case NEGATE_EXPR:
601 if (!TYPE_OVERFLOW_SANITIZED (type))
602 return TREE_OPERAND (t, 0);
603 break;
604
605 case PLUS_EXPR:
606 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
607 && !HONOR_SIGNED_ZEROS (element_mode (type)))
608 {
609 /* -(A + B) -> (-B) - A. */
610 if (negate_expr_p (TREE_OPERAND (t, 1)))
611 {
612 tem = negate_expr (TREE_OPERAND (t, 1));
613 return fold_build2_loc (loc, MINUS_EXPR, type,
614 tem, TREE_OPERAND (t, 0));
615 }
616
617 /* -(A + B) -> (-A) - B. */
618 if (negate_expr_p (TREE_OPERAND (t, 0)))
619 {
620 tem = negate_expr (TREE_OPERAND (t, 0));
621 return fold_build2_loc (loc, MINUS_EXPR, type,
622 tem, TREE_OPERAND (t, 1));
623 }
624 }
625 break;
626
627 case MINUS_EXPR:
628 /* - (A - B) -> B - A */
629 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
630 && !HONOR_SIGNED_ZEROS (element_mode (type)))
631 return fold_build2_loc (loc, MINUS_EXPR, type,
632 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
633 break;
634
635 case MULT_EXPR:
636 if (TYPE_UNSIGNED (type))
637 break;
638
639 /* Fall through. */
640
641 case RDIV_EXPR:
642 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
643 {
644 tem = TREE_OPERAND (t, 1);
645 if (negate_expr_p (tem))
646 return fold_build2_loc (loc, TREE_CODE (t), type,
647 TREE_OPERAND (t, 0), negate_expr (tem));
648 tem = TREE_OPERAND (t, 0);
649 if (negate_expr_p (tem))
650 return fold_build2_loc (loc, TREE_CODE (t), type,
651 negate_expr (tem), TREE_OPERAND (t, 1));
652 }
653 break;
654
655 case TRUNC_DIV_EXPR:
656 case ROUND_DIV_EXPR:
657 case EXACT_DIV_EXPR:
658 if (TYPE_UNSIGNED (type))
659 break;
660 /* In general we can't negate A in A / B, because if A is INT_MIN and
661 B is not 1 we change the sign of the result. */
662 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
663 && negate_expr_p (TREE_OPERAND (t, 0)))
664 return fold_build2_loc (loc, TREE_CODE (t), type,
665 negate_expr (TREE_OPERAND (t, 0)),
666 TREE_OPERAND (t, 1));
667 /* In general we can't negate B in A / B, because if A is INT_MIN and
668 B is 1, we may turn this into INT_MIN / -1 which is undefined
669 and actually traps on some architectures. */
670 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
671 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
672 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
673 && ! integer_onep (TREE_OPERAND (t, 1))))
674 && negate_expr_p (TREE_OPERAND (t, 1)))
675 return fold_build2_loc (loc, TREE_CODE (t), type,
676 TREE_OPERAND (t, 0),
677 negate_expr (TREE_OPERAND (t, 1)));
678 break;
679
680 case NOP_EXPR:
681 /* Convert -((double)float) into (double)(-float). */
682 if (TREE_CODE (type) == REAL_TYPE)
683 {
684 tem = strip_float_extensions (t);
685 if (tem != t && negate_expr_p (tem))
686 return fold_convert_loc (loc, type, negate_expr (tem));
687 }
688 break;
689
690 case CALL_EXPR:
691 /* Negate -f(x) as f(-x). */
692 if (negate_mathfn_p (get_call_combined_fn (t))
693 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
694 {
695 tree fndecl, arg;
696
697 fndecl = get_callee_fndecl (t);
698 arg = negate_expr (CALL_EXPR_ARG (t, 0));
699 return build_call_expr_loc (loc, fndecl, 1, arg);
700 }
701 break;
702
703 case RSHIFT_EXPR:
704 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
705 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
706 {
707 tree op1 = TREE_OPERAND (t, 1);
708 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
709 {
710 tree ntype = TYPE_UNSIGNED (type)
711 ? signed_type_for (type)
712 : unsigned_type_for (type);
713 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
714 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
715 return fold_convert_loc (loc, type, temp);
716 }
717 }
718 break;
719
720 default:
721 break;
722 }
723
724 return NULL_TREE;
725 }
726
727 /* A wrapper for fold_negate_expr_1. */
728
729 static tree
730 fold_negate_expr (location_t loc, tree t)
731 {
732 tree type = TREE_TYPE (t);
733 STRIP_SIGN_NOPS (t);
734 tree tem = fold_negate_expr_1 (loc, t);
735 if (tem == NULL_TREE)
736 return NULL_TREE;
737 return fold_convert_loc (loc, type, tem);
738 }
739
740 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
741 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
742 return NULL_TREE. */
743
744 static tree
745 negate_expr (tree t)
746 {
747 tree type, tem;
748 location_t loc;
749
750 if (t == NULL_TREE)
751 return NULL_TREE;
752
753 loc = EXPR_LOCATION (t);
754 type = TREE_TYPE (t);
755 STRIP_SIGN_NOPS (t);
756
757 tem = fold_negate_expr (loc, t);
758 if (!tem)
759 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
760 return fold_convert_loc (loc, type, tem);
761 }
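
/* For example (illustrative): calling negate_expr on the tree for
   "a - b" yields "b - a" when signed zeros and sign-dependent rounding
   are not honored; when no simplification applies, the result is just
   the operand wrapped in a NEGATE_EXPR.  */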
762 \f
763 /* Split a tree IN into constant, literal and variable parts that could be
764 combined with CODE to make IN. "constant" means an expression with
765 TREE_CONSTANT but that isn't an actual constant. CODE must be a
766 commutative arithmetic operation. Store the constant part into *CONP,
767 the literal in *LITP and return the variable part. If a part isn't
768 present, set it to null. If the tree does not decompose in this way,
769 return the entire tree as the variable part and the other parts as null.
770
771 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
772 case, we negate an operand that was subtracted. Except if it is a
773 literal for which we use *MINUS_LITP instead.
774
775 If NEGATE_P is true, we are negating all of IN, again except a literal
776 for which we use *MINUS_LITP instead. If a variable part is of pointer
777 type, it is negated after converting to TYPE. This prevents us from
778 generating an illegal MINUS pointer expression. LOC is the location of
779 the converted variable part.
780
781 If IN is itself a literal or constant, return it as appropriate.
782
783 Note that we do not guarantee that any of the three values will be the
784 same type as IN, but they will have the same signedness and mode. */
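/* For instance (illustrative): with CODE == PLUS_EXPR, splitting the
   tree for "x + 3" returns "x" as the variable part and sets *LITP to
   the INTEGER_CST 3, while splitting "x - 3" instead sets *MINUS_LITP
   to 3; parts that are not present are set to null.  */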
785
786 static tree
787 split_tree (tree in, tree type, enum tree_code code,
788 tree *minus_varp, tree *conp, tree *minus_conp,
789 tree *litp, tree *minus_litp, int negate_p)
790 {
791 tree var = 0;
792 *minus_varp = 0;
793 *conp = 0;
794 *minus_conp = 0;
795 *litp = 0;
796 *minus_litp = 0;
797
798 /* Strip any conversions that don't change the machine mode or signedness. */
799 STRIP_SIGN_NOPS (in);
800
801 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
802 || TREE_CODE (in) == FIXED_CST)
803 *litp = in;
804 else if (TREE_CODE (in) == code
805 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
806 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
807 /* We can associate addition and subtraction together (even
808 though the C standard doesn't say so) for integers because
809 the value is not affected. For reals, the value might be
810 affected, so we can't. */
811 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
812 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
813 || (code == MINUS_EXPR
814 && (TREE_CODE (in) == PLUS_EXPR
815 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
816 {
817 tree op0 = TREE_OPERAND (in, 0);
818 tree op1 = TREE_OPERAND (in, 1);
819 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
820 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
821
822 /* First see if either of the operands is a literal, then a constant. */
823 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
824 || TREE_CODE (op0) == FIXED_CST)
825 *litp = op0, op0 = 0;
826 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
827 || TREE_CODE (op1) == FIXED_CST)
828 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
829
830 if (op0 != 0 && TREE_CONSTANT (op0))
831 *conp = op0, op0 = 0;
832 else if (op1 != 0 && TREE_CONSTANT (op1))
833 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
834
835 /* If we haven't dealt with either operand, this is not a case we can
836 decompose. Otherwise, VAR is either of the ones remaining, if any. */
837 if (op0 != 0 && op1 != 0)
838 var = in;
839 else if (op0 != 0)
840 var = op0;
841 else
842 var = op1, neg_var_p = neg1_p;
843
844 /* Now do any needed negations. */
845 if (neg_litp_p)
846 *minus_litp = *litp, *litp = 0;
847 if (neg_conp_p && *conp)
848 *minus_conp = *conp, *conp = 0;
849 if (neg_var_p && var)
850 *minus_varp = var, var = 0;
851 }
852 else if (TREE_CONSTANT (in))
853 *conp = in;
854 else if (TREE_CODE (in) == BIT_NOT_EXPR
855 && code == PLUS_EXPR)
856 {
857 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
858 when IN is constant. */
859 *litp = build_minus_one_cst (type);
860 *minus_varp = TREE_OPERAND (in, 0);
861 }
862 else
863 var = in;
864
865 if (negate_p)
866 {
867 if (*litp)
868 *minus_litp = *litp, *litp = 0;
869 else if (*minus_litp)
870 *litp = *minus_litp, *minus_litp = 0;
871 if (*conp)
872 *minus_conp = *conp, *conp = 0;
873 else if (*minus_conp)
874 *conp = *minus_conp, *minus_conp = 0;
875 if (var)
876 *minus_varp = var, var = 0;
877 else if (*minus_varp)
878 var = *minus_varp, *minus_varp = 0;
879 }
880
881 if (*litp
882 && TREE_OVERFLOW_P (*litp))
883 *litp = drop_tree_overflow (*litp);
884 if (*minus_litp
885 && TREE_OVERFLOW_P (*minus_litp))
886 *minus_litp = drop_tree_overflow (*minus_litp);
887
888 return var;
889 }
890
891 /* Re-associate trees split by the above function. T1 and T2 are
892 either expressions to associate or null. Return the new
893 expression, if any. LOC is the location of the new expression. If
894 we build an operation, do it in TYPE and with CODE. */
895
896 static tree
897 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
898 {
899 if (t1 == 0)
900 {
901 gcc_assert (t2 == 0 || code != MINUS_EXPR);
902 return t2;
903 }
904 else if (t2 == 0)
905 return t1;
906
907 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
908 try to fold this since we will have infinite recursion. But do
909 deal with any NEGATE_EXPRs. */
910 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
911 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
912 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
913 {
914 if (code == PLUS_EXPR)
915 {
916 if (TREE_CODE (t1) == NEGATE_EXPR)
917 return build2_loc (loc, MINUS_EXPR, type,
918 fold_convert_loc (loc, type, t2),
919 fold_convert_loc (loc, type,
920 TREE_OPERAND (t1, 0)));
921 else if (TREE_CODE (t2) == NEGATE_EXPR)
922 return build2_loc (loc, MINUS_EXPR, type,
923 fold_convert_loc (loc, type, t1),
924 fold_convert_loc (loc, type,
925 TREE_OPERAND (t2, 0)));
926 else if (integer_zerop (t2))
927 return fold_convert_loc (loc, type, t1);
928 }
929 else if (code == MINUS_EXPR)
930 {
931 if (integer_zerop (t2))
932 return fold_convert_loc (loc, type, t1);
933 }
934
935 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
936 fold_convert_loc (loc, type, t2));
937 }
938
939 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
940 fold_convert_loc (loc, type, t2));
941 }
942 \f
943 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
944 for use in int_const_binop, size_binop and size_diffop. */
945
946 static bool
947 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
948 {
949 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
950 return false;
951 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
952 return false;
953
954 switch (code)
955 {
956 case LSHIFT_EXPR:
957 case RSHIFT_EXPR:
958 case LROTATE_EXPR:
959 case RROTATE_EXPR:
960 return true;
961
962 default:
963 break;
964 }
965
966 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
967 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
968 && TYPE_MODE (type1) == TYPE_MODE (type2);
969 }
970
971 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
972 a new constant in RES. Return FALSE if we don't know how to
973 evaluate CODE at compile-time. */
974
975 bool
976 wide_int_binop (wide_int &res,
977 enum tree_code code, const wide_int &arg1, const wide_int &arg2,
978 signop sign, wi::overflow_type *overflow)
979 {
980 wide_int tmp;
981 *overflow = wi::OVF_NONE;
982 switch (code)
983 {
984 case BIT_IOR_EXPR:
985 res = wi::bit_or (arg1, arg2);
986 break;
987
988 case BIT_XOR_EXPR:
989 res = wi::bit_xor (arg1, arg2);
990 break;
991
992 case BIT_AND_EXPR:
993 res = wi::bit_and (arg1, arg2);
994 break;
995
996 case RSHIFT_EXPR:
997 case LSHIFT_EXPR:
998 if (wi::neg_p (arg2))
999 {
1000 tmp = -arg2;
1001 if (code == RSHIFT_EXPR)
1002 code = LSHIFT_EXPR;
1003 else
1004 code = RSHIFT_EXPR;
1005 }
1006 else
1007 tmp = arg2;
1008
1009 if (code == RSHIFT_EXPR)
1010 /* It's unclear from the C standard whether shifts can overflow.
1011 The following code ignores overflow; perhaps a C standard
1012 interpretation ruling is needed. */
1013 res = wi::rshift (arg1, tmp, sign);
1014 else
1015 res = wi::lshift (arg1, tmp);
1016 break;
1017
1018 case RROTATE_EXPR:
1019 case LROTATE_EXPR:
1020 if (wi::neg_p (arg2))
1021 {
1022 tmp = -arg2;
1023 if (code == RROTATE_EXPR)
1024 code = LROTATE_EXPR;
1025 else
1026 code = RROTATE_EXPR;
1027 }
1028 else
1029 tmp = arg2;
1030
1031 if (code == RROTATE_EXPR)
1032 res = wi::rrotate (arg1, tmp);
1033 else
1034 res = wi::lrotate (arg1, tmp);
1035 break;
1036
1037 case PLUS_EXPR:
1038 res = wi::add (arg1, arg2, sign, overflow);
1039 break;
1040
1041 case MINUS_EXPR:
1042 res = wi::sub (arg1, arg2, sign, overflow);
1043 break;
1044
1045 case MULT_EXPR:
1046 res = wi::mul (arg1, arg2, sign, overflow);
1047 break;
1048
1049 case MULT_HIGHPART_EXPR:
1050 res = wi::mul_high (arg1, arg2, sign);
1051 break;
1052
1053 case TRUNC_DIV_EXPR:
1054 case EXACT_DIV_EXPR:
1055 if (arg2 == 0)
1056 return false;
1057 res = wi::div_trunc (arg1, arg2, sign, overflow);
1058 break;
1059
1060 case FLOOR_DIV_EXPR:
1061 if (arg2 == 0)
1062 return false;
1063 res = wi::div_floor (arg1, arg2, sign, overflow);
1064 break;
1065
1066 case CEIL_DIV_EXPR:
1067 if (arg2 == 0)
1068 return false;
1069 res = wi::div_ceil (arg1, arg2, sign, overflow);
1070 break;
1071
1072 case ROUND_DIV_EXPR:
1073 if (arg2 == 0)
1074 return false;
1075 res = wi::div_round (arg1, arg2, sign, overflow);
1076 break;
1077
1078 case TRUNC_MOD_EXPR:
1079 if (arg2 == 0)
1080 return false;
1081 res = wi::mod_trunc (arg1, arg2, sign, overflow);
1082 break;
1083
1084 case FLOOR_MOD_EXPR:
1085 if (arg2 == 0)
1086 return false;
1087 res = wi::mod_floor (arg1, arg2, sign, overflow);
1088 break;
1089
1090 case CEIL_MOD_EXPR:
1091 if (arg2 == 0)
1092 return false;
1093 res = wi::mod_ceil (arg1, arg2, sign, overflow);
1094 break;
1095
1096 case ROUND_MOD_EXPR:
1097 if (arg2 == 0)
1098 return false;
1099 res = wi::mod_round (arg1, arg2, sign, overflow);
1100 break;
1101
1102 case MIN_EXPR:
1103 res = wi::min (arg1, arg2, sign);
1104 break;
1105
1106 case MAX_EXPR:
1107 res = wi::max (arg1, arg2, sign);
1108 break;
1109
1110 default:
1111 return false;
1112 }
1113 return true;
1114 }
1115
1116 /* Combine two poly_int constants ARG1 and ARG2 under operation CODE to
1117 produce a new constant in RES. Return FALSE if we don't know how
1118 to evaluate CODE at compile-time. */
1119
1120 static bool
1121 poly_int_binop (poly_wide_int &res, enum tree_code code,
1122 const_tree arg1, const_tree arg2,
1123 signop sign, wi::overflow_type *overflow)
1124 {
1125 gcc_assert (NUM_POLY_INT_COEFFS != 1);
1126 gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1127 switch (code)
1128 {
1129 case PLUS_EXPR:
1130 res = wi::add (wi::to_poly_wide (arg1),
1131 wi::to_poly_wide (arg2), sign, overflow);
1132 break;
1133
1134 case MINUS_EXPR:
1135 res = wi::sub (wi::to_poly_wide (arg1),
1136 wi::to_poly_wide (arg2), sign, overflow);
1137 break;
1138
1139 case MULT_EXPR:
1140 if (TREE_CODE (arg2) == INTEGER_CST)
1141 res = wi::mul (wi::to_poly_wide (arg1),
1142 wi::to_wide (arg2), sign, overflow);
1143 else if (TREE_CODE (arg1) == INTEGER_CST)
1144 res = wi::mul (wi::to_poly_wide (arg2),
1145 wi::to_wide (arg1), sign, overflow);
1146 else
1147 return false;
1148 break;
1149
1150 case LSHIFT_EXPR:
1151 if (TREE_CODE (arg2) == INTEGER_CST)
1152 res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1153 else
1154 return false;
1155 break;
1156
1157 case BIT_IOR_EXPR:
1158 if (TREE_CODE (arg2) != INTEGER_CST
1159 || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1160 &res))
1161 return false;
1162 break;
1163
1164 default:
1165 return false;
1166 }
1167 return true;
1168 }
1169
1170 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1171 produce a new constant. Return NULL_TREE if we don't know how to
1172 evaluate CODE at compile-time. */
1173
1174 tree
1175 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1176 int overflowable)
1177 {
1178 poly_wide_int poly_res;
1179 tree type = TREE_TYPE (arg1);
1180 signop sign = TYPE_SIGN (type);
1181 wi::overflow_type overflow = wi::OVF_NONE;
1182
1183 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1184 {
1185 wide_int warg1 = wi::to_wide (arg1), res;
1186 wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
1187 if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
1188 return NULL_TREE;
1189 poly_res = res;
1190 }
1191 else if (!poly_int_tree_p (arg1)
1192 || !poly_int_tree_p (arg2)
1193 || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1194 return NULL_TREE;
1195 return force_fit_type (type, poly_res, overflowable,
1196 (((sign == SIGNED || overflowable == -1)
1197 && overflow)
1198 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1199 }
1200
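/* A quick illustration (hypothetical constants): folding 7 / 2 reaches
   int_const_binop (TRUNC_DIV_EXPR, ...) and produces the INTEGER_CST 3,
   and 7 % 2 via TRUNC_MOD_EXPR yields 1; a constant zero divisor makes
   wide_int_binop return false, so the result here is NULL_TREE.  */
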
1201 /* Return true if binary operation OP distributes over addition in operand
1202 OPNO, with the other operand being held constant. OPNO counts from 1. */
1203
1204 static bool
1205 distributes_over_addition_p (tree_code op, int opno)
1206 {
1207 switch (op)
1208 {
1209 case PLUS_EXPR:
1210 case MINUS_EXPR:
1211 case MULT_EXPR:
1212 return true;
1213
1214 case LSHIFT_EXPR:
1215 return opno == 1;
1216
1217 default:
1218 return false;
1219 }
1220 }
1221
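/* As a sketch of the property tested above (in wrapping arithmetic):
   (a + b) << c == (a << c) + (b << c), so LSHIFT_EXPR distributes over
   addition in operand 1; c << (a + b) has no such property, hence the
   opno == 1 restriction.  */
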
1222 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1223 constant. We assume ARG1 and ARG2 have the same data type, or at least
1224 are the same kind of constant and the same machine mode. Return zero if
1225 combining the constants is not allowed in the current operating mode. */
1226
1227 static tree
1228 const_binop (enum tree_code code, tree arg1, tree arg2)
1229 {
1230 /* Sanity check for the recursive cases. */
1231 if (!arg1 || !arg2)
1232 return NULL_TREE;
1233
1234 STRIP_NOPS (arg1);
1235 STRIP_NOPS (arg2);
1236
1237 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1238 {
1239 if (code == POINTER_PLUS_EXPR)
1240 return int_const_binop (PLUS_EXPR,
1241 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1242
1243 return int_const_binop (code, arg1, arg2);
1244 }
1245
1246 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1247 {
1248 machine_mode mode;
1249 REAL_VALUE_TYPE d1;
1250 REAL_VALUE_TYPE d2;
1251 REAL_VALUE_TYPE value;
1252 REAL_VALUE_TYPE result;
1253 bool inexact;
1254 tree t, type;
1255
1256 /* The following codes are handled by real_arithmetic. */
1257 switch (code)
1258 {
1259 case PLUS_EXPR:
1260 case MINUS_EXPR:
1261 case MULT_EXPR:
1262 case RDIV_EXPR:
1263 case MIN_EXPR:
1264 case MAX_EXPR:
1265 break;
1266
1267 default:
1268 return NULL_TREE;
1269 }
1270
1271 d1 = TREE_REAL_CST (arg1);
1272 d2 = TREE_REAL_CST (arg2);
1273
1274 type = TREE_TYPE (arg1);
1275 mode = TYPE_MODE (type);
1276
1277 /* Don't perform operation if we honor signaling NaNs and
1278 either operand is a signaling NaN. */
1279 if (HONOR_SNANS (mode)
1280 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1281 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1282 return NULL_TREE;
1283
1284 /* Don't perform operation if it would raise a division
1285 by zero exception. */
1286 if (code == RDIV_EXPR
1287 && real_equal (&d2, &dconst0)
1288 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1289 return NULL_TREE;
1290
1291 /* If either operand is a NaN, just return it. Otherwise, set up
1292 for floating-point trap; we return an overflow. */
1293 if (REAL_VALUE_ISNAN (d1))
1294 {
1295 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1296 is off. */
1297 d1.signalling = 0;
1298 t = build_real (type, d1);
1299 return t;
1300 }
1301 else if (REAL_VALUE_ISNAN (d2))
1302 {
1303 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1304 is off. */
1305 d2.signalling = 0;
1306 t = build_real (type, d2);
1307 return t;
1308 }
1309
1310 inexact = real_arithmetic (&value, code, &d1, &d2);
1311 real_convert (&result, mode, &value);
1312
1313 /* Don't constant fold this floating point operation if
1314 the result has overflowed and flag_trapping_math. */
1315 if (flag_trapping_math
1316 && MODE_HAS_INFINITIES (mode)
1317 && REAL_VALUE_ISINF (result)
1318 && !REAL_VALUE_ISINF (d1)
1319 && !REAL_VALUE_ISINF (d2))
1320 return NULL_TREE;
1321
1322 /* Don't constant fold this floating point operation if the
1323 result may depend upon the run-time rounding mode and
1324 flag_rounding_math is set, or if GCC's software emulation
1325 is unable to accurately represent the result. */
1326 if ((flag_rounding_math
1327 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1328 && (inexact || !real_identical (&result, &value)))
1329 return NULL_TREE;
1330
1331 t = build_real (type, result);
1332
1333 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1334 return t;
1335 }
1336
1337 if (TREE_CODE (arg1) == FIXED_CST)
1338 {
1339 FIXED_VALUE_TYPE f1;
1340 FIXED_VALUE_TYPE f2;
1341 FIXED_VALUE_TYPE result;
1342 tree t, type;
1343 int sat_p;
1344 bool overflow_p;
1345
1346 /* The following codes are handled by fixed_arithmetic. */
1347 switch (code)
1348 {
1349 case PLUS_EXPR:
1350 case MINUS_EXPR:
1351 case MULT_EXPR:
1352 case TRUNC_DIV_EXPR:
1353 if (TREE_CODE (arg2) != FIXED_CST)
1354 return NULL_TREE;
1355 f2 = TREE_FIXED_CST (arg2);
1356 break;
1357
1358 case LSHIFT_EXPR:
1359 case RSHIFT_EXPR:
1360 {
1361 if (TREE_CODE (arg2) != INTEGER_CST)
1362 return NULL_TREE;
1363 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1364 f2.data.high = w2.elt (1);
1365 f2.data.low = w2.ulow ();
1366 f2.mode = SImode;
1367 }
1368 break;
1369
1370 default:
1371 return NULL_TREE;
1372 }
1373
1374 f1 = TREE_FIXED_CST (arg1);
1375 type = TREE_TYPE (arg1);
1376 sat_p = TYPE_SATURATING (type);
1377 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1378 t = build_fixed (type, result);
1379 /* Propagate overflow flags. */
1380 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1381 TREE_OVERFLOW (t) = 1;
1382 return t;
1383 }
1384
1385 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1386 {
1387 tree type = TREE_TYPE (arg1);
1388 tree r1 = TREE_REALPART (arg1);
1389 tree i1 = TREE_IMAGPART (arg1);
1390 tree r2 = TREE_REALPART (arg2);
1391 tree i2 = TREE_IMAGPART (arg2);
1392 tree real, imag;
1393
1394 switch (code)
1395 {
1396 case PLUS_EXPR:
1397 case MINUS_EXPR:
1398 real = const_binop (code, r1, r2);
1399 imag = const_binop (code, i1, i2);
1400 break;
1401
1402 case MULT_EXPR:
1403 if (COMPLEX_FLOAT_TYPE_P (type))
1404 return do_mpc_arg2 (arg1, arg2, type,
1405 /* do_nonfinite= */ folding_initializer,
1406 mpc_mul);
1407
1408 real = const_binop (MINUS_EXPR,
1409 const_binop (MULT_EXPR, r1, r2),
1410 const_binop (MULT_EXPR, i1, i2));
1411 imag = const_binop (PLUS_EXPR,
1412 const_binop (MULT_EXPR, r1, i2),
1413 const_binop (MULT_EXPR, i1, r2));
1414 break;
1415
1416 case RDIV_EXPR:
1417 if (COMPLEX_FLOAT_TYPE_P (type))
1418 return do_mpc_arg2 (arg1, arg2, type,
1419 /* do_nonfinite= */ folding_initializer,
1420 mpc_div);
1421 /* Fallthru. */
1422 case TRUNC_DIV_EXPR:
1423 case CEIL_DIV_EXPR:
1424 case FLOOR_DIV_EXPR:
1425 case ROUND_DIV_EXPR:
1426 if (flag_complex_method == 0)
1427 {
1428 /* Keep this algorithm in sync with
1429 tree-complex.c:expand_complex_div_straight().
1430
1431 Expand complex division to scalars, straightforward algorithm.
1432 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1433 t = br*br + bi*bi
1434 */
1435 tree magsquared
1436 = const_binop (PLUS_EXPR,
1437 const_binop (MULT_EXPR, r2, r2),
1438 const_binop (MULT_EXPR, i2, i2));
1439 tree t1
1440 = const_binop (PLUS_EXPR,
1441 const_binop (MULT_EXPR, r1, r2),
1442 const_binop (MULT_EXPR, i1, i2));
1443 tree t2
1444 = const_binop (MINUS_EXPR,
1445 const_binop (MULT_EXPR, i1, r2),
1446 const_binop (MULT_EXPR, r1, i2));
1447
1448 real = const_binop (code, t1, magsquared);
1449 imag = const_binop (code, t2, magsquared);
1450 }
1451 else
1452 {
1453 /* Keep this algorithm in sync with
1454 tree-complex.c:expand_complex_div_wide().
1455
1456 Expand complex division to scalars, modified algorithm to minimize
1457 overflow with wide input ranges. */
1458 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1459 fold_abs_const (r2, TREE_TYPE (type)),
1460 fold_abs_const (i2, TREE_TYPE (type)));
1461
1462 if (integer_nonzerop (compare))
1463 {
1464 /* In the TRUE branch, we compute
1465 ratio = br/bi;
1466 div = (br * ratio) + bi;
1467 tr = (ar * ratio) + ai;
1468 ti = (ai * ratio) - ar;
1469 tr = tr / div;
1470 ti = ti / div; */
1471 tree ratio = const_binop (code, r2, i2);
1472 tree div = const_binop (PLUS_EXPR, i2,
1473 const_binop (MULT_EXPR, r2, ratio));
1474 real = const_binop (MULT_EXPR, r1, ratio);
1475 real = const_binop (PLUS_EXPR, real, i1);
1476 real = const_binop (code, real, div);
1477
1478 imag = const_binop (MULT_EXPR, i1, ratio);
1479 imag = const_binop (MINUS_EXPR, imag, r1);
1480 imag = const_binop (code, imag, div);
1481 }
1482 else
1483 {
1484 /* In the FALSE branch, we compute
1485 ratio = d/c;
1486 divisor = (d * ratio) + c;
1487 tr = (b * ratio) + a;
1488 ti = b - (a * ratio);
1489 tr = tr / div;
1490 ti = ti / div; */
1491 tree ratio = const_binop (code, i2, r2);
1492 tree div = const_binop (PLUS_EXPR, r2,
1493 const_binop (MULT_EXPR, i2, ratio));
1494
1495 real = const_binop (MULT_EXPR, i1, ratio);
1496 real = const_binop (PLUS_EXPR, real, r1);
1497 real = const_binop (code, real, div);
1498
1499 imag = const_binop (MULT_EXPR, r1, ratio);
1500 imag = const_binop (MINUS_EXPR, i1, imag);
1501 imag = const_binop (code, imag, div);
1502 }
1503 }
1504 break;
1505
1506 default:
1507 return NULL_TREE;
1508 }
1509
1510 if (real && imag)
1511 return build_complex (type, real, imag);
1512 }
1513
1514 if (TREE_CODE (arg1) == VECTOR_CST
1515 && TREE_CODE (arg2) == VECTOR_CST
1516 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1517 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1518 {
1519 tree type = TREE_TYPE (arg1);
1520 bool step_ok_p;
1521 if (VECTOR_CST_STEPPED_P (arg1)
1522 && VECTOR_CST_STEPPED_P (arg2))
1523 /* We can operate directly on the encoding if:
1524
1525 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1526 implies
1527 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1528
1529 Addition and subtraction are the supported operators
1530 for which this is true. */
1531 step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1532 else if (VECTOR_CST_STEPPED_P (arg1))
1533 /* We can operate directly on stepped encodings if:
1534
1535 a3 - a2 == a2 - a1
1536 implies:
1537 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1538
1539 which is true if (x -> x op c) distributes over addition. */
1540 step_ok_p = distributes_over_addition_p (code, 1);
1541 else
1542 /* Similarly in reverse. */
1543 step_ok_p = distributes_over_addition_p (code, 2);
1544 tree_vector_builder elts;
1545 if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1546 return NULL_TREE;
1547 unsigned int count = elts.encoded_nelts ();
1548 for (unsigned int i = 0; i < count; ++i)
1549 {
1550 tree elem1 = VECTOR_CST_ELT (arg1, i);
1551 tree elem2 = VECTOR_CST_ELT (arg2, i);
1552
1553 tree elt = const_binop (code, elem1, elem2);
1554
1555 /* It is possible that const_binop cannot handle the given
1556 code and returns NULL_TREE. */
1557 if (elt == NULL_TREE)
1558 return NULL_TREE;
1559 elts.quick_push (elt);
1560 }
1561
1562 return elts.build ();
1563 }
1564
1565 /* Shifts allow a scalar offset for a vector. */
1566 if (TREE_CODE (arg1) == VECTOR_CST
1567 && TREE_CODE (arg2) == INTEGER_CST)
1568 {
1569 tree type = TREE_TYPE (arg1);
1570 bool step_ok_p = distributes_over_addition_p (code, 1);
1571 tree_vector_builder elts;
1572 if (!elts.new_unary_operation (type, arg1, step_ok_p))
1573 return NULL_TREE;
1574 unsigned int count = elts.encoded_nelts ();
1575 for (unsigned int i = 0; i < count; ++i)
1576 {
1577 tree elem1 = VECTOR_CST_ELT (arg1, i);
1578
1579 tree elt = const_binop (code, elem1, arg2);
1580
1581 /* It is possible that const_binop cannot handle the given
1582 code and returns NULL_TREE. */
1583 if (elt == NULL_TREE)
1584 return NULL_TREE;
1585 elts.quick_push (elt);
1586 }
1587
1588 return elts.build ();
1589 }
1590 return NULL_TREE;
1591 }
1592
1593 /* Overload that adds a TYPE parameter to be able to dispatch
1594 to fold_relational_const. */
1595
1596 tree
1597 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1598 {
1599 if (TREE_CODE_CLASS (code) == tcc_comparison)
1600 return fold_relational_const (code, type, arg1, arg2);
1601
1602 /* ??? Until we make the const_binop worker take the type of the
1603 result as argument, put those cases that need it here. */
1604 switch (code)
1605 {
1606 case VEC_SERIES_EXPR:
1607 if (CONSTANT_CLASS_P (arg1)
1608 && CONSTANT_CLASS_P (arg2))
1609 return build_vec_series (type, arg1, arg2);
1610 return NULL_TREE;
1611
1612 case COMPLEX_EXPR:
1613 if ((TREE_CODE (arg1) == REAL_CST
1614 && TREE_CODE (arg2) == REAL_CST)
1615 || (TREE_CODE (arg1) == INTEGER_CST
1616 && TREE_CODE (arg2) == INTEGER_CST))
1617 return build_complex (type, arg1, arg2);
1618 return NULL_TREE;
1619
1620 case POINTER_DIFF_EXPR:
1621 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1622 {
1623 poly_offset_int res = (wi::to_poly_offset (arg1)
1624 - wi::to_poly_offset (arg2));
1625 return force_fit_type (type, res, 1,
1626 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1627 }
1628 return NULL_TREE;
1629
1630 case VEC_PACK_TRUNC_EXPR:
1631 case VEC_PACK_FIX_TRUNC_EXPR:
1632 case VEC_PACK_FLOAT_EXPR:
1633 {
1634 unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1635
1636 if (TREE_CODE (arg1) != VECTOR_CST
1637 || TREE_CODE (arg2) != VECTOR_CST)
1638 return NULL_TREE;
1639
1640 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1641 return NULL_TREE;
1642
1643 out_nelts = in_nelts * 2;
1644 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1645 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1646
1647 tree_vector_builder elts (type, out_nelts, 1);
1648 for (i = 0; i < out_nelts; i++)
1649 {
1650 tree elt = (i < in_nelts
1651 ? VECTOR_CST_ELT (arg1, i)
1652 : VECTOR_CST_ELT (arg2, i - in_nelts));
1653 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1654 ? NOP_EXPR
1655 : code == VEC_PACK_FLOAT_EXPR
1656 ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1657 TREE_TYPE (type), elt);
1658 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1659 return NULL_TREE;
1660 elts.quick_push (elt);
1661 }
1662
1663 return elts.build ();
1664 }
1665
1666 case VEC_WIDEN_MULT_LO_EXPR:
1667 case VEC_WIDEN_MULT_HI_EXPR:
1668 case VEC_WIDEN_MULT_EVEN_EXPR:
1669 case VEC_WIDEN_MULT_ODD_EXPR:
1670 {
1671 unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1672
1673 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1674 return NULL_TREE;
1675
1676 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1677 return NULL_TREE;
1678 out_nelts = in_nelts / 2;
1679 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1680 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1681
1682 if (code == VEC_WIDEN_MULT_LO_EXPR)
1683 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1684 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1685 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1686 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1687 scale = 1, ofs = 0;
1688 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1689 scale = 1, ofs = 1;
1690
1691 tree_vector_builder elts (type, out_nelts, 1);
1692 for (out = 0; out < out_nelts; out++)
1693 {
1694 unsigned int in = (out << scale) + ofs;
1695 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1696 VECTOR_CST_ELT (arg1, in));
1697 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1698 VECTOR_CST_ELT (arg2, in));
1699
1700 if (t1 == NULL_TREE || t2 == NULL_TREE)
1701 return NULL_TREE;
1702 tree elt = const_binop (MULT_EXPR, t1, t2);
1703 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1704 return NULL_TREE;
1705 elts.quick_push (elt);
1706 }
1707
1708 return elts.build ();
1709 }
1710
1711 default:;
1712 }
1713
1714 if (TREE_CODE_CLASS (code) != tcc_binary)
1715 return NULL_TREE;
1716
1717 /* Make sure type and arg0 have the same saturating flag. */
1718 gcc_checking_assert (TYPE_SATURATING (type)
1719 == TYPE_SATURATING (TREE_TYPE (arg1)));
1720
1721 return const_binop (code, arg1, arg2);
1722 }
1723
1724 /* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
1725 Return zero if computing the constant is not possible. */
1726
1727 tree
1728 const_unop (enum tree_code code, tree type, tree arg0)
1729 {
1730 /* Don't perform the operation, other than NEGATE and ABS, if
1731 flag_signaling_nans is on and the operand is a signaling NaN. */
1732 if (TREE_CODE (arg0) == REAL_CST
1733 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1734 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1735 && code != NEGATE_EXPR
1736 && code != ABS_EXPR
1737 && code != ABSU_EXPR)
1738 return NULL_TREE;
1739
1740 switch (code)
1741 {
1742 CASE_CONVERT:
1743 case FLOAT_EXPR:
1744 case FIX_TRUNC_EXPR:
1745 case FIXED_CONVERT_EXPR:
1746 return fold_convert_const (code, type, arg0);
1747
1748 case ADDR_SPACE_CONVERT_EXPR:
1749 /* If the source address is 0, and the source address space
1750 cannot have a valid object at 0, fold to dest type null. */
1751 if (integer_zerop (arg0)
1752 && !(targetm.addr_space.zero_address_valid
1753 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1754 return fold_convert_const (code, type, arg0);
1755 break;
1756
1757 case VIEW_CONVERT_EXPR:
1758 return fold_view_convert_expr (type, arg0);
1759
1760 case NEGATE_EXPR:
1761 {
1762 /* Can't call fold_negate_const directly here as that doesn't
1763 handle all cases and we might not be able to negate some
1764 constants. */
1765 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1766 if (tem && CONSTANT_CLASS_P (tem))
1767 return tem;
1768 break;
1769 }
1770
1771 case ABS_EXPR:
1772 case ABSU_EXPR:
1773 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1774 return fold_abs_const (arg0, type);
1775 break;
1776
1777 case CONJ_EXPR:
1778 if (TREE_CODE (arg0) == COMPLEX_CST)
1779 {
1780 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1781 TREE_TYPE (type));
1782 return build_complex (type, TREE_REALPART (arg0), ipart);
1783 }
1784 break;
1785
1786 case BIT_NOT_EXPR:
1787 if (TREE_CODE (arg0) == INTEGER_CST)
1788 return fold_not_const (arg0, type);
1789 else if (POLY_INT_CST_P (arg0))
1790 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1791 /* Perform BIT_NOT_EXPR on each element individually. */
1792 else if (TREE_CODE (arg0) == VECTOR_CST)
1793 {
1794 tree elem;
1795
1796 /* This can cope with stepped encodings because ~x == -1 - x. */
1797 tree_vector_builder elements;
1798 elements.new_unary_operation (type, arg0, true);
1799 unsigned int i, count = elements.encoded_nelts ();
1800 for (i = 0; i < count; ++i)
1801 {
1802 elem = VECTOR_CST_ELT (arg0, i);
1803 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1804 if (elem == NULL_TREE)
1805 break;
1806 elements.quick_push (elem);
1807 }
1808 if (i == count)
1809 return elements.build ();
1810 }
1811 break;
1812
1813 case TRUTH_NOT_EXPR:
1814 if (TREE_CODE (arg0) == INTEGER_CST)
1815 return constant_boolean_node (integer_zerop (arg0), type);
1816 break;
1817
1818 case REALPART_EXPR:
1819 if (TREE_CODE (arg0) == COMPLEX_CST)
1820 return fold_convert (type, TREE_REALPART (arg0));
1821 break;
1822
1823 case IMAGPART_EXPR:
1824 if (TREE_CODE (arg0) == COMPLEX_CST)
1825 return fold_convert (type, TREE_IMAGPART (arg0));
1826 break;
1827
1828 case VEC_UNPACK_LO_EXPR:
1829 case VEC_UNPACK_HI_EXPR:
1830 case VEC_UNPACK_FLOAT_LO_EXPR:
1831 case VEC_UNPACK_FLOAT_HI_EXPR:
1832 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1833 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1834 {
1835 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1836 enum tree_code subcode;
1837
1838 if (TREE_CODE (arg0) != VECTOR_CST)
1839 return NULL_TREE;
1840
1841 if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1842 return NULL_TREE;
1843 out_nelts = in_nelts / 2;
1844 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1845
1846 unsigned int offset = 0;
1847 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1848 || code == VEC_UNPACK_FLOAT_LO_EXPR
1849 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1850 offset = out_nelts;
1851
1852 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1853 subcode = NOP_EXPR;
1854 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1855 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1856 subcode = FLOAT_EXPR;
1857 else
1858 subcode = FIX_TRUNC_EXPR;
1859
1860 tree_vector_builder elts (type, out_nelts, 1);
1861 for (i = 0; i < out_nelts; i++)
1862 {
1863 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1864 VECTOR_CST_ELT (arg0, i + offset));
1865 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1866 return NULL_TREE;
1867 elts.quick_push (elt);
1868 }
1869
1870 return elts.build ();
1871 }
1872
1873 case VEC_DUPLICATE_EXPR:
1874 if (CONSTANT_CLASS_P (arg0))
1875 return build_vector_from_val (type, arg0);
1876 return NULL_TREE;
1877
1878 default:
1879 break;
1880 }
1881
1882 return NULL_TREE;
1883 }
1884
1885 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1886 indicates which particular sizetype to create. */
1887
1888 tree
1889 size_int_kind (poly_int64 number, enum size_type_kind kind)
1890 {
1891 return build_int_cst (sizetype_tab[(int) kind], number);
1892 }
1893 \f
1894 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1895 is a tree code. The type of the result is taken from the operands.
1896 Both must be equivalent integer types, ala int_binop_types_match_p.
1897 If the operands are constant, so is the result. */
1898
1899 tree
1900 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1901 {
1902 tree type = TREE_TYPE (arg0);
1903
1904 if (arg0 == error_mark_node || arg1 == error_mark_node)
1905 return error_mark_node;
1906
1907 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1908 TREE_TYPE (arg1)));
1909
1910 /* Handle the special case of two poly_int constants faster. */
1911 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1912 {
1913 /* And some specific cases even faster than that. */
1914 if (code == PLUS_EXPR)
1915 {
1916 if (integer_zerop (arg0)
1917 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1918 return arg1;
1919 if (integer_zerop (arg1)
1920 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1921 return arg0;
1922 }
1923 else if (code == MINUS_EXPR)
1924 {
1925 if (integer_zerop (arg1)
1926 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1927 return arg0;
1928 }
1929 else if (code == MULT_EXPR)
1930 {
1931 if (integer_onep (arg0)
1932 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1933 return arg1;
1934 }
1935
1936 /* Handle general case of two integer constants. For sizetype
1937 constant calculations we always want to know about overflow,
1938 even in the unsigned case. */
1939 tree res = int_const_binop (code, arg0, arg1, -1);
1940 if (res != NULL_TREE)
1941 return res;
1942 }
1943
1944 return fold_build2_loc (loc, code, type, arg0, arg1);
1945 }
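/* Editorial sketch of how size_binop_loc above is typically used; the
   calls are illustrative, not taken from the original source:

     tree a = build_int_cst (sizetype, 12);
     tree b = build_int_cst (sizetype, 4);
     tree sum = size_binop_loc (UNKNOWN_LOCATION, PLUS_EXPR, a, b);

   SUM is then an INTEGER_CST with value 16; note that int_const_binop is
   passed -1 as its final argument above, so overflow is flagged even
   though sizetype is unsigned.  */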
1946
1947 /* Given two values, either both of sizetype or both of bitsizetype,
1948 compute the difference between the two values. Return the value
1949 in signed type corresponding to the type of the operands. */
1950
1951 tree
1952 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1953 {
1954 tree type = TREE_TYPE (arg0);
1955 tree ctype;
1956
1957 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1958 TREE_TYPE (arg1)));
1959
1960 /* If the type is already signed, just do the simple thing. */
1961 if (!TYPE_UNSIGNED (type))
1962 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1963
1964 if (type == sizetype)
1965 ctype = ssizetype;
1966 else if (type == bitsizetype)
1967 ctype = sbitsizetype;
1968 else
1969 ctype = signed_type_for (type);
1970
1971 /* If either operand is not a constant, do the conversions to the signed
1972 type and subtract. The hardware will do the right thing with any
1973 overflow in the subtraction. */
1974 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1975 return size_binop_loc (loc, MINUS_EXPR,
1976 fold_convert_loc (loc, ctype, arg0),
1977 fold_convert_loc (loc, ctype, arg1));
1978
1979 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1980 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1981 overflow) and negate (which can't either). Special-case a result
1982 of zero while we're here. */
1983 if (tree_int_cst_equal (arg0, arg1))
1984 return build_int_cst (ctype, 0);
1985 else if (tree_int_cst_lt (arg1, arg0))
1986 return fold_convert_loc (loc, ctype,
1987 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1988 else
1989 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1990 fold_convert_loc (loc, ctype,
1991 size_binop_loc (loc,
1992 MINUS_EXPR,
1993 arg1, arg0)));
1994 }
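/* Editorial note: because the result type is the signed counterpart
   (ssizetype for sizetype), a "negative" difference stays meaningful.
   For example, with constant sizetype operands ARG0 == 2 and ARG1 == 5,
   the final branch above computes 0 - (5 - 2) == -3 as an ssizetype
   INTEGER_CST instead of a huge unsigned value.  */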
1995 \f
1996 /* A subroutine of fold_convert_const handling conversions of an
1997 INTEGER_CST to another integer type. */
1998
1999 static tree
2000 fold_convert_const_int_from_int (tree type, const_tree arg1)
2001 {
2002 /* Given an integer constant, make new constant with new type,
2003 appropriately sign-extended or truncated. Use widest_int
2004 so that any extension is done according to ARG1's type. */
2005 return force_fit_type (type, wi::to_widest (arg1),
2006 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2007 TREE_OVERFLOW (arg1));
2008 }
2009
2010 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2011 to an integer type. */
2012
2013 static tree
2014 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2015 {
2016 bool overflow = false;
2017 tree t;
2018
2019 /* The following code implements the floating point to integer
2020 conversion rules required by the Java Language Specification,
2021 that IEEE NaNs are mapped to zero and values that overflow
2022 the target precision saturate, i.e. values greater than
2023 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2024 are mapped to INT_MIN. These semantics are allowed by the
2025 C and C++ standards that simply state that the behavior of
2026 FP-to-integer conversion is unspecified upon overflow. */
2027
2028 wide_int val;
2029 REAL_VALUE_TYPE r;
2030 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2031
2032 switch (code)
2033 {
2034 case FIX_TRUNC_EXPR:
2035 real_trunc (&r, VOIDmode, &x);
2036 break;
2037
2038 default:
2039 gcc_unreachable ();
2040 }
2041
2042 /* If R is NaN, return zero and show we have an overflow. */
2043 if (REAL_VALUE_ISNAN (r))
2044 {
2045 overflow = true;
2046 val = wi::zero (TYPE_PRECISION (type));
2047 }
2048
2049 /* See if R is less than the lower bound or greater than the
2050 upper bound. */
2051
2052 if (! overflow)
2053 {
2054 tree lt = TYPE_MIN_VALUE (type);
2055 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2056 if (real_less (&r, &l))
2057 {
2058 overflow = true;
2059 val = wi::to_wide (lt);
2060 }
2061 }
2062
2063 if (! overflow)
2064 {
2065 tree ut = TYPE_MAX_VALUE (type);
2066 if (ut)
2067 {
2068 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2069 if (real_less (&u, &r))
2070 {
2071 overflow = true;
2072 val = wi::to_wide (ut);
2073 }
2074 }
2075 }
2076
2077 if (! overflow)
2078 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2079
2080 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2081 return t;
2082 }
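/* Editorial examples of the saturating semantics implemented above:
   folding (int) 1.0e30 yields INT_MAX with TREE_OVERFLOW set, folding
   (int) -1.0e30 yields INT_MIN with TREE_OVERFLOW set, and folding
   (int) __builtin_nan ("") yields 0, also with TREE_OVERFLOW set.  */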
2083
2084 /* A subroutine of fold_convert_const handling conversions of a
2085 FIXED_CST to an integer type. */
2086
2087 static tree
2088 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2089 {
2090 tree t;
2091 double_int temp, temp_trunc;
2092 scalar_mode mode;
2093
2094 /* Right shift FIXED_CST to temp by fbit. */
2095 temp = TREE_FIXED_CST (arg1).data;
2096 mode = TREE_FIXED_CST (arg1).mode;
2097 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2098 {
2099 temp = temp.rshift (GET_MODE_FBIT (mode),
2100 HOST_BITS_PER_DOUBLE_INT,
2101 SIGNED_FIXED_POINT_MODE_P (mode));
2102
2103 /* Left shift temp to temp_trunc by fbit. */
2104 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2105 HOST_BITS_PER_DOUBLE_INT,
2106 SIGNED_FIXED_POINT_MODE_P (mode));
2107 }
2108 else
2109 {
2110 temp = double_int_zero;
2111 temp_trunc = double_int_zero;
2112 }
2113
2114 /* If FIXED_CST is negative, we need to round the value toward 0:
2115 if the fractional bits are nonzero, add 1 to TEMP to compensate. */
2116 if (SIGNED_FIXED_POINT_MODE_P (mode)
2117 && temp_trunc.is_negative ()
2118 && TREE_FIXED_CST (arg1).data != temp_trunc)
2119 temp += double_int_one;
2120
2121 /* Given a fixed-point constant, make new constant with new type,
2122 appropriately sign-extended or truncated. */
2123 t = force_fit_type (type, temp, -1,
2124 (temp.is_negative ()
2125 && (TYPE_UNSIGNED (type)
2126 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2127 | TREE_OVERFLOW (arg1));
2128
2129 return t;
2130 }
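/* Editorial worked example for the rounding above: for a signed
   fixed-point constant -2.25, the arithmetic right shift by FBIT gives
   TEMP == -3 (truncation towards minus infinity) and TEMP_TRUNC
   corresponds to -3.0.  Since TEMP_TRUNC is negative and differs from the
   original value, the fractional bits were nonzero, so 1 is added and the
   integer result becomes -2, i.e. the conversion rounds towards zero.  */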
2131
2132 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2133 to another floating point type. */
2134
2135 static tree
2136 fold_convert_const_real_from_real (tree type, const_tree arg1)
2137 {
2138 REAL_VALUE_TYPE value;
2139 tree t;
2140
2141 /* Don't perform the operation if flag_signaling_nans is on
2142 and the operand is a signaling NaN. */
2143 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2144 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2145 return NULL_TREE;
2146
2147 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2148 t = build_real (type, value);
2149
2150 /* If converting an infinity or NAN to a representation that doesn't
2151 have one, set the overflow bit so that we can produce some kind of
2152 error message at the appropriate point if necessary. It's not the
2153 most user-friendly message, but it's better than nothing. */
2154 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2155 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2156 TREE_OVERFLOW (t) = 1;
2157 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2158 && !MODE_HAS_NANS (TYPE_MODE (type)))
2159 TREE_OVERFLOW (t) = 1;
2160 /* Regular overflow, conversion produced an infinity in a mode that
2161 can't represent them. */
2162 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2163 && REAL_VALUE_ISINF (value)
2164 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2165 TREE_OVERFLOW (t) = 1;
2166 else
2167 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2168 return t;
2169 }
2170
2171 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2172 to a floating point type. */
2173
2174 static tree
2175 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2176 {
2177 REAL_VALUE_TYPE value;
2178 tree t;
2179
2180 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2181 &TREE_FIXED_CST (arg1));
2182 t = build_real (type, value);
2183
2184 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2185 return t;
2186 }
2187
2188 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2189 to another fixed-point type. */
2190
2191 static tree
2192 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2193 {
2194 FIXED_VALUE_TYPE value;
2195 tree t;
2196 bool overflow_p;
2197
2198 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2199 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2200 t = build_fixed (type, value);
2201
2202 /* Propagate overflow flags. */
2203 if (overflow_p | TREE_OVERFLOW (arg1))
2204 TREE_OVERFLOW (t) = 1;
2205 return t;
2206 }
2207
2208 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2209 to a fixed-point type. */
2210
2211 static tree
2212 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2213 {
2214 FIXED_VALUE_TYPE value;
2215 tree t;
2216 bool overflow_p;
2217 double_int di;
2218
2219 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2220
2221 di.low = TREE_INT_CST_ELT (arg1, 0);
2222 if (TREE_INT_CST_NUNITS (arg1) == 1)
2223 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2224 else
2225 di.high = TREE_INT_CST_ELT (arg1, 1);
2226
2227 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2228 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2229 TYPE_SATURATING (type));
2230 t = build_fixed (type, value);
2231
2232 /* Propagate overflow flags. */
2233 if (overflow_p | TREE_OVERFLOW (arg1))
2234 TREE_OVERFLOW (t) = 1;
2235 return t;
2236 }
2237
2238 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2239 to a fixed-point type. */
2240
2241 static tree
2242 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2243 {
2244 FIXED_VALUE_TYPE value;
2245 tree t;
2246 bool overflow_p;
2247
2248 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2249 &TREE_REAL_CST (arg1),
2250 TYPE_SATURATING (type));
2251 t = build_fixed (type, value);
2252
2253 /* Propagate overflow flags. */
2254 if (overflow_p | TREE_OVERFLOW (arg1))
2255 TREE_OVERFLOW (t) = 1;
2256 return t;
2257 }
2258
2259 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2260 type TYPE. If no simplification can be done return NULL_TREE. */
2261
2262 static tree
2263 fold_convert_const (enum tree_code code, tree type, tree arg1)
2264 {
2265 tree arg_type = TREE_TYPE (arg1);
2266 if (arg_type == type)
2267 return arg1;
2268
2269 /* We can't widen types, since the runtime value could overflow the
2270 original type before being extended to the new type. */
2271 if (POLY_INT_CST_P (arg1)
2272 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2273 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2274 return build_poly_int_cst (type,
2275 poly_wide_int::from (poly_int_cst_value (arg1),
2276 TYPE_PRECISION (type),
2277 TYPE_SIGN (arg_type)));
2278
2279 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2280 || TREE_CODE (type) == OFFSET_TYPE)
2281 {
2282 if (TREE_CODE (arg1) == INTEGER_CST)
2283 return fold_convert_const_int_from_int (type, arg1);
2284 else if (TREE_CODE (arg1) == REAL_CST)
2285 return fold_convert_const_int_from_real (code, type, arg1);
2286 else if (TREE_CODE (arg1) == FIXED_CST)
2287 return fold_convert_const_int_from_fixed (type, arg1);
2288 }
2289 else if (TREE_CODE (type) == REAL_TYPE)
2290 {
2291 if (TREE_CODE (arg1) == INTEGER_CST)
2292 return build_real_from_int_cst (type, arg1);
2293 else if (TREE_CODE (arg1) == REAL_CST)
2294 return fold_convert_const_real_from_real (type, arg1);
2295 else if (TREE_CODE (arg1) == FIXED_CST)
2296 return fold_convert_const_real_from_fixed (type, arg1);
2297 }
2298 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2299 {
2300 if (TREE_CODE (arg1) == FIXED_CST)
2301 return fold_convert_const_fixed_from_fixed (type, arg1);
2302 else if (TREE_CODE (arg1) == INTEGER_CST)
2303 return fold_convert_const_fixed_from_int (type, arg1);
2304 else if (TREE_CODE (arg1) == REAL_CST)
2305 return fold_convert_const_fixed_from_real (type, arg1);
2306 }
2307 else if (TREE_CODE (type) == VECTOR_TYPE)
2308 {
2309 if (TREE_CODE (arg1) == VECTOR_CST
2310 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2311 {
2312 tree elttype = TREE_TYPE (type);
2313 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2314 /* We can't handle steps directly when extending, since the
2315 values need to wrap at the original precision first. */
2316 bool step_ok_p
2317 = (INTEGRAL_TYPE_P (elttype)
2318 && INTEGRAL_TYPE_P (arg1_elttype)
2319 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2320 tree_vector_builder v;
2321 if (!v.new_unary_operation (type, arg1, step_ok_p))
2322 return NULL_TREE;
2323 unsigned int len = v.encoded_nelts ();
2324 for (unsigned int i = 0; i < len; ++i)
2325 {
2326 tree elt = VECTOR_CST_ELT (arg1, i);
2327 tree cvt = fold_convert_const (code, elttype, elt);
2328 if (cvt == NULL_TREE)
2329 return NULL_TREE;
2330 v.quick_push (cvt);
2331 }
2332 return v.build ();
2333 }
2334 }
2335 return NULL_TREE;
2336 }
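/* Editorial sketch (the call below is illustrative only): fold_convert_const
   above is the constant-only dispatcher used by fold_convert_loc further
   below.  For instance

     tree t = fold_convert_const (NOP_EXPR, long_integer_type_node,
                                  build_int_cst (integer_type_node, 42));

   returns the INTEGER_CST 42 of type long via
   fold_convert_const_int_from_int, while any argument it cannot fold
   simply produces NULL_TREE.  */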
2337
2338 /* Construct a vector of zero elements of vector type TYPE. */
2339
2340 static tree
2341 build_zero_vector (tree type)
2342 {
2343 tree t;
2344
2345 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2346 return build_vector_from_val (type, t);
2347 }
2348
2349 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2350
2351 bool
2352 fold_convertible_p (const_tree type, const_tree arg)
2353 {
2354 tree orig = TREE_TYPE (arg);
2355
2356 if (type == orig)
2357 return true;
2358
2359 if (TREE_CODE (arg) == ERROR_MARK
2360 || TREE_CODE (type) == ERROR_MARK
2361 || TREE_CODE (orig) == ERROR_MARK)
2362 return false;
2363
2364 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2365 return true;
2366
2367 switch (TREE_CODE (type))
2368 {
2369 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2370 case POINTER_TYPE: case REFERENCE_TYPE:
2371 case OFFSET_TYPE:
2372 return (INTEGRAL_TYPE_P (orig)
2373 || (POINTER_TYPE_P (orig)
2374 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2375 || TREE_CODE (orig) == OFFSET_TYPE);
2376
2377 case REAL_TYPE:
2378 case FIXED_POINT_TYPE:
2379 case VECTOR_TYPE:
2380 case VOID_TYPE:
2381 return TREE_CODE (type) == TREE_CODE (orig);
2382
2383 default:
2384 return false;
2385 }
2386 }
2387
2388 /* Convert expression ARG to type TYPE. Used by the middle-end for
2389 simple conversions in preference to calling the front-end's convert. */
2390
2391 tree
2392 fold_convert_loc (location_t loc, tree type, tree arg)
2393 {
2394 tree orig = TREE_TYPE (arg);
2395 tree tem;
2396
2397 if (type == orig)
2398 return arg;
2399
2400 if (TREE_CODE (arg) == ERROR_MARK
2401 || TREE_CODE (type) == ERROR_MARK
2402 || TREE_CODE (orig) == ERROR_MARK)
2403 return error_mark_node;
2404
2405 switch (TREE_CODE (type))
2406 {
2407 case POINTER_TYPE:
2408 case REFERENCE_TYPE:
2409 /* Handle conversions between pointers to different address spaces. */
2410 if (POINTER_TYPE_P (orig)
2411 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2412 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2413 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2414 /* fall through */
2415
2416 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2417 case OFFSET_TYPE:
2418 if (TREE_CODE (arg) == INTEGER_CST)
2419 {
2420 tem = fold_convert_const (NOP_EXPR, type, arg);
2421 if (tem != NULL_TREE)
2422 return tem;
2423 }
2424 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2425 || TREE_CODE (orig) == OFFSET_TYPE)
2426 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2427 if (TREE_CODE (orig) == COMPLEX_TYPE)
2428 return fold_convert_loc (loc, type,
2429 fold_build1_loc (loc, REALPART_EXPR,
2430 TREE_TYPE (orig), arg));
2431 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2432 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2433 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2434
2435 case REAL_TYPE:
2436 if (TREE_CODE (arg) == INTEGER_CST)
2437 {
2438 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2439 if (tem != NULL_TREE)
2440 return tem;
2441 }
2442 else if (TREE_CODE (arg) == REAL_CST)
2443 {
2444 tem = fold_convert_const (NOP_EXPR, type, arg);
2445 if (tem != NULL_TREE)
2446 return tem;
2447 }
2448 else if (TREE_CODE (arg) == FIXED_CST)
2449 {
2450 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2451 if (tem != NULL_TREE)
2452 return tem;
2453 }
2454
2455 switch (TREE_CODE (orig))
2456 {
2457 case INTEGER_TYPE:
2458 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2459 case POINTER_TYPE: case REFERENCE_TYPE:
2460 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2461
2462 case REAL_TYPE:
2463 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2464
2465 case FIXED_POINT_TYPE:
2466 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2467
2468 case COMPLEX_TYPE:
2469 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2470 return fold_convert_loc (loc, type, tem);
2471
2472 default:
2473 gcc_unreachable ();
2474 }
2475
2476 case FIXED_POINT_TYPE:
2477 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2478 || TREE_CODE (arg) == REAL_CST)
2479 {
2480 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2481 if (tem != NULL_TREE)
2482 goto fold_convert_exit;
2483 }
2484
2485 switch (TREE_CODE (orig))
2486 {
2487 case FIXED_POINT_TYPE:
2488 case INTEGER_TYPE:
2489 case ENUMERAL_TYPE:
2490 case BOOLEAN_TYPE:
2491 case REAL_TYPE:
2492 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2493
2494 case COMPLEX_TYPE:
2495 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2496 return fold_convert_loc (loc, type, tem);
2497
2498 default:
2499 gcc_unreachable ();
2500 }
2501
2502 case COMPLEX_TYPE:
2503 switch (TREE_CODE (orig))
2504 {
2505 case INTEGER_TYPE:
2506 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2507 case POINTER_TYPE: case REFERENCE_TYPE:
2508 case REAL_TYPE:
2509 case FIXED_POINT_TYPE:
2510 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2511 fold_convert_loc (loc, TREE_TYPE (type), arg),
2512 fold_convert_loc (loc, TREE_TYPE (type),
2513 integer_zero_node));
2514 case COMPLEX_TYPE:
2515 {
2516 tree rpart, ipart;
2517
2518 if (TREE_CODE (arg) == COMPLEX_EXPR)
2519 {
2520 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2521 TREE_OPERAND (arg, 0));
2522 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2523 TREE_OPERAND (arg, 1));
2524 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2525 }
2526
2527 arg = save_expr (arg);
2528 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2529 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2530 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2531 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2532 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2533 }
2534
2535 default:
2536 gcc_unreachable ();
2537 }
2538
2539 case VECTOR_TYPE:
2540 if (integer_zerop (arg))
2541 return build_zero_vector (type);
2542 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2543 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2544 || TREE_CODE (orig) == VECTOR_TYPE);
2545 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2546
2547 case VOID_TYPE:
2548 tem = fold_ignored_result (arg);
2549 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2550
2551 default:
2552 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2553 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2554 gcc_unreachable ();
2555 }
2556 fold_convert_exit:
2557 protected_set_expr_location_unshare (tem, loc);
2558 return tem;
2559 }
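/* Editorial sketch (the call below is illustrative only): fold_convert_loc
   folds constant operands immediately and otherwise builds the matching
   conversion tree.  For example

     tree c = fold_convert_loc (UNKNOWN_LOCATION, double_type_node,
                                integer_zero_node);

   yields the REAL_CST 0.0 through fold_convert_const, whereas converting
   a non-constant integer expression to double builds a FLOAT_EXPR node.  */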
2560 \f
2561 /* Return false if expr can be assumed not to be an lvalue, true
2562 otherwise. */
2563
2564 static bool
2565 maybe_lvalue_p (const_tree x)
2566 {
2567 /* We only need to wrap lvalue tree codes. */
2568 switch (TREE_CODE (x))
2569 {
2570 case VAR_DECL:
2571 case PARM_DECL:
2572 case RESULT_DECL:
2573 case LABEL_DECL:
2574 case FUNCTION_DECL:
2575 case SSA_NAME:
2576
2577 case COMPONENT_REF:
2578 case MEM_REF:
2579 case INDIRECT_REF:
2580 case ARRAY_REF:
2581 case ARRAY_RANGE_REF:
2582 case BIT_FIELD_REF:
2583 case OBJ_TYPE_REF:
2584
2585 case REALPART_EXPR:
2586 case IMAGPART_EXPR:
2587 case PREINCREMENT_EXPR:
2588 case PREDECREMENT_EXPR:
2589 case SAVE_EXPR:
2590 case TRY_CATCH_EXPR:
2591 case WITH_CLEANUP_EXPR:
2592 case COMPOUND_EXPR:
2593 case MODIFY_EXPR:
2594 case TARGET_EXPR:
2595 case COND_EXPR:
2596 case BIND_EXPR:
2597 break;
2598
2599 default:
2600 /* Assume the worst for front-end tree codes. */
2601 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2602 break;
2603 return false;
2604 }
2605
2606 return true;
2607 }
2608
2609 /* Return an expr equal to X but certainly not valid as an lvalue. */
2610
2611 tree
2612 non_lvalue_loc (location_t loc, tree x)
2613 {
2614 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2615 us. */
2616 if (in_gimple_form)
2617 return x;
2618
2619 if (! maybe_lvalue_p (x))
2620 return x;
2621 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2622 }
2623
2624 /* When pedantic, return an expr equal to X but certainly not valid as a
2625 pedantic lvalue. Otherwise, return X. */
2626
2627 static tree
2628 pedantic_non_lvalue_loc (location_t loc, tree x)
2629 {
2630 return protected_set_expr_location_unshare (x, loc);
2631 }
2632 \f
2633 /* Given a tree comparison code, return the code that is the logical inverse.
2634 It is generally not safe to do this for floating-point comparisons, except
2635 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2636 ERROR_MARK in this case. */
2637
2638 enum tree_code
2639 invert_tree_comparison (enum tree_code code, bool honor_nans)
2640 {
2641 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2642 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2643 return ERROR_MARK;
2644
2645 switch (code)
2646 {
2647 case EQ_EXPR:
2648 return NE_EXPR;
2649 case NE_EXPR:
2650 return EQ_EXPR;
2651 case GT_EXPR:
2652 return honor_nans ? UNLE_EXPR : LE_EXPR;
2653 case GE_EXPR:
2654 return honor_nans ? UNLT_EXPR : LT_EXPR;
2655 case LT_EXPR:
2656 return honor_nans ? UNGE_EXPR : GE_EXPR;
2657 case LE_EXPR:
2658 return honor_nans ? UNGT_EXPR : GT_EXPR;
2659 case LTGT_EXPR:
2660 return UNEQ_EXPR;
2661 case UNEQ_EXPR:
2662 return LTGT_EXPR;
2663 case UNGT_EXPR:
2664 return LE_EXPR;
2665 case UNGE_EXPR:
2666 return LT_EXPR;
2667 case UNLT_EXPR:
2668 return GE_EXPR;
2669 case UNLE_EXPR:
2670 return GT_EXPR;
2671 case ORDERED_EXPR:
2672 return UNORDERED_EXPR;
2673 case UNORDERED_EXPR:
2674 return ORDERED_EXPR;
2675 default:
2676 gcc_unreachable ();
2677 }
2678 }
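/* Editorial examples for invert_tree_comparison above: without NaNs,
   LT_EXPR inverts to GE_EXPR; with NaNs honored it inverts to UNGE_EXPR,
   the exact complement of < (true for unordered operands); and with both
   NaNs honored and flag_trapping_math set, inverting LT_EXPR returns
   ERROR_MARK, because the unordered form would no longer trap on a NaN
   operand.  */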
2679
2680 /* Similar, but return the comparison that results if the operands are
2681 swapped. This is safe for floating-point. */
2682
2683 enum tree_code
2684 swap_tree_comparison (enum tree_code code)
2685 {
2686 switch (code)
2687 {
2688 case EQ_EXPR:
2689 case NE_EXPR:
2690 case ORDERED_EXPR:
2691 case UNORDERED_EXPR:
2692 case LTGT_EXPR:
2693 case UNEQ_EXPR:
2694 return code;
2695 case GT_EXPR:
2696 return LT_EXPR;
2697 case GE_EXPR:
2698 return LE_EXPR;
2699 case LT_EXPR:
2700 return GT_EXPR;
2701 case LE_EXPR:
2702 return GE_EXPR;
2703 case UNGT_EXPR:
2704 return UNLT_EXPR;
2705 case UNGE_EXPR:
2706 return UNLE_EXPR;
2707 case UNLT_EXPR:
2708 return UNGT_EXPR;
2709 case UNLE_EXPR:
2710 return UNGE_EXPR;
2711 default:
2712 gcc_unreachable ();
2713 }
2714 }
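/* Editorial note: swapping merely exchanges the operand roles, so a < b
   becomes b > a.  Unlike inversion, this never changes which inputs
   (NaNs included) satisfy the comparison, which is why it is safe for
   floating point without any honor_nans check.  */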
2715
2716
2717 /* Convert a comparison tree code from an enum tree_code representation
2718 into a compcode bit-based encoding. This function is the inverse of
2719 compcode_to_comparison. */
2720
2721 static enum comparison_code
2722 comparison_to_compcode (enum tree_code code)
2723 {
2724 switch (code)
2725 {
2726 case LT_EXPR:
2727 return COMPCODE_LT;
2728 case EQ_EXPR:
2729 return COMPCODE_EQ;
2730 case LE_EXPR:
2731 return COMPCODE_LE;
2732 case GT_EXPR:
2733 return COMPCODE_GT;
2734 case NE_EXPR:
2735 return COMPCODE_NE;
2736 case GE_EXPR:
2737 return COMPCODE_GE;
2738 case ORDERED_EXPR:
2739 return COMPCODE_ORD;
2740 case UNORDERED_EXPR:
2741 return COMPCODE_UNORD;
2742 case UNLT_EXPR:
2743 return COMPCODE_UNLT;
2744 case UNEQ_EXPR:
2745 return COMPCODE_UNEQ;
2746 case UNLE_EXPR:
2747 return COMPCODE_UNLE;
2748 case UNGT_EXPR:
2749 return COMPCODE_UNGT;
2750 case LTGT_EXPR:
2751 return COMPCODE_LTGT;
2752 case UNGE_EXPR:
2753 return COMPCODE_UNGE;
2754 default:
2755 gcc_unreachable ();
2756 }
2757 }
2758
2759 /* Convert a compcode bit-based encoding of a comparison operator back
2760 to GCC's enum tree_code representation. This function is the
2761 inverse of comparison_to_compcode. */
2762
2763 static enum tree_code
2764 compcode_to_comparison (enum comparison_code code)
2765 {
2766 switch (code)
2767 {
2768 case COMPCODE_LT:
2769 return LT_EXPR;
2770 case COMPCODE_EQ:
2771 return EQ_EXPR;
2772 case COMPCODE_LE:
2773 return LE_EXPR;
2774 case COMPCODE_GT:
2775 return GT_EXPR;
2776 case COMPCODE_NE:
2777 return NE_EXPR;
2778 case COMPCODE_GE:
2779 return GE_EXPR;
2780 case COMPCODE_ORD:
2781 return ORDERED_EXPR;
2782 case COMPCODE_UNORD:
2783 return UNORDERED_EXPR;
2784 case COMPCODE_UNLT:
2785 return UNLT_EXPR;
2786 case COMPCODE_UNEQ:
2787 return UNEQ_EXPR;
2788 case COMPCODE_UNLE:
2789 return UNLE_EXPR;
2790 case COMPCODE_UNGT:
2791 return UNGT_EXPR;
2792 case COMPCODE_LTGT:
2793 return LTGT_EXPR;
2794 case COMPCODE_UNGE:
2795 return UNGE_EXPR;
2796 default:
2797 gcc_unreachable ();
2798 }
2799 }
2800
2801 /* Return true if COND1 tests the opposite condition of COND2. */
2802
2803 bool
2804 inverse_conditions_p (const_tree cond1, const_tree cond2)
2805 {
2806 return (COMPARISON_CLASS_P (cond1)
2807 && COMPARISON_CLASS_P (cond2)
2808 && (invert_tree_comparison
2809 (TREE_CODE (cond1),
2810 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2811 && operand_equal_p (TREE_OPERAND (cond1, 0),
2812 TREE_OPERAND (cond2, 0), 0)
2813 && operand_equal_p (TREE_OPERAND (cond1, 1),
2814 TREE_OPERAND (cond2, 1), 0));
2815 }
2816
2817 /* Return a tree for the comparison which is the combination of
2818 doing the AND or OR (depending on CODE) of the two operations LCODE
2819 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2820 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2821 if this makes the transformation invalid. */
2822
2823 tree
2824 combine_comparisons (location_t loc,
2825 enum tree_code code, enum tree_code lcode,
2826 enum tree_code rcode, tree truth_type,
2827 tree ll_arg, tree lr_arg)
2828 {
2829 bool honor_nans = HONOR_NANS (ll_arg);
2830 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2831 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2832 int compcode;
2833
2834 switch (code)
2835 {
2836 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2837 compcode = lcompcode & rcompcode;
2838 break;
2839
2840 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2841 compcode = lcompcode | rcompcode;
2842 break;
2843
2844 default:
2845 return NULL_TREE;
2846 }
2847
2848 if (!honor_nans)
2849 {
2850 /* Eliminate unordered comparisons, as well as LTGT and ORD
2851 which are not used unless the mode has NaNs. */
2852 compcode &= ~COMPCODE_UNORD;
2853 if (compcode == COMPCODE_LTGT)
2854 compcode = COMPCODE_NE;
2855 else if (compcode == COMPCODE_ORD)
2856 compcode = COMPCODE_TRUE;
2857 }
2858 else if (flag_trapping_math)
2859 {
2860 /* Check that the original operation and the optimized ones will trap
2861 under the same condition. */
2862 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2863 && (lcompcode != COMPCODE_EQ)
2864 && (lcompcode != COMPCODE_ORD);
2865 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2866 && (rcompcode != COMPCODE_EQ)
2867 && (rcompcode != COMPCODE_ORD);
2868 bool trap = (compcode & COMPCODE_UNORD) == 0
2869 && (compcode != COMPCODE_EQ)
2870 && (compcode != COMPCODE_ORD);
2871
2872 /* In a short-circuited boolean expression the LHS might be
2873 such that the RHS, if evaluated, will never trap. For
2874 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2875 if neither x nor y is NaN. (This is a mixed blessing: for
2876 example, the expression above will never trap, hence
2877 optimizing it to x < y would be invalid). */
2878 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2879 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2880 rtrap = false;
2881
2882 /* If the comparison was short-circuited, and only the RHS
2883 trapped, we may now generate a spurious trap. */
2884 if (rtrap && !ltrap
2885 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2886 return NULL_TREE;
2887
2888 /* If we changed the conditions that cause a trap, we lose. */
2889 if ((ltrap || rtrap) != trap)
2890 return NULL_TREE;
2891 }
2892
2893 if (compcode == COMPCODE_TRUE)
2894 return constant_boolean_node (true, truth_type);
2895 else if (compcode == COMPCODE_FALSE)
2896 return constant_boolean_node (false, truth_type);
2897 else
2898 {
2899 enum tree_code tcode;
2900
2901 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2902 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2903 }
2904 }
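/* Editorial worked example using the comparison_code bit encoding:
   for (a < b) || (a == b) the TRUTH_OR branch above computes
   COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE, so the result
   folds to a <= b.  For (a <= b) && (a >= b) the TRUTH_AND branch computes
   3 & 6 == 2 == COMPCODE_EQ, folding to a == b.  When NaNs are honored and
   -ftrapping-math is in effect, the trap checks above may instead reject
   the transformation and return NULL_TREE.  */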
2905 \f
2906 /* Return nonzero if two operands (typically of the same tree node)
2907 are necessarily equal. FLAGS modifies behavior as follows:
2908
2909 If OEP_ONLY_CONST is set, only return nonzero for constants.
2910 This function tests whether the operands are indistinguishable;
2911 it does not test whether they are equal using C's == operation.
2912 The distinction is important for IEEE floating point, because
2913 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2914 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2915
2916 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2917 even though it may hold multiple values during a function.
2918 This is because a GCC tree node guarantees that nothing else is
2919 executed between the evaluation of its "operands" (which may often
2920 be evaluated in arbitrary order). Hence if the operands themselves
2921 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2922 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2923 unset means assuming isochronic (or instantaneous) tree equivalence.
2924 Unless comparing arbitrary expression trees, such as from different
2925 statements, this flag can usually be left unset.
2926
2927 If OEP_PURE_SAME is set, then pure functions with identical arguments
2928 are considered the same. It is used when the caller has other ways
2929 to ensure that global memory is unchanged in between.
2930
2931 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2932 not values of expressions.
2933
2934 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2935 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2936
2937 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2938 any operand with side effects. This is unnecessarily conservative in the
2939 case we know that arg0 and arg1 are in disjoint code paths (such as in
2940 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2941 addresses with TREE_CONSTANT flag set so we know that &var == &var
2942 even if var is volatile. */
2943
2944 bool
2945 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2946 unsigned int flags)
2947 {
2948 bool r;
2949 if (verify_hash_value (arg0, arg1, flags, &r))
2950 return r;
2951
2952 STRIP_ANY_LOCATION_WRAPPER (arg0);
2953 STRIP_ANY_LOCATION_WRAPPER (arg1);
2954
2955 /* If either is ERROR_MARK, they aren't equal. */
2956 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2957 || TREE_TYPE (arg0) == error_mark_node
2958 || TREE_TYPE (arg1) == error_mark_node)
2959 return false;
2960
2961 /* Similarly, if either does not have a type (like a template id),
2962 they aren't equal. */
2963 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2964 return false;
2965
2966 /* We cannot consider pointers to different address space equal. */
2967 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2968 && POINTER_TYPE_P (TREE_TYPE (arg1))
2969 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2970 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2971 return false;
2972
2973 /* Check equality of integer constants before bailing out due to
2974 precision differences. */
2975 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2976 {
2977 /* Address of INTEGER_CST is not defined; check that we did not forget
2978 to drop the OEP_ADDRESS_OF flags. */
2979 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2980 return tree_int_cst_equal (arg0, arg1);
2981 }
2982
2983 if (!(flags & OEP_ADDRESS_OF))
2984 {
2985 /* If both types don't have the same signedness, then we can't consider
2986 them equal. We must check this before the STRIP_NOPS calls
2987 because they may change the signedness of the arguments. As pointers
2988 strictly don't have a signedness, require either two pointers or
2989 two non-pointers as well. */
2990 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2991 || POINTER_TYPE_P (TREE_TYPE (arg0))
2992 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2993 return false;
2994
2995 /* If both types don't have the same precision, then it is not safe
2996 to strip NOPs. */
2997 if (element_precision (TREE_TYPE (arg0))
2998 != element_precision (TREE_TYPE (arg1)))
2999 return false;
3000
3001 STRIP_NOPS (arg0);
3002 STRIP_NOPS (arg1);
3003 }
3004 #if 0
3005 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3006 sanity check once the issue is solved. */
3007 else
3008 /* Addresses of conversions and SSA_NAMEs (and many other things)
3009 are not defined. Check that we did not forget to drop the
3010 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3011 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3012 && TREE_CODE (arg0) != SSA_NAME);
3013 #endif
3014
3015 /* In case both args are comparisons but with different comparison
3016 code, try to swap the comparison operands of one arg to produce
3017 a match and compare that variant. */
3018 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3019 && COMPARISON_CLASS_P (arg0)
3020 && COMPARISON_CLASS_P (arg1))
3021 {
3022 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3023
3024 if (TREE_CODE (arg0) == swap_code)
3025 return operand_equal_p (TREE_OPERAND (arg0, 0),
3026 TREE_OPERAND (arg1, 1), flags)
3027 && operand_equal_p (TREE_OPERAND (arg0, 1),
3028 TREE_OPERAND (arg1, 0), flags);
3029 }
3030
3031 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3032 {
3033 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3034 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3035 ;
3036 else if (flags & OEP_ADDRESS_OF)
3037 {
3038 /* If we are interested in comparing addresses ignore
3039 MEM_REF wrappings of the base that can appear just for
3040 TBAA reasons. */
3041 if (TREE_CODE (arg0) == MEM_REF
3042 && DECL_P (arg1)
3043 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3044 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3045 && integer_zerop (TREE_OPERAND (arg0, 1)))
3046 return true;
3047 else if (TREE_CODE (arg1) == MEM_REF
3048 && DECL_P (arg0)
3049 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3050 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3051 && integer_zerop (TREE_OPERAND (arg1, 1)))
3052 return true;
3053 return false;
3054 }
3055 else
3056 return false;
3057 }
3058
3059 /* When not checking addresses, this is needed for conversions and for
3060 COMPONENT_REF. Might as well play it safe and always test this. */
3061 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3062 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3063 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3064 && !(flags & OEP_ADDRESS_OF)))
3065 return false;
3066
3067 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3068 We don't care about side effects in that case because the SAVE_EXPR
3069 takes care of that for us. In all other cases, two expressions are
3070 equal if they have no side effects. If we have two identical
3071 expressions with side effects that should be treated the same due
3072 to the only side effects being identical SAVE_EXPR's, that will
3073 be detected in the recursive calls below.
3074 If we are taking an invariant address of two identical objects
3075 they are necessarily equal as well. */
3076 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3077 && (TREE_CODE (arg0) == SAVE_EXPR
3078 || (flags & OEP_MATCH_SIDE_EFFECTS)
3079 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3080 return true;
3081
3082 /* Next handle constant cases, those for which we can return 1 even
3083 if ONLY_CONST is set. */
3084 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3085 switch (TREE_CODE (arg0))
3086 {
3087 case INTEGER_CST:
3088 return tree_int_cst_equal (arg0, arg1);
3089
3090 case FIXED_CST:
3091 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3092 TREE_FIXED_CST (arg1));
3093
3094 case REAL_CST:
3095 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3096 return true;
3097
3098
3099 if (!HONOR_SIGNED_ZEROS (arg0))
3100 {
3101 /* If we do not distinguish between negative and positive zero,
3102 consider them equal. */
3103 if (real_zerop (arg0) && real_zerop (arg1))
3104 return true;
3105 }
3106 return false;
3107
3108 case VECTOR_CST:
3109 {
3110 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3111 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3112 return false;
3113
3114 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3115 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3116 return false;
3117
3118 unsigned int count = vector_cst_encoded_nelts (arg0);
3119 for (unsigned int i = 0; i < count; ++i)
3120 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3121 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3122 return false;
3123 return true;
3124 }
3125
3126 case COMPLEX_CST:
3127 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3128 flags)
3129 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3130 flags));
3131
3132 case STRING_CST:
3133 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3134 && ! memcmp (TREE_STRING_POINTER (arg0),
3135 TREE_STRING_POINTER (arg1),
3136 TREE_STRING_LENGTH (arg0)));
3137
3138 case ADDR_EXPR:
3139 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3140 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3141 flags | OEP_ADDRESS_OF
3142 | OEP_MATCH_SIDE_EFFECTS);
3143 case CONSTRUCTOR:
3144 /* In GIMPLE empty constructors are allowed in initializers of
3145 aggregates. */
3146 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3147 default:
3148 break;
3149 }
3150
3151 if (flags & OEP_ONLY_CONST)
3152 return false;
3153
3154 /* Define macros to test an operand from arg0 and arg1 for equality and a
3155 variant that allows null and views null as being different from any
3156 non-null value. In the latter case, if either is null, then both
3157 must be; otherwise, do the normal comparison. */
3158 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3159 TREE_OPERAND (arg1, N), flags)
3160
3161 #define OP_SAME_WITH_NULL(N) \
3162 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3163 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3164
3165 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3166 {
3167 case tcc_unary:
3168 /* Two conversions are equal only if signedness and modes match. */
3169 switch (TREE_CODE (arg0))
3170 {
3171 CASE_CONVERT:
3172 case FIX_TRUNC_EXPR:
3173 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3174 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3175 return false;
3176 break;
3177 default:
3178 break;
3179 }
3180
3181 return OP_SAME (0);
3182
3183
3184 case tcc_comparison:
3185 case tcc_binary:
3186 if (OP_SAME (0) && OP_SAME (1))
3187 return true;
3188
3189 /* For commutative ops, allow the other order. */
3190 return (commutative_tree_code (TREE_CODE (arg0))
3191 && operand_equal_p (TREE_OPERAND (arg0, 0),
3192 TREE_OPERAND (arg1, 1), flags)
3193 && operand_equal_p (TREE_OPERAND (arg0, 1),
3194 TREE_OPERAND (arg1, 0), flags));
3195
3196 case tcc_reference:
3197 /* If either of the pointer (or reference) expressions we are
3198 dereferencing contain a side effect, these cannot be equal,
3199 but their addresses can be. */
3200 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3201 && (TREE_SIDE_EFFECTS (arg0)
3202 || TREE_SIDE_EFFECTS (arg1)))
3203 return false;
3204
3205 switch (TREE_CODE (arg0))
3206 {
3207 case INDIRECT_REF:
3208 if (!(flags & OEP_ADDRESS_OF))
3209 {
3210 if (TYPE_ALIGN (TREE_TYPE (arg0))
3211 != TYPE_ALIGN (TREE_TYPE (arg1)))
3212 return false;
3213 /* Verify that the access types are compatible. */
3214 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3215 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3216 return false;
3217 }
3218 flags &= ~OEP_ADDRESS_OF;
3219 return OP_SAME (0);
3220
3221 case IMAGPART_EXPR:
3222 /* Require the same offset. */
3223 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3224 TYPE_SIZE (TREE_TYPE (arg1)),
3225 flags & ~OEP_ADDRESS_OF))
3226 return false;
3227
3228 /* Fallthru. */
3229 case REALPART_EXPR:
3230 case VIEW_CONVERT_EXPR:
3231 return OP_SAME (0);
3232
3233 case TARGET_MEM_REF:
3234 case MEM_REF:
3235 if (!(flags & OEP_ADDRESS_OF))
3236 {
3237 /* Require equal access sizes */
3238 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3239 && (!TYPE_SIZE (TREE_TYPE (arg0))
3240 || !TYPE_SIZE (TREE_TYPE (arg1))
3241 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3242 TYPE_SIZE (TREE_TYPE (arg1)),
3243 flags)))
3244 return false;
3245 /* Verify that access happens in similar types. */
3246 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3247 return false;
3248 /* Verify that accesses are TBAA compatible. */
3249 if (!alias_ptr_types_compatible_p
3250 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3251 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3252 || (MR_DEPENDENCE_CLIQUE (arg0)
3253 != MR_DEPENDENCE_CLIQUE (arg1))
3254 || (MR_DEPENDENCE_BASE (arg0)
3255 != MR_DEPENDENCE_BASE (arg1)))
3256 return false;
3257 /* Verify that alignment is compatible. */
3258 if (TYPE_ALIGN (TREE_TYPE (arg0))
3259 != TYPE_ALIGN (TREE_TYPE (arg1)))
3260 return false;
3261 }
3262 flags &= ~OEP_ADDRESS_OF;
3263 return (OP_SAME (0) && OP_SAME (1)
3264 /* TARGET_MEM_REF requires equal extra operands. */
3265 && (TREE_CODE (arg0) != TARGET_MEM_REF
3266 || (OP_SAME_WITH_NULL (2)
3267 && OP_SAME_WITH_NULL (3)
3268 && OP_SAME_WITH_NULL (4))));
3269
3270 case ARRAY_REF:
3271 case ARRAY_RANGE_REF:
3272 if (!OP_SAME (0))
3273 return false;
3274 flags &= ~OEP_ADDRESS_OF;
3275 /* Compare the array index by value if it is constant first as we
3276 may have different types but same value here. */
3277 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3278 TREE_OPERAND (arg1, 1))
3279 || OP_SAME (1))
3280 && OP_SAME_WITH_NULL (2)
3281 && OP_SAME_WITH_NULL (3)
3282 /* Compare low bound and element size as with OEP_ADDRESS_OF
3283 we have to account for the offset of the ref. */
3284 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3285 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3286 || (operand_equal_p (array_ref_low_bound
3287 (CONST_CAST_TREE (arg0)),
3288 array_ref_low_bound
3289 (CONST_CAST_TREE (arg1)), flags)
3290 && operand_equal_p (array_ref_element_size
3291 (CONST_CAST_TREE (arg0)),
3292 array_ref_element_size
3293 (CONST_CAST_TREE (arg1)),
3294 flags))));
3295
3296 case COMPONENT_REF:
3297 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3298 may be NULL when we're called to compare MEM_EXPRs. */
3299 if (!OP_SAME_WITH_NULL (0)
3300 || !OP_SAME (1))
3301 return false;
3302 flags &= ~OEP_ADDRESS_OF;
3303 return OP_SAME_WITH_NULL (2);
3304
3305 case BIT_FIELD_REF:
3306 if (!OP_SAME (0))
3307 return false;
3308 flags &= ~OEP_ADDRESS_OF;
3309 return OP_SAME (1) && OP_SAME (2);
3310
3311 /* Virtual table call. */
3312 case OBJ_TYPE_REF:
3313 {
3314 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3315 OBJ_TYPE_REF_EXPR (arg1), flags))
3316 return false;
3317 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3318 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3319 return false;
3320 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3321 OBJ_TYPE_REF_OBJECT (arg1), flags))
3322 return false;
3323 if (!types_same_for_odr (obj_type_ref_class (arg0),
3324 obj_type_ref_class (arg1)))
3325 return false;
3326 return true;
3327 }
3328
3329 default:
3330 return false;
3331 }
3332
3333 case tcc_expression:
3334 switch (TREE_CODE (arg0))
3335 {
3336 case ADDR_EXPR:
3337 /* Be sure we pass right ADDRESS_OF flag. */
3338 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3339 return operand_equal_p (TREE_OPERAND (arg0, 0),
3340 TREE_OPERAND (arg1, 0),
3341 flags | OEP_ADDRESS_OF);
3342
3343 case TRUTH_NOT_EXPR:
3344 return OP_SAME (0);
3345
3346 case TRUTH_ANDIF_EXPR:
3347 case TRUTH_ORIF_EXPR:
3348 return OP_SAME (0) && OP_SAME (1);
3349
3350 case WIDEN_MULT_PLUS_EXPR:
3351 case WIDEN_MULT_MINUS_EXPR:
3352 if (!OP_SAME (2))
3353 return false;
3354 /* The multiplication operands are commutative. */
3355 /* FALLTHRU */
3356
3357 case TRUTH_AND_EXPR:
3358 case TRUTH_OR_EXPR:
3359 case TRUTH_XOR_EXPR:
3360 if (OP_SAME (0) && OP_SAME (1))
3361 return true;
3362
3363 /* Otherwise take into account this is a commutative operation. */
3364 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3365 TREE_OPERAND (arg1, 1), flags)
3366 && operand_equal_p (TREE_OPERAND (arg0, 1),
3367 TREE_OPERAND (arg1, 0), flags));
3368
3369 case COND_EXPR:
3370 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3371 return false;
3372 flags &= ~OEP_ADDRESS_OF;
3373 return OP_SAME (0);
3374
3375 case BIT_INSERT_EXPR:
3376 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3377 of op1.  Check that the two precisions are the same. */
3378 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3379 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3380 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3381 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3382 return false;
3383 /* FALLTHRU */
3384
3385 case VEC_COND_EXPR:
3386 case DOT_PROD_EXPR:
3387 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3388
3389 case MODIFY_EXPR:
3390 case INIT_EXPR:
3391 case COMPOUND_EXPR:
3392 case PREDECREMENT_EXPR:
3393 case PREINCREMENT_EXPR:
3394 case POSTDECREMENT_EXPR:
3395 case POSTINCREMENT_EXPR:
3396 if (flags & OEP_LEXICOGRAPHIC)
3397 return OP_SAME (0) && OP_SAME (1);
3398 return false;
3399
3400 case CLEANUP_POINT_EXPR:
3401 case EXPR_STMT:
3402 case SAVE_EXPR:
3403 if (flags & OEP_LEXICOGRAPHIC)
3404 return OP_SAME (0);
3405 return false;
3406
3407 default:
3408 return false;
3409 }
3410
3411 case tcc_vl_exp:
3412 switch (TREE_CODE (arg0))
3413 {
3414 case CALL_EXPR:
3415 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3416 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3417 /* If the CALL_EXPRs are not both internal or both normal function
3418 calls, then they are not equal. */
3419 return false;
3420 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3421 {
3422 /* If the CALL_EXPRs call different internal functions, then they
3423 are not equal. */
3424 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3425 return false;
3426 }
3427 else
3428 {
3429 /* If the CALL_EXPRs call different functions, then they are not
3430 equal. */
3431 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3432 flags))
3433 return false;
3434 }
3435
3436 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3437 {
3438 unsigned int cef = call_expr_flags (arg0);
3439 if (flags & OEP_PURE_SAME)
3440 cef &= ECF_CONST | ECF_PURE;
3441 else
3442 cef &= ECF_CONST;
3443 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3444 return false;
3445 }
3446
3447 /* Now see if all the arguments are the same. */
3448 {
3449 const_call_expr_arg_iterator iter0, iter1;
3450 const_tree a0, a1;
3451 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3452 a1 = first_const_call_expr_arg (arg1, &iter1);
3453 a0 && a1;
3454 a0 = next_const_call_expr_arg (&iter0),
3455 a1 = next_const_call_expr_arg (&iter1))
3456 if (! operand_equal_p (a0, a1, flags))
3457 return false;
3458
3459 /* If we get here and both argument lists are exhausted
3460 then the CALL_EXPRs are equal. */
3461 return ! (a0 || a1);
3462 }
3463 default:
3464 return false;
3465 }
3466
3467 case tcc_declaration:
3468 /* Consider __builtin_sqrt equal to sqrt. */
3469 return (TREE_CODE (arg0) == FUNCTION_DECL
3470 && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3471 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3472 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3473 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3474
3475 case tcc_exceptional:
3476 if (TREE_CODE (arg0) == CONSTRUCTOR)
3477 {
3478 /* In GIMPLE constructors are used only to build vectors from
3479 elements. Individual elements in the constructor must be
3480 indexed in increasing order and form an initial sequence.
3481
3482 We make no effort to compare constructors in generic.
3483 (see sem_variable::equals in ipa-icf which can do so for
3484 constants). */
3485 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3486 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3487 return false;
3488
3489 /* Be sure that vectors constructed have the same representation.
3490 We have only checked that element precisions and modes match.
3491 Vectors may be BLKmode, so also check that the number of
3492 parts matches. */
3493 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3494 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3495 return false;
3496
3497 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3498 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3499 unsigned int len = vec_safe_length (v0);
3500
3501 if (len != vec_safe_length (v1))
3502 return false;
3503
3504 for (unsigned int i = 0; i < len; i++)
3505 {
3506 constructor_elt *c0 = &(*v0)[i];
3507 constructor_elt *c1 = &(*v1)[i];
3508
3509 if (!operand_equal_p (c0->value, c1->value, flags)
3510 /* In GIMPLE the indexes can be either NULL or matching i.
3511 Double check this so we won't get false
3512 positives for GENERIC. */
3513 || (c0->index
3514 && (TREE_CODE (c0->index) != INTEGER_CST
3515 || compare_tree_int (c0->index, i)))
3516 || (c1->index
3517 && (TREE_CODE (c1->index) != INTEGER_CST
3518 || compare_tree_int (c1->index, i))))
3519 return false;
3520 }
3521 return true;
3522 }
3523 else if (TREE_CODE (arg0) == STATEMENT_LIST
3524 && (flags & OEP_LEXICOGRAPHIC))
3525 {
3526 /* Compare the STATEMENT_LISTs. */
3527 tree_stmt_iterator tsi1, tsi2;
3528 tree body1 = CONST_CAST_TREE (arg0);
3529 tree body2 = CONST_CAST_TREE (arg1);
3530 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3531 tsi_next (&tsi1), tsi_next (&tsi2))
3532 {
3533 /* The lists don't have the same number of statements. */
3534 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3535 return false;
3536 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3537 return true;
3538 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3539 flags & (OEP_LEXICOGRAPHIC
3540 | OEP_NO_HASH_CHECK)))
3541 return false;
3542 }
3543 }
3544 return false;
3545
3546 case tcc_statement:
3547 switch (TREE_CODE (arg0))
3548 {
3549 case RETURN_EXPR:
3550 if (flags & OEP_LEXICOGRAPHIC)
3551 return OP_SAME_WITH_NULL (0);
3552 return false;
3553 case DEBUG_BEGIN_STMT:
3554 if (flags & OEP_LEXICOGRAPHIC)
3555 return true;
3556 return false;
3557 default:
3558 return false;
3559 }
3560
3561 default:
3562 return false;
3563 }
3564
3565 #undef OP_SAME
3566 #undef OP_SAME_WITH_NULL
3567 }
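/* Editorial notes on the flag semantics of operand_equal_p above: with
   FLAGS == 0, distinct REAL_CST nodes for -0.0 and 0.0 compare unequal
   when signed zeros are honored, even though -0.0 == 0.0 at run time;
   with OEP_ONLY_CONST, a VAR_DECL never compares equal even to itself;
   and with OEP_ADDRESS_OF, a MEM_REF of the form *&var with a zero offset
   is treated as equal to VAR itself, matching the special case in
   hash_operand below.  */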
3568
3569 /* Generate a hash value for an expression. This can be used iteratively
3570 by passing a previous result as the HSTATE argument. */
3571
3572 void
3573 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3574 unsigned int flags)
3575 {
3576 int i;
3577 enum tree_code code;
3578 enum tree_code_class tclass;
3579
3580 if (t == NULL_TREE || t == error_mark_node)
3581 {
3582 hstate.merge_hash (0);
3583 return;
3584 }
3585
3586 STRIP_ANY_LOCATION_WRAPPER (t);
3587
3588 if (!(flags & OEP_ADDRESS_OF))
3589 STRIP_NOPS (t);
3590
3591 code = TREE_CODE (t);
3592
3593 switch (code)
3594 {
3595 /* Alas, constants aren't shared, so we can't rely on pointer
3596 identity. */
3597 case VOID_CST:
3598 hstate.merge_hash (0);
3599 return;
3600 case INTEGER_CST:
3601 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3602 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3603 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3604 return;
3605 case REAL_CST:
3606 {
3607 unsigned int val2;
3608 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3609 val2 = rvc_zero;
3610 else
3611 val2 = real_hash (TREE_REAL_CST_PTR (t));
3612 hstate.merge_hash (val2);
3613 return;
3614 }
3615 case FIXED_CST:
3616 {
3617 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3618 hstate.merge_hash (val2);
3619 return;
3620 }
3621 case STRING_CST:
3622 hstate.add ((const void *) TREE_STRING_POINTER (t),
3623 TREE_STRING_LENGTH (t));
3624 return;
3625 case COMPLEX_CST:
3626 hash_operand (TREE_REALPART (t), hstate, flags);
3627 hash_operand (TREE_IMAGPART (t), hstate, flags);
3628 return;
3629 case VECTOR_CST:
3630 {
3631 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3632 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3633 unsigned int count = vector_cst_encoded_nelts (t);
3634 for (unsigned int i = 0; i < count; ++i)
3635 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3636 return;
3637 }
3638 case SSA_NAME:
3639 /* We can just compare by pointer. */
3640 hstate.add_hwi (SSA_NAME_VERSION (t));
3641 return;
3642 case PLACEHOLDER_EXPR:
3643 /* The node itself doesn't matter. */
3644 return;
3645 case BLOCK:
3646 case OMP_CLAUSE:
3647 /* Ignore. */
3648 return;
3649 case TREE_LIST:
3650 /* A list of expressions, for a CALL_EXPR or as the elements of a
3651 VECTOR_CST. */
3652 for (; t; t = TREE_CHAIN (t))
3653 hash_operand (TREE_VALUE (t), hstate, flags);
3654 return;
3655 case CONSTRUCTOR:
3656 {
3657 unsigned HOST_WIDE_INT idx;
3658 tree field, value;
3659 flags &= ~OEP_ADDRESS_OF;
3660 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3661 {
3662 hash_operand (field, hstate, flags);
3663 hash_operand (value, hstate, flags);
3664 }
3665 return;
3666 }
3667 case STATEMENT_LIST:
3668 {
3669 tree_stmt_iterator i;
3670 for (i = tsi_start (CONST_CAST_TREE (t));
3671 !tsi_end_p (i); tsi_next (&i))
3672 hash_operand (tsi_stmt (i), hstate, flags);
3673 return;
3674 }
3675 case TREE_VEC:
3676 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3677 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3678 return;
3679 case IDENTIFIER_NODE:
3680 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3681 return;
3682 case FIELD_DECL:
3683 inchash::add_expr (DECL_FIELD_OFFSET (t), hstate, flags);
3684 inchash::add_expr (DECL_FIELD_BIT_OFFSET (t), hstate, flags);
3685 return;
3686 case FUNCTION_DECL:
3687 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3688 Otherwise nodes that compare equal according to operand_equal_p might
3689 get different hash codes. However, don't do this for machine specific
3690 or front end builtins, since the function code is overloaded in those
3691 cases. */
3692 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3693 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3694 {
3695 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3696 code = TREE_CODE (t);
3697 }
3698 /* FALL THROUGH */
3699 default:
3700 if (POLY_INT_CST_P (t))
3701 {
3702 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3703 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3704 return;
3705 }
3706 tclass = TREE_CODE_CLASS (code);
3707
3708 if (tclass == tcc_declaration)
3709 {
3710 /* DECLs have a unique ID.  */
3711 hstate.add_hwi (DECL_UID (t));
3712 }
3713 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3714 {
3715 /* For comparisons that can be swapped, use the lower
3716 tree code. */
3717 enum tree_code ccode = swap_tree_comparison (code);
3718 if (code < ccode)
3719 ccode = code;
3720 hstate.add_object (ccode);
3721 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3722 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3723 }
3724 else if (CONVERT_EXPR_CODE_P (code))
3725 {
3726 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3727 operand_equal_p. */
3728 enum tree_code ccode = NOP_EXPR;
3729 hstate.add_object (ccode);
3730
3731 /* Don't hash the type, that can lead to having nodes which
3732 compare equal according to operand_equal_p, but which
3733 have different hash codes. Make sure to include signedness
3734 in the hash computation. */
3735 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3736 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3737 }
3738 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3739 else if (code == MEM_REF
3740 && (flags & OEP_ADDRESS_OF) != 0
3741 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3742 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3743 && integer_zerop (TREE_OPERAND (t, 1)))
3744 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3745 hstate, flags);
3746 /* Don't ICE on FE specific trees, or their arguments etc.
3747 during operand_equal_p hash verification. */
3748 else if (!IS_EXPR_CODE_CLASS (tclass))
3749 gcc_assert (flags & OEP_HASH_CHECK);
3750 else
3751 {
3752 unsigned int sflags = flags;
3753
3754 hstate.add_object (code);
3755
3756 switch (code)
3757 {
3758 case ADDR_EXPR:
3759 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3760 flags |= OEP_ADDRESS_OF;
3761 sflags = flags;
3762 break;
3763
3764 case INDIRECT_REF:
3765 case MEM_REF:
3766 case TARGET_MEM_REF:
3767 flags &= ~OEP_ADDRESS_OF;
3768 sflags = flags;
3769 break;
3770
3771 case ARRAY_REF:
3772 case ARRAY_RANGE_REF:
3773 case COMPONENT_REF:
3774 case BIT_FIELD_REF:
3775 sflags &= ~OEP_ADDRESS_OF;
3776 break;
3777
3778 case COND_EXPR:
3779 flags &= ~OEP_ADDRESS_OF;
3780 break;
3781
3782 case WIDEN_MULT_PLUS_EXPR:
3783 case WIDEN_MULT_MINUS_EXPR:
3784 {
3785 /* The multiplication operands are commutative. */
3786 inchash::hash one, two;
3787 hash_operand (TREE_OPERAND (t, 0), one, flags);
3788 hash_operand (TREE_OPERAND (t, 1), two, flags);
3789 hstate.add_commutative (one, two);
3790 hash_operand (TREE_OPERAND (t, 2), two, flags);
3791 return;
3792 }
3793
3794 case CALL_EXPR:
3795 if (CALL_EXPR_FN (t) == NULL_TREE)
3796 hstate.add_int (CALL_EXPR_IFN (t));
3797 break;
3798
3799 case TARGET_EXPR:
3800 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3801 Usually different TARGET_EXPRs should just use
3802 different temporaries in their slots. */
3803 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3804 return;
3805
3806 /* Virtual table call. */
3807 case OBJ_TYPE_REF:
3808 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3809 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3810 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3811 return;
3812 default:
3813 break;
3814 }
3815
3816 /* Don't hash the type, that can lead to having nodes which
3817 compare equal according to operand_equal_p, but which
3818 have different hash codes. */
3819 if (code == NON_LVALUE_EXPR)
3820 {
3821 /* Make sure to include signedness in the hash computation.  */
3822 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3823 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3824 }
3825
3826 else if (commutative_tree_code (code))
3827 {
3828 /* It's a commutative expression. We want to hash it the same
3829 however it appears. We do this by first hashing both operands
3830 and then rehashing based on the order of their independent
3831 hashes. */
3832 inchash::hash one, two;
3833 hash_operand (TREE_OPERAND (t, 0), one, flags);
3834 hash_operand (TREE_OPERAND (t, 1), two, flags);
3835 hstate.add_commutative (one, two);
3836 }
3837 else
3838 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3839 hash_operand (TREE_OPERAND (t, i), hstate,
3840 i == 0 ? flags : sflags);
3841 }
3842 return;
3843 }
3844 }
3845
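/* Illustrative sketch, not part of GCC: one way to hash a pair of operands
   so that their order does not matter, which is the effect
   hstate.add_commutative is used for above.  The helper names and the
   mixing constant are hypothetical; the real implementation lives in
   inchash.  */
#if 0
#include <stdint.h>

static uint32_t
mix_step (uint32_t h, uint32_t v)
{
  /* A simple multiplicative mixing step.  */
  h ^= v;
  h *= 0x9e3779b1u;
  return h;
}

static uint32_t
hash_commutative_pair (uint32_t op0_hash, uint32_t op1_hash, uint32_t seed)
{
  /* Feed the smaller operand hash first, so swapping the operands produces
     exactly the same sequence of mixing steps and therefore the same final
     hash.  */
  uint32_t lo = op0_hash < op1_hash ? op0_hash : op1_hash;
  uint32_t hi = op0_hash < op1_hash ? op1_hash : op0_hash;
  return mix_step (mix_step (seed, lo), hi);
}
#endif
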
3846 bool
3847 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3848 unsigned int flags, bool *ret)
3849 {
3850 /* When checking, verify at the outermost operand_equal_p call that
3851 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
3852 hash value. */
3853 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3854 {
3855 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3856 {
3857 if (arg0 != arg1)
3858 {
3859 inchash::hash hstate0 (0), hstate1 (0);
3860 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3861 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3862 hashval_t h0 = hstate0.end ();
3863 hashval_t h1 = hstate1.end ();
3864 gcc_assert (h0 == h1);
3865 }
3866 *ret = true;
3867 }
3868 else
3869 *ret = false;
3870
3871 return true;
3872 }
3873
3874 return false;
3875 }
3876
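/* Illustrative sketch, not part of GCC: the invariant enforced above, stated
   for an arbitrary equality/hash pair.  Whenever the equality predicate
   accepts two objects their hashes must agree; the converse need not hold.
   The typedefs and the function name are hypothetical.  */
#if 0
#include <assert.h>
#include <stdint.h>

typedef int (*eq_fn) (const void *, const void *);
typedef uint32_t (*hash_fn) (const void *);

static void
check_hash_consistency (eq_fn eq, hash_fn hash, const void *a, const void *b)
{
  /* Equal objects must hash identically; unequal objects may collide.  */
  if (eq (a, b))
    assert (hash (a) == hash (b));
}
#endif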
3877
3878 static operand_compare default_compare_instance;
3879
3880 /* Convenience wrapper around the operand_compare class, because usually we
3881 do not need to play with the valueizer.  */
3882
3883 bool
3884 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3885 {
3886 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3887 }
3888
3889 namespace inchash
3890 {
3891
3892 /* Generate a hash value for an expression. This can be used iteratively
3893 by passing a previous result as the HSTATE argument.
3894
3895 This function is intended to produce the same hash for expressions which
3896 would compare equal using operand_equal_p. */
3897 void
3898 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
3899 {
3900 default_compare_instance.hash_operand (t, hstate, flags);
3901 }
3902
3903 }
3904 \f
3905 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3906 with a different signedness or a narrower precision. */
3907
3908 static bool
3909 operand_equal_for_comparison_p (tree arg0, tree arg1)
3910 {
3911 if (operand_equal_p (arg0, arg1, 0))
3912 return true;
3913
3914 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3915 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3916 return false;
3917
3918 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3919 and see if the inner values are the same. This removes any
3920 signedness comparison, which doesn't matter here. */
3921 tree op0 = arg0;
3922 tree op1 = arg1;
3923 STRIP_NOPS (op0);
3924 STRIP_NOPS (op1);
3925 if (operand_equal_p (op0, op1, 0))
3926 return true;
3927
3928 /* Discard a single widening conversion from ARG1 and see if the inner
3929 value is the same as ARG0. */
3930 if (CONVERT_EXPR_P (arg1)
3931 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3932 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3933 < TYPE_PRECISION (TREE_TYPE (arg1))
3934 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3935 return true;
3936
3937 return false;
3938 }
3939 \f
3940 /* See if ARG is an expression that is either a comparison or is performing
3941 arithmetic on comparisons. The comparisons must only be comparing
3942 two different values, which will be stored in *CVAL1 and *CVAL2; if
3943 they are nonzero it means that some operands have already been found.
3944 No variables may be used anywhere else in the expression except in the
3945 comparisons.
3946
3947 If this is true, return 1. Otherwise, return zero. */
3948
3949 static bool
3950 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3951 {
3952 enum tree_code code = TREE_CODE (arg);
3953 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3954
3955 /* We can handle some of the tcc_expression cases here. */
3956 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3957 tclass = tcc_unary;
3958 else if (tclass == tcc_expression
3959 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3960 || code == COMPOUND_EXPR))
3961 tclass = tcc_binary;
3962
3963 switch (tclass)
3964 {
3965 case tcc_unary:
3966 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3967
3968 case tcc_binary:
3969 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3970 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
3971
3972 case tcc_constant:
3973 return true;
3974
3975 case tcc_expression:
3976 if (code == COND_EXPR)
3977 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3978 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
3979 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
3980 return false;
3981
3982 case tcc_comparison:
3983 /* First see if we can handle the first operand, then the second. For
3984 the second operand, we know *CVAL1 can't be zero. It must be that
3985 one side of the comparison is each of the values; test for the
3986 case where this isn't true by failing if the two operands
3987 are the same. */
3988
3989 if (operand_equal_p (TREE_OPERAND (arg, 0),
3990 TREE_OPERAND (arg, 1), 0))
3991 return false;
3992
3993 if (*cval1 == 0)
3994 *cval1 = TREE_OPERAND (arg, 0);
3995 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3996 ;
3997 else if (*cval2 == 0)
3998 *cval2 = TREE_OPERAND (arg, 0);
3999 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4000 ;
4001 else
4002 return false;
4003
4004 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4005 ;
4006 else if (*cval2 == 0)
4007 *cval2 = TREE_OPERAND (arg, 1);
4008 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4009 ;
4010 else
4011 return false;
4012
4013 return true;
4014
4015 default:
4016 return false;
4017 }
4018 }
4019 \f
4020 /* ARG is a tree that is known to contain just arithmetic operations and
4021 comparisons. Evaluate the operations in the tree substituting NEW0 for
4022 any occurrence of OLD0 as an operand of a comparison and likewise for
4023 NEW1 and OLD1. */
4024
4025 static tree
4026 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4027 tree old1, tree new1)
4028 {
4029 tree type = TREE_TYPE (arg);
4030 enum tree_code code = TREE_CODE (arg);
4031 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4032
4033 /* We can handle some of the tcc_expression cases here. */
4034 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4035 tclass = tcc_unary;
4036 else if (tclass == tcc_expression
4037 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4038 tclass = tcc_binary;
4039
4040 switch (tclass)
4041 {
4042 case tcc_unary:
4043 return fold_build1_loc (loc, code, type,
4044 eval_subst (loc, TREE_OPERAND (arg, 0),
4045 old0, new0, old1, new1));
4046
4047 case tcc_binary:
4048 return fold_build2_loc (loc, code, type,
4049 eval_subst (loc, TREE_OPERAND (arg, 0),
4050 old0, new0, old1, new1),
4051 eval_subst (loc, TREE_OPERAND (arg, 1),
4052 old0, new0, old1, new1));
4053
4054 case tcc_expression:
4055 switch (code)
4056 {
4057 case SAVE_EXPR:
4058 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4059 old1, new1);
4060
4061 case COMPOUND_EXPR:
4062 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4063 old1, new1);
4064
4065 case COND_EXPR:
4066 return fold_build3_loc (loc, code, type,
4067 eval_subst (loc, TREE_OPERAND (arg, 0),
4068 old0, new0, old1, new1),
4069 eval_subst (loc, TREE_OPERAND (arg, 1),
4070 old0, new0, old1, new1),
4071 eval_subst (loc, TREE_OPERAND (arg, 2),
4072 old0, new0, old1, new1));
4073 default:
4074 break;
4075 }
4076 /* Fall through - ??? */
4077
4078 case tcc_comparison:
4079 {
4080 tree arg0 = TREE_OPERAND (arg, 0);
4081 tree arg1 = TREE_OPERAND (arg, 1);
4082
4083 /* We need to check both for exact equality and tree equality. The
4084 former will be true if the operand has a side-effect. In that
4085 case, we know the operand occurred exactly once. */
4086
4087 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4088 arg0 = new0;
4089 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4090 arg0 = new1;
4091
4092 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4093 arg1 = new0;
4094 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4095 arg1 = new1;
4096
4097 return fold_build2_loc (loc, code, type, arg0, arg1);
4098 }
4099
4100 default:
4101 return arg;
4102 }
4103 }
4104 \f
4105 /* Return a tree for the case when the result of an expression is RESULT
4106 converted to TYPE and OMITTED was previously an operand of the expression
4107 but is now not needed (e.g., we folded OMITTED * 0).
4108
4109 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4110 the conversion of RESULT to TYPE. */
4111
4112 tree
4113 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4114 {
4115 tree t = fold_convert_loc (loc, type, result);
4116
4117 /* If the resulting operand is an empty statement, just return the omitted
4118 statement casted to void. */
4119 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4120 return build1_loc (loc, NOP_EXPR, void_type_node,
4121 fold_ignored_result (omitted));
4122
4123 if (TREE_SIDE_EFFECTS (omitted))
4124 return build2_loc (loc, COMPOUND_EXPR, type,
4125 fold_ignored_result (omitted), t);
4126
4127 return non_lvalue_loc (loc, t);
4128 }
4129
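/* Illustrative sketch, not part of GCC: at the source level, keeping a
   side-effecting omitted operand alive in a COMPOUND_EXPR corresponds to the
   comma operator.  side_effect () is a hypothetical function whose call must
   not be dropped when its result is folded away.  */
#if 0
extern int side_effect (void);

static int
folded_multiply_by_zero (void)
{
  /* Folding side_effect () * 0: the call is still evaluated, and the value
     of the whole expression is the constant 0.  */
  return (side_effect (), 0);
}
#endif
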
4130 /* Return a tree for the case when the result of an expression is RESULT
4131 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4132 of the expression but are now not needed.
4133
4134 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4135 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4136 evaluated before OMITTED2. Otherwise, if neither has side effects,
4137 just do the conversion of RESULT to TYPE. */
4138
4139 tree
4140 omit_two_operands_loc (location_t loc, tree type, tree result,
4141 tree omitted1, tree omitted2)
4142 {
4143 tree t = fold_convert_loc (loc, type, result);
4144
4145 if (TREE_SIDE_EFFECTS (omitted2))
4146 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4147 if (TREE_SIDE_EFFECTS (omitted1))
4148 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4149
4150 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4151 }
4152
4153 \f
4154 /* Return a simplified tree node for the truth-negation of ARG. This
4155 never alters ARG itself. We assume that ARG is an operation that
4156 returns a truth value (0 or 1).
4157
4158 FIXME: one would think we would fold the result, but it causes
4159 problems with the dominator optimizer. */
4160
4161 static tree
4162 fold_truth_not_expr (location_t loc, tree arg)
4163 {
4164 tree type = TREE_TYPE (arg);
4165 enum tree_code code = TREE_CODE (arg);
4166 location_t loc1, loc2;
4167
4168 /* If this is a comparison, we can simply invert it, except for
4169 floating-point non-equality comparisons, in which case we just
4170 enclose a TRUTH_NOT_EXPR around what we have. */
4171
4172 if (TREE_CODE_CLASS (code) == tcc_comparison)
4173 {
4174 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4175 if (FLOAT_TYPE_P (op_type)
4176 && flag_trapping_math
4177 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4178 && code != NE_EXPR && code != EQ_EXPR)
4179 return NULL_TREE;
4180
4181 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4182 if (code == ERROR_MARK)
4183 return NULL_TREE;
4184
4185 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4186 TREE_OPERAND (arg, 1));
4187 if (TREE_NO_WARNING (arg))
4188 TREE_NO_WARNING (ret) = 1;
4189 return ret;
4190 }
4191
4192 switch (code)
4193 {
4194 case INTEGER_CST:
4195 return constant_boolean_node (integer_zerop (arg), type);
4196
4197 case TRUTH_AND_EXPR:
4198 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4199 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4200 return build2_loc (loc, TRUTH_OR_EXPR, type,
4201 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4202 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4203
4204 case TRUTH_OR_EXPR:
4205 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4206 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4207 return build2_loc (loc, TRUTH_AND_EXPR, type,
4208 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4209 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4210
4211 case TRUTH_XOR_EXPR:
4212 /* Here we can invert either operand. We invert the first operand
4213 unless the second operand is a TRUTH_NOT_EXPR in which case our
4214 result is the XOR of the first operand with the inside of the
4215 negation of the second operand. */
4216
4217 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4218 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4219 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4220 else
4221 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4222 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4223 TREE_OPERAND (arg, 1));
4224
4225 case TRUTH_ANDIF_EXPR:
4226 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4227 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4228 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4229 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4230 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4231
4232 case TRUTH_ORIF_EXPR:
4233 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4234 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4235 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4236 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4237 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4238
4239 case TRUTH_NOT_EXPR:
4240 return TREE_OPERAND (arg, 0);
4241
4242 case COND_EXPR:
4243 {
4244 tree arg1 = TREE_OPERAND (arg, 1);
4245 tree arg2 = TREE_OPERAND (arg, 2);
4246
4247 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4248 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4249
4250 /* A COND_EXPR may have a throw as one operand, which
4251 then has void type. Just leave void operands
4252 as they are. */
4253 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4254 VOID_TYPE_P (TREE_TYPE (arg1))
4255 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4256 VOID_TYPE_P (TREE_TYPE (arg2))
4257 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4258 }
4259
4260 case COMPOUND_EXPR:
4261 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4262 return build2_loc (loc, COMPOUND_EXPR, type,
4263 TREE_OPERAND (arg, 0),
4264 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4265
4266 case NON_LVALUE_EXPR:
4267 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4268 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4269
4270 CASE_CONVERT:
4271 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4272 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4273
4274 /* fall through */
4275
4276 case FLOAT_EXPR:
4277 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4278 return build1_loc (loc, TREE_CODE (arg), type,
4279 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4280
4281 case BIT_AND_EXPR:
4282 if (!integer_onep (TREE_OPERAND (arg, 1)))
4283 return NULL_TREE;
4284 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4285
4286 case SAVE_EXPR:
4287 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4288
4289 case CLEANUP_POINT_EXPR:
4290 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4291 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4292 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4293
4294 default:
4295 return NULL_TREE;
4296 }
4297 }
4298
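/* Illustrative sketch, not part of GCC: why a floating-point comparison
   cannot be negated by merely swapping the comparison code once NaNs are
   honored.  With an unordered operand, !(x < y) is true while x >= y is
   false, so LT has to invert to UNGE rather than GE, which is what
   invert_tree_comparison decides above.  */
#if 0
#include <assert.h>
#include <math.h>

static void
demo_nan_negation (void)
{
  double x = 1.0;
  double y = nan ("");

  assert (!(x < y) == 1);	/* Negating the comparison result...  */
  assert ((x >= y) == 0);	/* ...differs from the swapped code.  */
}
#endif
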
4299 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4300 assume that ARG is an operation that returns a truth value (0 or 1
4301 for scalars, 0 or -1 for vectors). Return the folded expression if
4302 folding is successful. Otherwise, return NULL_TREE. */
4303
4304 static tree
4305 fold_invert_truthvalue (location_t loc, tree arg)
4306 {
4307 tree type = TREE_TYPE (arg);
4308 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4309 ? BIT_NOT_EXPR
4310 : TRUTH_NOT_EXPR,
4311 type, arg);
4312 }
4313
4314 /* Return a simplified tree node for the truth-negation of ARG. This
4315 never alters ARG itself. We assume that ARG is an operation that
4316 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4317
4318 tree
4319 invert_truthvalue_loc (location_t loc, tree arg)
4320 {
4321 if (TREE_CODE (arg) == ERROR_MARK)
4322 return arg;
4323
4324 tree type = TREE_TYPE (arg);
4325 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4326 ? BIT_NOT_EXPR
4327 : TRUTH_NOT_EXPR,
4328 type, arg);
4329 }
4330 \f
4331 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4332 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4333 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4334 is the original memory reference used to preserve the alias set of
4335 the access. */
4336
4337 static tree
4338 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4339 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4340 int unsignedp, int reversep)
4341 {
4342 tree result, bftype;
4343
4344 /* Attempt not to lose the access path if possible. */
4345 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4346 {
4347 tree ninner = TREE_OPERAND (orig_inner, 0);
4348 machine_mode nmode;
4349 poly_int64 nbitsize, nbitpos;
4350 tree noffset;
4351 int nunsignedp, nreversep, nvolatilep = 0;
4352 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4353 &noffset, &nmode, &nunsignedp,
4354 &nreversep, &nvolatilep);
4355 if (base == inner
4356 && noffset == NULL_TREE
4357 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4358 && !reversep
4359 && !nreversep
4360 && !nvolatilep)
4361 {
4362 inner = ninner;
4363 bitpos -= nbitpos;
4364 }
4365 }
4366
4367 alias_set_type iset = get_alias_set (orig_inner);
4368 if (iset == 0 && get_alias_set (inner) != iset)
4369 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4370 build_fold_addr_expr (inner),
4371 build_int_cst (ptr_type_node, 0));
4372
4373 if (known_eq (bitpos, 0) && !reversep)
4374 {
4375 tree size = TYPE_SIZE (TREE_TYPE (inner));
4376 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4377 || POINTER_TYPE_P (TREE_TYPE (inner)))
4378 && tree_fits_shwi_p (size)
4379 && tree_to_shwi (size) == bitsize)
4380 return fold_convert_loc (loc, type, inner);
4381 }
4382
4383 bftype = type;
4384 if (TYPE_PRECISION (bftype) != bitsize
4385 || TYPE_UNSIGNED (bftype) == !unsignedp)
4386 bftype = build_nonstandard_integer_type (bitsize, 0);
4387
4388 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4389 bitsize_int (bitsize), bitsize_int (bitpos));
4390 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4391
4392 if (bftype != type)
4393 result = fold_convert_loc (loc, type, result);
4394
4395 return result;
4396 }
4397
4398 /* Optimize a bit-field compare.
4399
4400 There are two cases: First is a compare against a constant and the
4401 second is a comparison of two items where the fields are at the same
4402 bit position relative to the start of a chunk (byte, halfword, word)
4403 large enough to contain it. In these cases we can avoid the shift
4404 implicit in bitfield extractions.
4405
4406 For constants, we emit a compare of the shifted constant with the
4407 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4408 compared. For two fields at the same position, we do the ANDs with the
4409 similar mask and compare the result of the ANDs.
4410
4411 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4412 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4413 are the left and right operands of the comparison, respectively.
4414
4415 If the optimization described above can be done, we return the resulting
4416 tree. Otherwise we return zero. */
4417
4418 static tree
4419 optimize_bit_field_compare (location_t loc, enum tree_code code,
4420 tree compare_type, tree lhs, tree rhs)
4421 {
4422 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4423 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4424 tree type = TREE_TYPE (lhs);
4425 tree unsigned_type;
4426 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4427 machine_mode lmode, rmode;
4428 scalar_int_mode nmode;
4429 int lunsignedp, runsignedp;
4430 int lreversep, rreversep;
4431 int lvolatilep = 0, rvolatilep = 0;
4432 tree linner, rinner = NULL_TREE;
4433 tree mask;
4434 tree offset;
4435
4436 /* Get all the information about the extractions being done. If the bit size
4437 is the same as the size of the underlying object, we aren't doing an
4438 extraction at all and so can do nothing. We also don't want to
4439 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4440 then will no longer be able to replace it. */
4441 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4442 &lunsignedp, &lreversep, &lvolatilep);
4443 if (linner == lhs
4444 || !known_size_p (plbitsize)
4445 || !plbitsize.is_constant (&lbitsize)
4446 || !plbitpos.is_constant (&lbitpos)
4447 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4448 || offset != 0
4449 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4450 || lvolatilep)
4451 return 0;
4452
4453 if (const_p)
4454 rreversep = lreversep;
4455 else
4456 {
4457 /* If this is not a constant, we can only do something if bit positions,
4458 sizes, signedness and storage order are the same. */
4459 rinner
4460 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4461 &runsignedp, &rreversep, &rvolatilep);
4462
4463 if (rinner == rhs
4464 || maybe_ne (lbitpos, rbitpos)
4465 || maybe_ne (lbitsize, rbitsize)
4466 || lunsignedp != runsignedp
4467 || lreversep != rreversep
4468 || offset != 0
4469 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4470 || rvolatilep)
4471 return 0;
4472 }
4473
4474 /* Honor the C++ memory model and mimic what RTL expansion does. */
4475 poly_uint64 bitstart = 0;
4476 poly_uint64 bitend = 0;
4477 if (TREE_CODE (lhs) == COMPONENT_REF)
4478 {
4479 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4480 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4481 return 0;
4482 }
4483
4484 /* See if we can find a mode to refer to this field. We should be able to,
4485 but fail if we can't. */
4486 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4487 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4488 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4489 TYPE_ALIGN (TREE_TYPE (rinner))),
4490 BITS_PER_WORD, false, &nmode))
4491 return 0;
4492
4493 /* Set signed and unsigned types of the precision of this mode for the
4494 shifts below. */
4495 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4496
4497 /* Compute the bit position and size for the new reference and our offset
4498 within it. If the new reference is the same size as the original, we
4499 won't optimize anything, so return zero. */
4500 nbitsize = GET_MODE_BITSIZE (nmode);
4501 nbitpos = lbitpos & ~ (nbitsize - 1);
4502 lbitpos -= nbitpos;
4503 if (nbitsize == lbitsize)
4504 return 0;
4505
4506 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4507 lbitpos = nbitsize - lbitsize - lbitpos;
4508
4509 /* Make the mask to be used against the extracted field. */
4510 mask = build_int_cst_type (unsigned_type, -1);
4511 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4512 mask = const_binop (RSHIFT_EXPR, mask,
4513 size_int (nbitsize - lbitsize - lbitpos));
4514
4515 if (! const_p)
4516 {
4517 if (nbitpos < 0)
4518 return 0;
4519
4520 /* If not comparing with constant, just rework the comparison
4521 and return. */
4522 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4523 nbitsize, nbitpos, 1, lreversep);
4524 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4525 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4526 nbitsize, nbitpos, 1, rreversep);
4527 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4528 return fold_build2_loc (loc, code, compare_type, t1, t2);
4529 }
4530
4531 /* Otherwise, we are handling the constant case. See if the constant is too
4532 big for the field. Warn and return a tree for 0 (false) if so. We do
4533 this not only for its own sake, but to avoid having to test for this
4534 error case below. If we didn't, we might generate wrong code.
4535
4536 For unsigned fields, the constant shifted right by the field length should
4537 be all zero. For signed fields, the high-order bits should agree with
4538 the sign bit. */
4539
4540 if (lunsignedp)
4541 {
4542 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4543 {
4544 warning (0, "comparison is always %d due to width of bit-field",
4545 code == NE_EXPR);
4546 return constant_boolean_node (code == NE_EXPR, compare_type);
4547 }
4548 }
4549 else
4550 {
4551 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4552 if (tem != 0 && tem != -1)
4553 {
4554 warning (0, "comparison is always %d due to width of bit-field",
4555 code == NE_EXPR);
4556 return constant_boolean_node (code == NE_EXPR, compare_type);
4557 }
4558 }
4559
4560 if (nbitpos < 0)
4561 return 0;
4562
4563 /* Single-bit compares should always be against zero. */
4564 if (lbitsize == 1 && ! integer_zerop (rhs))
4565 {
4566 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4567 rhs = build_int_cst (type, 0);
4568 }
4569
4570 /* Make a new bitfield reference, shift the constant over the
4571 appropriate number of bits and mask it with the computed mask
4572 (in case this was a signed field). If we changed it, make a new one. */
4573 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4574 nbitsize, nbitpos, 1, lreversep);
4575
4576 rhs = const_binop (BIT_AND_EXPR,
4577 const_binop (LSHIFT_EXPR,
4578 fold_convert_loc (loc, unsigned_type, rhs),
4579 size_int (lbitpos)),
4580 mask);
4581
4582 lhs = build2_loc (loc, code, compare_type,
4583 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4584 return lhs;
4585 }
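
/* Illustrative sketch, not part of GCC: the shape of the test built for the
   constant case of optimize_bit_field_compare above.  The word is loaded
   whole, masked, and compared against the constant shifted into place,
   avoiding the shift implicit in a bit-field extraction.  The bit position
   and width below are hypothetical and in general depend on the target's
   layout and endianness.  */
#if 0
#include <stdint.h>

#define LBITPOS  3	/* Bit offset of the field within the 32-bit word.  */
#define LBITSIZE 5	/* Width of the field in bits.  */

static int
field_equals (uint32_t word, uint32_t rhs)
{
  /* Mask built like the LSHIFT_EXPR/RSHIFT_EXPR pair above: LBITSIZE one
     bits starting at bit LBITPOS.  */
  uint32_t mask = ((uint32_t) -1 << (32 - LBITSIZE))
		  >> (32 - LBITSIZE - LBITPOS);

  return (word & mask) == ((rhs << LBITPOS) & mask);
}
#endif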
4586 \f
4587 /* Subroutine for fold_truth_andor_1: decode a field reference.
4588
4589 If EXP is a comparison reference, we return the innermost reference.
4590
4591 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4592 set to the starting bit number.
4593
4594 If the innermost field can be completely contained in a mode-sized
4595 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4596
4597 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4598 otherwise it is not changed.
4599
4600 *PUNSIGNEDP is set to the signedness of the field.
4601
4602 *PREVERSEP is set to the storage order of the field.
4603
4604 *PMASK is set to the mask used. This is either contained in a
4605 BIT_AND_EXPR or derived from the width of the field.
4606
4607 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4608
4609 Return 0 if this is not a component reference or is one that we can't
4610 do anything with. */
4611
4612 static tree
4613 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4614 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4615 int *punsignedp, int *preversep, int *pvolatilep,
4616 tree *pmask, tree *pand_mask)
4617 {
4618 tree exp = *exp_;
4619 tree outer_type = 0;
4620 tree and_mask = 0;
4621 tree mask, inner, offset;
4622 tree unsigned_type;
4623 unsigned int precision;
4624
4625 /* All the optimizations using this function assume integer fields.
4626 There are problems with FP fields since the type_for_size call
4627 below can fail for, e.g., XFmode. */
4628 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4629 return NULL_TREE;
4630
4631 /* We are interested in the bare arrangement of bits, so strip everything
4632 that doesn't affect the machine mode. However, record the type of the
4633 outermost expression if it may matter below. */
4634 if (CONVERT_EXPR_P (exp)
4635 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4636 outer_type = TREE_TYPE (exp);
4637 STRIP_NOPS (exp);
4638
4639 if (TREE_CODE (exp) == BIT_AND_EXPR)
4640 {
4641 and_mask = TREE_OPERAND (exp, 1);
4642 exp = TREE_OPERAND (exp, 0);
4643 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4644 if (TREE_CODE (and_mask) != INTEGER_CST)
4645 return NULL_TREE;
4646 }
4647
4648 poly_int64 poly_bitsize, poly_bitpos;
4649 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4650 pmode, punsignedp, preversep, pvolatilep);
4651 if ((inner == exp && and_mask == 0)
4652 || !poly_bitsize.is_constant (pbitsize)
4653 || !poly_bitpos.is_constant (pbitpos)
4654 || *pbitsize < 0
4655 || offset != 0
4656 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4657 /* Reject out-of-bound accesses (PR79731). */
4658 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4659 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4660 *pbitpos + *pbitsize) < 0))
4661 return NULL_TREE;
4662
4663 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4664 if (unsigned_type == NULL_TREE)
4665 return NULL_TREE;
4666
4667 *exp_ = exp;
4668
4669 /* If the number of bits in the reference is the same as the bitsize of
4670 the outer type, then the outer type gives the signedness. Otherwise
4671 (in case of a small bitfield) the signedness is unchanged. */
4672 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4673 *punsignedp = TYPE_UNSIGNED (outer_type);
4674
4675 /* Compute the mask to access the bitfield. */
4676 precision = TYPE_PRECISION (unsigned_type);
4677
4678 mask = build_int_cst_type (unsigned_type, -1);
4679
4680 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4681 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4682
4683 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4684 if (and_mask != 0)
4685 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4686 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4687
4688 *pmask = mask;
4689 *pand_mask = and_mask;
4690 return inner;
4691 }
4692
4693 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4694 bit positions and the type of MASK is signed.  */
4695
4696 static bool
4697 all_ones_mask_p (const_tree mask, unsigned int size)
4698 {
4699 tree type = TREE_TYPE (mask);
4700 unsigned int precision = TYPE_PRECISION (type);
4701
4702 /* If this function returns true when the type of the mask is
4703 UNSIGNED, then there will be errors. In particular see
4704 gcc.c-torture/execute/990326-1.c. There does not appear to be
4705 any documentation paper trail as to why this is so. But the pre
4706 wide-int worked with that restriction and it has been preserved
4707 here. */
4708 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4709 return false;
4710
4711 return wi::mask (size, false, precision) == wi::to_wide (mask);
4712 }
4713
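/* Illustrative sketch, not part of GCC: the test performed by
   all_ones_mask_p above, specialized to a 32-bit value and ignoring the
   signedness restriction discussed there.  A mask of SIZE low-order ones is
   the value 2**SIZE - 1.  */
#if 0
#include <stdint.h>

static int
all_ones_mask_p_u32 (uint32_t mask, unsigned int size)
{
  if (size > 32)
    return 0;
  /* SIZE == 32 needs UINT32_MAX directly, because shifting by the full
     width of the type is undefined in C.  */
  uint32_t low_ones = size == 32 ? UINT32_MAX : ((uint32_t) 1 << size) - 1;
  return mask == low_ones;
}
#endif
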
4714 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4715 represents the sign bit of EXP's type. If EXP represents a sign
4716 or zero extension, also test VAL against the unextended type.
4717 The return value is the (sub)expression whose sign bit is VAL,
4718 or NULL_TREE otherwise. */
4719
4720 tree
4721 sign_bit_p (tree exp, const_tree val)
4722 {
4723 int width;
4724 tree t;
4725
4726 /* Tree EXP must have an integral type. */
4727 t = TREE_TYPE (exp);
4728 if (! INTEGRAL_TYPE_P (t))
4729 return NULL_TREE;
4730
4731 /* Tree VAL must be an integer constant. */
4732 if (TREE_CODE (val) != INTEGER_CST
4733 || TREE_OVERFLOW (val))
4734 return NULL_TREE;
4735
4736 width = TYPE_PRECISION (t);
4737 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4738 return exp;
4739
4740 /* Handle extension from a narrower type. */
4741 if (TREE_CODE (exp) == NOP_EXPR
4742 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4743 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4744
4745 return NULL_TREE;
4746 }
4747
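/* Illustrative sketch, not part of GCC: the wi::only_sign_bit_p test used by
   sign_bit_p above, for a value already reduced to WIDTH significant bits
   with WIDTH <= 32.  The sign bit of a WIDTH-bit type is the value with only
   bit WIDTH - 1 set, e.g. 0x80 for width 8.  */
#if 0
#include <stdint.h>

static int
only_sign_bit_p_u32 (uint32_t val, unsigned int width)
{
  if (width == 0 || width > 32)
    return 0;
  return val == (uint32_t) 1 << (width - 1);
}
#endif
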
4748 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4749 to be evaluated unconditionally. */
4750
4751 static bool
4752 simple_operand_p (const_tree exp)
4753 {
4754 /* Strip any conversions that don't change the machine mode. */
4755 STRIP_NOPS (exp);
4756
4757 return (CONSTANT_CLASS_P (exp)
4758 || TREE_CODE (exp) == SSA_NAME
4759 || (DECL_P (exp)
4760 && ! TREE_ADDRESSABLE (exp)
4761 && ! TREE_THIS_VOLATILE (exp)
4762 && ! DECL_NONLOCAL (exp)
4763 /* Don't regard global variables as simple. They may be
4764 allocated in ways unknown to the compiler (shared memory,
4765 #pragma weak, etc). */
4766 && ! TREE_PUBLIC (exp)
4767 && ! DECL_EXTERNAL (exp)
4768 /* Weakrefs are not safe to be read, since they can be NULL.
4769 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4770 have DECL_WEAK flag set. */
4771 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4772 /* Loading a static variable is unduly expensive, but global
4773 registers aren't expensive. */
4774 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4775 }
4776
4777 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4778 to be evaluated unconditionally.
4779 In addition to simple_operand_p, we assume that comparisons, conversions,
4780 and logic-not operations are simple, if their operands are simple, too. */
4781
4782 static bool
4783 simple_operand_p_2 (tree exp)
4784 {
4785 enum tree_code code;
4786
4787 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4788 return false;
4789
4790 while (CONVERT_EXPR_P (exp))
4791 exp = TREE_OPERAND (exp, 0);
4792
4793 code = TREE_CODE (exp);
4794
4795 if (TREE_CODE_CLASS (code) == tcc_comparison)
4796 return (simple_operand_p (TREE_OPERAND (exp, 0))
4797 && simple_operand_p (TREE_OPERAND (exp, 1)));
4798
4799 if (code == TRUTH_NOT_EXPR)
4800 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4801
4802 return simple_operand_p (exp);
4803 }
4804
4805 \f
4806 /* The following functions are subroutines to fold_range_test and allow it to
4807 try to change a logical combination of comparisons into a range test.
4808
4809 For example, both
4810 X == 2 || X == 3 || X == 4 || X == 5
4811 and
4812 X >= 2 && X <= 5
4813 are converted to
4814 (unsigned) (X - 2) <= 3
4815
4816 We describe each set of comparisons as being either inside or outside
4817 a range, using a variable named like IN_P, and then describe the
4818 range with a lower and upper bound. If one of the bounds is omitted,
4819 it represents either the highest or lowest value of the type.
4820
4821 In the comments below, we represent a range by two numbers in brackets
4822 preceded by a "+" to designate being inside that range, or a "-" to
4823 designate being outside that range, so the condition can be inverted by
4824 flipping the prefix. An omitted bound is represented by a "-". For
4825 example, "- [-, 10]" means being outside the range starting at the lowest
4826 possible value and ending at 10, in other words, being greater than 10.
4827 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4828 always false.
4829
4830 We set up things so that the missing bounds are handled in a consistent
4831 manner so neither a missing bound nor "true" and "false" need to be
4832 handled using a special case. */
4833
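/* Illustrative sketch, not part of GCC: the example quoted above, written
   out as C.  Both functions compute the same predicate; the second is the
   single unsigned comparison the range machinery aims for.  Casting before
   the subtraction keeps the arithmetic in wrap-around (unsigned) math, so
   values below 2 become large and fail the <= 3 test.  The function names
   are hypothetical.  */
#if 0
static int
in_range_slow (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
in_range_fast (int x)
{
  return (unsigned int) x - 2u <= 3u;
}
#endif
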
4834 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4835 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4836 and UPPER1_P are nonzero if the respective argument is an upper bound
4837 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4838 must be specified for a comparison. ARG1 will be converted to ARG0's
4839 type if both are specified. */
4840
4841 static tree
4842 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4843 tree arg1, int upper1_p)
4844 {
4845 tree tem;
4846 int result;
4847 int sgn0, sgn1;
4848
4849 /* If neither arg represents infinity, do the normal operation.
4850 Else, if not a comparison, return infinity. Else handle the special
4851 comparison rules. Note that most of the cases below won't occur, but
4852 are handled for consistency. */
4853
4854 if (arg0 != 0 && arg1 != 0)
4855 {
4856 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4857 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4858 STRIP_NOPS (tem);
4859 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4860 }
4861
4862 if (TREE_CODE_CLASS (code) != tcc_comparison)
4863 return 0;
4864
4865 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4866 for neither. In real maths, we cannot assume open ended ranges are
4867 the same. But, this is computer arithmetic, where numbers are finite.
4868 We can therefore make the transformation of any unbounded range with
4869 the value Z, Z being greater than any representable number. This permits
4870 us to treat unbounded ranges as equal. */
4871 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4872 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4873 switch (code)
4874 {
4875 case EQ_EXPR:
4876 result = sgn0 == sgn1;
4877 break;
4878 case NE_EXPR:
4879 result = sgn0 != sgn1;
4880 break;
4881 case LT_EXPR:
4882 result = sgn0 < sgn1;
4883 break;
4884 case LE_EXPR:
4885 result = sgn0 <= sgn1;
4886 break;
4887 case GT_EXPR:
4888 result = sgn0 > sgn1;
4889 break;
4890 case GE_EXPR:
4891 result = sgn0 >= sgn1;
4892 break;
4893 default:
4894 gcc_unreachable ();
4895 }
4896
4897 return constant_boolean_node (result, type);
4898 }
4899 \f
4900 /* Helper routine for make_range. Perform one step for it, return
4901 new expression if the loop should continue or NULL_TREE if it should
4902 stop. */
4903
4904 tree
4905 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4906 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4907 bool *strict_overflow_p)
4908 {
4909 tree arg0_type = TREE_TYPE (arg0);
4910 tree n_low, n_high, low = *p_low, high = *p_high;
4911 int in_p = *p_in_p, n_in_p;
4912
4913 switch (code)
4914 {
4915 case TRUTH_NOT_EXPR:
4916 /* We can only do something if the range is testing for zero. */
4917 if (low == NULL_TREE || high == NULL_TREE
4918 || ! integer_zerop (low) || ! integer_zerop (high))
4919 return NULL_TREE;
4920 *p_in_p = ! in_p;
4921 return arg0;
4922
4923 case EQ_EXPR: case NE_EXPR:
4924 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4925 /* We can only do something if the range is testing for zero
4926 and if the second operand is an integer constant. Note that
4927 saying something is "in" the range we make is done by
4928 complementing IN_P, since it is set in the initial case of
4929 being not equal to zero; "out" is leaving it alone. */
4930 if (low == NULL_TREE || high == NULL_TREE
4931 || ! integer_zerop (low) || ! integer_zerop (high)
4932 || TREE_CODE (arg1) != INTEGER_CST)
4933 return NULL_TREE;
4934
4935 switch (code)
4936 {
4937 case NE_EXPR: /* - [c, c] */
4938 low = high = arg1;
4939 break;
4940 case EQ_EXPR: /* + [c, c] */
4941 in_p = ! in_p, low = high = arg1;
4942 break;
4943 case GT_EXPR: /* - [-, c] */
4944 low = 0, high = arg1;
4945 break;
4946 case GE_EXPR: /* + [c, -] */
4947 in_p = ! in_p, low = arg1, high = 0;
4948 break;
4949 case LT_EXPR: /* - [c, -] */
4950 low = arg1, high = 0;
4951 break;
4952 case LE_EXPR: /* + [-, c] */
4953 in_p = ! in_p, low = 0, high = arg1;
4954 break;
4955 default:
4956 gcc_unreachable ();
4957 }
4958
4959 /* If this is an unsigned comparison, we also know that EXP is
4960 greater than or equal to zero. We base the range tests we make
4961 on that fact, so we record it here so we can parse existing
4962 range tests. We test arg0_type since often the return type
4963 of, e.g. EQ_EXPR, is boolean. */
4964 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4965 {
4966 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4967 in_p, low, high, 1,
4968 build_int_cst (arg0_type, 0),
4969 NULL_TREE))
4970 return NULL_TREE;
4971
4972 in_p = n_in_p, low = n_low, high = n_high;
4973
4974 /* If the high bound is missing, but we have a nonzero low
4975 bound, reverse the range so it goes from zero to the low bound
4976 minus 1. */
4977 if (high == 0 && low && ! integer_zerop (low))
4978 {
4979 in_p = ! in_p;
4980 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4981 build_int_cst (TREE_TYPE (low), 1), 0);
4982 low = build_int_cst (arg0_type, 0);
4983 }
4984 }
4985
4986 *p_low = low;
4987 *p_high = high;
4988 *p_in_p = in_p;
4989 return arg0;
4990
4991 case NEGATE_EXPR:
4992 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4993 low and high are non-NULL, then normalize will DTRT. */
4994 if (!TYPE_UNSIGNED (arg0_type)
4995 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4996 {
4997 if (low == NULL_TREE)
4998 low = TYPE_MIN_VALUE (arg0_type);
4999 if (high == NULL_TREE)
5000 high = TYPE_MAX_VALUE (arg0_type);
5001 }
5002
5003 /* (-x) IN [a,b] -> x in [-b, -a] */
5004 n_low = range_binop (MINUS_EXPR, exp_type,
5005 build_int_cst (exp_type, 0),
5006 0, high, 1);
5007 n_high = range_binop (MINUS_EXPR, exp_type,
5008 build_int_cst (exp_type, 0),
5009 0, low, 0);
5010 if (n_high != 0 && TREE_OVERFLOW (n_high))
5011 return NULL_TREE;
5012 goto normalize;
5013
5014 case BIT_NOT_EXPR:
5015 /* ~ X -> -X - 1 */
5016 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5017 build_int_cst (exp_type, 1));
5018
5019 case PLUS_EXPR:
5020 case MINUS_EXPR:
5021 if (TREE_CODE (arg1) != INTEGER_CST)
5022 return NULL_TREE;
5023
5024 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5025 move a constant to the other side. */
5026 if (!TYPE_UNSIGNED (arg0_type)
5027 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5028 return NULL_TREE;
5029
5030 /* If EXP is signed, any overflow in the computation is undefined,
5031 so we don't worry about it so long as our computations on
5032 the bounds don't overflow. For unsigned, overflow is defined
5033 and this is exactly the right thing. */
5034 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5035 arg0_type, low, 0, arg1, 0);
5036 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5037 arg0_type, high, 1, arg1, 0);
5038 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5039 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5040 return NULL_TREE;
5041
5042 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5043 *strict_overflow_p = true;
5044
5045 normalize:
5046 /* Check for an unsigned range which has wrapped around the maximum
5047 value thus making n_high < n_low, and normalize it. */
5048 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5049 {
5050 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5051 build_int_cst (TREE_TYPE (n_high), 1), 0);
5052 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5053 build_int_cst (TREE_TYPE (n_low), 1), 0);
5054
5055 /* If the range is of the form +/- [ x+1, x ], we won't
5056 be able to normalize it. But then, it represents the
5057 whole range or the empty set, so make it
5058 +/- [ -, - ]. */
5059 if (tree_int_cst_equal (n_low, low)
5060 && tree_int_cst_equal (n_high, high))
5061 low = high = 0;
5062 else
5063 in_p = ! in_p;
5064 }
5065 else
5066 low = n_low, high = n_high;
5067
5068 *p_low = low;
5069 *p_high = high;
5070 *p_in_p = in_p;
5071 return arg0;
5072
5073 CASE_CONVERT:
5074 case NON_LVALUE_EXPR:
5075 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5076 return NULL_TREE;
5077
5078 if (! INTEGRAL_TYPE_P (arg0_type)
5079 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5080 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5081 return NULL_TREE;
5082
5083 n_low = low, n_high = high;
5084
5085 if (n_low != 0)
5086 n_low = fold_convert_loc (loc, arg0_type, n_low);
5087
5088 if (n_high != 0)
5089 n_high = fold_convert_loc (loc, arg0_type, n_high);
5090
5091 /* If we're converting arg0 from an unsigned type, to exp,
5092 a signed type, we will be doing the comparison as unsigned.
5093 The tests above have already verified that LOW and HIGH
5094 are both positive.
5095
5096 So we have to ensure that we will handle large unsigned
5097 values the same way that the current signed bounds treat
5098 negative values. */
5099
5100 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5101 {
5102 tree high_positive;
5103 tree equiv_type;
5104 /* For fixed-point modes, we need to pass the saturating flag
5105 as the 2nd parameter. */
5106 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5107 equiv_type
5108 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5109 TYPE_SATURATING (arg0_type));
5110 else
5111 equiv_type
5112 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5113
5114 /* A range without an upper bound is, naturally, unbounded.
5115 Since convert would have cropped a very large value, use
5116 the max value for the destination type. */
5117 high_positive
5118 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5119 : TYPE_MAX_VALUE (arg0_type);
5120
5121 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5122 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5123 fold_convert_loc (loc, arg0_type,
5124 high_positive),
5125 build_int_cst (arg0_type, 1));
5126
5127 /* If the low bound is specified, "and" the range with the
5128 range for which the original unsigned value will be
5129 positive. */
5130 if (low != 0)
5131 {
5132 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5133 1, fold_convert_loc (loc, arg0_type,
5134 integer_zero_node),
5135 high_positive))
5136 return NULL_TREE;
5137
5138 in_p = (n_in_p == in_p);
5139 }
5140 else
5141 {
5142 /* Otherwise, "or" the range with the range of the input
5143 that will be interpreted as negative. */
5144 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5145 1, fold_convert_loc (loc, arg0_type,
5146 integer_zero_node),
5147 high_positive))
5148 return NULL_TREE;
5149
5150 in_p = (in_p != n_in_p);
5151 }
5152 }
5153
5154 *p_low = n_low;
5155 *p_high = n_high;
5156 *p_in_p = in_p;
5157 return arg0;
5158
5159 default:
5160 return NULL_TREE;
5161 }
5162 }
5163
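/* Illustrative sketch, not part of GCC: the PLUS_EXPR case of
   make_range_step above moves a constant addend onto the range bounds.  In
   wrap-around (unsigned) arithmetic the two expressions below are identical
   for every input, which is why testing x + 10 against [LO, HI] can be
   turned into testing x against [LO - 10, HI - 10]; the normalize code then
   cleans up any wrapped bounds.  */
#if 0
static int
test_sum_in_range (unsigned int x, unsigned int lo, unsigned int hi)
{
  return x + 10u - lo <= hi - lo;	/* x + 10 in [lo, hi].  */
}

static int
test_x_in_shifted_range (unsigned int x, unsigned int lo, unsigned int hi)
{
  return x - (lo - 10u) <= hi - lo;	/* x in [lo - 10, hi - 10].  */
}
#endif
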
5164 /* Given EXP, a logical expression, set the range it is testing into
5165 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5166 actually being tested. *PLOW and *PHIGH will be made of the same
5167 type as the returned expression. If EXP is not a comparison, we
5168 will most likely not be returning a useful value and range. Set
5169 *STRICT_OVERFLOW_P to true if the return value is only valid
5170 because signed overflow is undefined; otherwise, do not change
5171 *STRICT_OVERFLOW_P. */
5172
5173 tree
5174 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5175 bool *strict_overflow_p)
5176 {
5177 enum tree_code code;
5178 tree arg0, arg1 = NULL_TREE;
5179 tree exp_type, nexp;
5180 int in_p;
5181 tree low, high;
5182 location_t loc = EXPR_LOCATION (exp);
5183
5184 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5185 and see if we can refine the range. Some of the cases below may not
5186 happen, but it doesn't seem worth worrying about this. We "continue"
5187 the outer loop when we've changed something; otherwise we "break"
5188 the switch, which will "break" the while. */
5189
5190 in_p = 0;
5191 low = high = build_int_cst (TREE_TYPE (exp), 0);
5192
5193 while (1)
5194 {
5195 code = TREE_CODE (exp);
5196 exp_type = TREE_TYPE (exp);
5197 arg0 = NULL_TREE;
5198
5199 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5200 {
5201 if (TREE_OPERAND_LENGTH (exp) > 0)
5202 arg0 = TREE_OPERAND (exp, 0);
5203 if (TREE_CODE_CLASS (code) == tcc_binary
5204 || TREE_CODE_CLASS (code) == tcc_comparison
5205 || (TREE_CODE_CLASS (code) == tcc_expression
5206 && TREE_OPERAND_LENGTH (exp) > 1))
5207 arg1 = TREE_OPERAND (exp, 1);
5208 }
5209 if (arg0 == NULL_TREE)
5210 break;
5211
5212 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5213 &high, &in_p, strict_overflow_p);
5214 if (nexp == NULL_TREE)
5215 break;
5216 exp = nexp;
5217 }
5218
5219 /* If EXP is a constant, we can evaluate whether this is true or false. */
5220 if (TREE_CODE (exp) == INTEGER_CST)
5221 {
5222 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5223 exp, 0, low, 0))
5224 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5225 exp, 1, high, 1)));
5226 low = high = 0;
5227 exp = 0;
5228 }
5229
5230 *pin_p = in_p, *plow = low, *phigh = high;
5231 return exp;
5232 }
5233
5234 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to
5235 a bitwise check, i.e. when
5236 LOW == 0xXX...X00...0
5237 HIGH == 0xXX...X11...1
5238 Return corresponding mask in MASK and stem in VALUE. */
5239
5240 static bool
5241 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5242 tree *value)
5243 {
5244 if (TREE_CODE (low) != INTEGER_CST
5245 || TREE_CODE (high) != INTEGER_CST)
5246 return false;
5247
5248 unsigned prec = TYPE_PRECISION (type);
5249 wide_int lo = wi::to_wide (low, prec);
5250 wide_int hi = wi::to_wide (high, prec);
5251
5252 wide_int end_mask = lo ^ hi;
5253 if ((end_mask & (end_mask + 1)) != 0
5254 || (lo & end_mask) != 0)
5255 return false;
5256
5257 wide_int stem_mask = ~end_mask;
5258 wide_int stem = lo & stem_mask;
5259 if (stem != (hi & stem_mask))
5260 return false;
5261
5262 *mask = wide_int_to_tree (type, stem_mask);
5263 *value = wide_int_to_tree (type, stem);
5264
5265 return true;
5266 }
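
/* Illustrative sketch, not part of GCC: the decomposition maskable_range_p
   performs, for the concrete range [0x20, 0x2f].  low ^ high is 0x0f, a
   block of trailing ones, so the stem mask is ~0x0f, the stem is 0x20, and
   the range check collapses to a single masked equality.  */
#if 0
static int
in_0x20_to_0x2f (unsigned int x)
{
  return (x & ~0x0fu) == 0x20u;
}
#endif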
5267 \f
5268 /* Helper routine for build_range_check and match.pd. Return the type to
5269 perform the check or NULL if it shouldn't be optimized. */
5270
5271 tree
5272 range_check_type (tree etype)
5273 {
5274 /* First make sure that arithmetic in this type is valid, then make sure
5275 that it wraps around. */
5276 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5277 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5278
5279 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5280 {
5281 tree utype, minv, maxv;
5282
5283 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5284 for the type in question, as we rely on this here. */
5285 utype = unsigned_type_for (etype);
5286 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5287 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5288 build_int_cst (TREE_TYPE (maxv), 1), 1);
5289 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5290
5291 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5292 minv, 1, maxv, 1)))
5293 etype = utype;
5294 else
5295 return NULL_TREE;
5296 }
5297 else if (POINTER_TYPE_P (etype))
5298 etype = unsigned_type_for (etype);
5299 return etype;
5300 }
5301
5302 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5303 type, TYPE, return an expression to test if EXP is in (or out of, depending
5304 on IN_P) the range. Return 0 if the test couldn't be created. */
5305
5306 tree
5307 build_range_check (location_t loc, tree type, tree exp, int in_p,
5308 tree low, tree high)
5309 {
5310 tree etype = TREE_TYPE (exp), mask, value;
5311
5312 /* Disable this optimization for function pointer expressions
5313 on targets that require function pointer canonicalization. */
5314 if (targetm.have_canonicalize_funcptr_for_compare ()
5315 && POINTER_TYPE_P (etype)
5316 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5317 return NULL_TREE;
5318
5319 if (! in_p)
5320 {
5321 value = build_range_check (loc, type, exp, 1, low, high);
5322 if (value != 0)
5323 return invert_truthvalue_loc (loc, value);
5324
5325 return 0;
5326 }
5327
5328 if (low == 0 && high == 0)
5329 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5330
5331 if (low == 0)
5332 return fold_build2_loc (loc, LE_EXPR, type, exp,
5333 fold_convert_loc (loc, etype, high));
5334
5335 if (high == 0)
5336 return fold_build2_loc (loc, GE_EXPR, type, exp,
5337 fold_convert_loc (loc, etype, low));
5338
5339 if (operand_equal_p (low, high, 0))
5340 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5341 fold_convert_loc (loc, etype, low));
5342
5343 if (TREE_CODE (exp) == BIT_AND_EXPR
5344 && maskable_range_p (low, high, etype, &mask, &value))
5345 return fold_build2_loc (loc, EQ_EXPR, type,
5346 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5347 exp, mask),
5348 value);
5349
5350 if (integer_zerop (low))
5351 {
5352 if (! TYPE_UNSIGNED (etype))
5353 {
5354 etype = unsigned_type_for (etype);
5355 high = fold_convert_loc (loc, etype, high);
5356 exp = fold_convert_loc (loc, etype, exp);
5357 }
5358 return build_range_check (loc, type, exp, 1, 0, high);
5359 }
5360
5361 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5362 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5363 {
5364 int prec = TYPE_PRECISION (etype);
5365
5366 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5367 {
5368 if (TYPE_UNSIGNED (etype))
5369 {
5370 tree signed_etype = signed_type_for (etype);
5371 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5372 etype
5373 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5374 else
5375 etype = signed_etype;
5376 exp = fold_convert_loc (loc, etype, exp);
5377 }
5378 return fold_build2_loc (loc, GT_EXPR, type, exp,
5379 build_int_cst (etype, 0));
5380 }
5381 }
5382
5383 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5384 This requires wrap-around arithmetic for the type of the expression. */
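/* Illustrative example (added): with wrap-around arithmetic, a test such as
   'c >= 'a' && c <= 'z'' can be expressed as the single unsigned comparison
   '(unsigned) (c - 'a') <= 25', assuming 'c' has an integer type for which
   range_check_type succeeds.  */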
5385 etype = range_check_type (etype);
5386 if (etype == NULL_TREE)
5387 return NULL_TREE;
5388
5389 high = fold_convert_loc (loc, etype, high);
5390 low = fold_convert_loc (loc, etype, low);
5391 exp = fold_convert_loc (loc, etype, exp);
5392
5393 value = const_binop (MINUS_EXPR, high, low);
5394
5395 if (value != 0 && !TREE_OVERFLOW (value))
5396 return build_range_check (loc, type,
5397 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5398 1, build_int_cst (etype, 0), value);
5399
5400 return 0;
5401 }
5402 \f
5403 /* Return the predecessor of VAL in its type, handling the infinite case. */
5404
5405 static tree
5406 range_predecessor (tree val)
5407 {
5408 tree type = TREE_TYPE (val);
5409
5410 if (INTEGRAL_TYPE_P (type)
5411 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5412 return 0;
5413 else
5414 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5415 build_int_cst (TREE_TYPE (val), 1), 0);
5416 }
5417
5418 /* Return the successor of VAL in its type, handling the infinite case. */
5419
5420 static tree
5421 range_successor (tree val)
5422 {
5423 tree type = TREE_TYPE (val);
5424
5425 if (INTEGRAL_TYPE_P (type)
5426 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5427 return 0;
5428 else
5429 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5430 build_int_cst (TREE_TYPE (val), 1), 0);
5431 }
5432
5433 /* Given two ranges, see if we can merge them into one. Return 1 if we
5434 can, 0 if we can't. Set the output range into the specified parameters. */
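/* Illustrative examples (added): merging the included ranges +[1, 10] and
   +[5, 20] yields +[5, 10]; merging the disjoint included ranges +[1, 5] and
   +[10, 20] yields an always-false result (*PIN_P = 0 with both bounds
   unbounded).  */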
5435
5436 bool
5437 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5438 tree high0, int in1_p, tree low1, tree high1)
5439 {
5440 int no_overlap;
5441 int subset;
5442 int temp;
5443 tree tem;
5444 int in_p;
5445 tree low, high;
5446 int lowequal = ((low0 == 0 && low1 == 0)
5447 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5448 low0, 0, low1, 0)));
5449 int highequal = ((high0 == 0 && high1 == 0)
5450 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5451 high0, 1, high1, 1)));
5452
5453 /* Make range 0 be the range that starts first, or ends last if they
5454 start at the same value. Swap them if that is not already the case. */
5455 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5456 low0, 0, low1, 0))
5457 || (lowequal
5458 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5459 high1, 1, high0, 1))))
5460 {
5461 temp = in0_p, in0_p = in1_p, in1_p = temp;
5462 tem = low0, low0 = low1, low1 = tem;
5463 tem = high0, high0 = high1, high1 = tem;
5464 }
5465
5466 /* If the second range is != high1 where high1 is the maximum value of
5467 its type, first try merging with the < high1 range. */
5468 if (low1
5469 && high1
5470 && TREE_CODE (low1) == INTEGER_CST
5471 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5472 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5473 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5474 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5475 && operand_equal_p (low1, high1, 0))
5476 {
5477 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5478 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5479 !in1_p, NULL_TREE, range_predecessor (low1)))
5480 return true;
5481 /* Similarly, if the second range is != low1 where low1 is the minimum
5482 value of its type, first try merging with the > low1 range. */
5483 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5484 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5485 !in1_p, range_successor (low1), NULL_TREE))
5486 return true;
5487 }
5488
5489 /* Now flag two cases, whether the ranges are disjoint or whether the
5490 second range is totally subsumed in the first. Note that the tests
5491 below are simplified by the ones above. */
5492 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5493 high0, 1, low1, 0));
5494 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5495 high1, 1, high0, 1));
5496
5497 /* We now have four cases, depending on whether we are including or
5498 excluding the two ranges. */
5499 if (in0_p && in1_p)
5500 {
5501 /* If they don't overlap, the result is false. If the second range
5502 is a subset it is the result. Otherwise, the range is from the start
5503 of the second to the end of the first. */
5504 if (no_overlap)
5505 in_p = 0, low = high = 0;
5506 else if (subset)
5507 in_p = 1, low = low1, high = high1;
5508 else
5509 in_p = 1, low = low1, high = high0;
5510 }
5511
5512 else if (in0_p && ! in1_p)
5513 {
5514 /* If they don't overlap, the result is the first range. If they are
5515 equal, the result is false. If the second range is a subset of the
5516 first, and the ranges begin at the same place, we go from just after
5517 the end of the second range to the end of the first. If the second
5518 range is not a subset of the first, or if it is a subset and both
5519 ranges end at the same place, the range starts at the start of the
5520 first range and ends just before the second range.
5521 Otherwise, we can't describe this as a single range. */
5522 if (no_overlap)
5523 in_p = 1, low = low0, high = high0;
5524 else if (lowequal && highequal)
5525 in_p = 0, low = high = 0;
5526 else if (subset && lowequal)
5527 {
5528 low = range_successor (high1);
5529 high = high0;
5530 in_p = 1;
5531 if (low == 0)
5532 {
5533 /* We are in the weird situation where high0 > high1 but
5534 high1 has no successor. Punt. */
5535 return 0;
5536 }
5537 }
5538 else if (! subset || highequal)
5539 {
5540 low = low0;
5541 high = range_predecessor (low1);
5542 in_p = 1;
5543 if (high == 0)
5544 {
5545 /* low0 < low1 but low1 has no predecessor. Punt. */
5546 return 0;
5547 }
5548 }
5549 else
5550 return 0;
5551 }
5552
5553 else if (! in0_p && in1_p)
5554 {
5555 /* If they don't overlap, the result is the second range. If the second
5556 is a subset of the first, the result is false. Otherwise,
5557 the range starts just after the first range and ends at the
5558 end of the second. */
5559 if (no_overlap)
5560 in_p = 1, low = low1, high = high1;
5561 else if (subset || highequal)
5562 in_p = 0, low = high = 0;
5563 else
5564 {
5565 low = range_successor (high0);
5566 high = high1;
5567 in_p = 1;
5568 if (low == 0)
5569 {
5570 /* high1 > high0 but high0 has no successor. Punt. */
5571 return 0;
5572 }
5573 }
5574 }
5575
5576 else
5577 {
5578 /* The case where we are excluding both ranges. Here the complex case
5579 is if they don't overlap. In that case, the only time we have a
5580 range is if they are adjacent. If the second is a subset of the
5581 first, the result is the first. Otherwise, the range to exclude
5582 starts at the beginning of the first range and ends at the end of the
5583 second. */
5584 if (no_overlap)
5585 {
5586 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5587 range_successor (high0),
5588 1, low1, 0)))
5589 in_p = 0, low = low0, high = high1;
5590 else
5591 {
5592 /* Canonicalize - [min, x] into - [-, x]. */
5593 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5594 switch (TREE_CODE (TREE_TYPE (low0)))
5595 {
5596 case ENUMERAL_TYPE:
5597 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5598 GET_MODE_BITSIZE
5599 (TYPE_MODE (TREE_TYPE (low0)))))
5600 break;
5601 /* FALLTHROUGH */
5602 case INTEGER_TYPE:
5603 if (tree_int_cst_equal (low0,
5604 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5605 low0 = 0;
5606 break;
5607 case POINTER_TYPE:
5608 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5609 && integer_zerop (low0))
5610 low0 = 0;
5611 break;
5612 default:
5613 break;
5614 }
5615
5616 /* Canonicalize - [x, max] into - [x, -]. */
5617 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5618 switch (TREE_CODE (TREE_TYPE (high1)))
5619 {
5620 case ENUMERAL_TYPE:
5621 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5622 GET_MODE_BITSIZE
5623 (TYPE_MODE (TREE_TYPE (high1)))))
5624 break;
5625 /* FALLTHROUGH */
5626 case INTEGER_TYPE:
5627 if (tree_int_cst_equal (high1,
5628 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5629 high1 = 0;
5630 break;
5631 case POINTER_TYPE:
5632 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5633 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5634 high1, 1,
5635 build_int_cst (TREE_TYPE (high1), 1),
5636 1)))
5637 high1 = 0;
5638 break;
5639 default:
5640 break;
5641 }
5642
5643 /* The ranges might also be adjacent between the maximum and
5644 minimum values of the given type. For
5645 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5646 return + [x + 1, y - 1]. */
5647 if (low0 == 0 && high1 == 0)
5648 {
5649 low = range_successor (high0);
5650 high = range_predecessor (low1);
5651 if (low == 0 || high == 0)
5652 return 0;
5653
5654 in_p = 1;
5655 }
5656 else
5657 return 0;
5658 }
5659 }
5660 else if (subset)
5661 in_p = 0, low = low0, high = high0;
5662 else
5663 in_p = 0, low = low0, high = high1;
5664 }
5665
5666 *pin_p = in_p, *plow = low, *phigh = high;
5667 return 1;
5668 }
5669 \f
5670
5671 /* Subroutine of fold, looking inside expressions of the form
5672 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5673 of the COND_EXPR. This function is being used also to optimize
5674 A op B ? C : A, by reversing the comparison first.
5675
5676 Return a folded expression whose code is not a COND_EXPR
5677 anymore, or NULL_TREE if no folding opportunity is found. */
5678
5679 static tree
5680 fold_cond_expr_with_comparison (location_t loc, tree type,
5681 tree arg0, tree arg1, tree arg2)
5682 {
5683 enum tree_code comp_code = TREE_CODE (arg0);
5684 tree arg00 = TREE_OPERAND (arg0, 0);
5685 tree arg01 = TREE_OPERAND (arg0, 1);
5686 tree arg1_type = TREE_TYPE (arg1);
5687 tree tem;
5688
5689 STRIP_NOPS (arg1);
5690 STRIP_NOPS (arg2);
5691
5692 /* If we have A op 0 ? A : -A, consider applying the following
5693 transformations:
5694
5695 A == 0? A : -A same as -A
5696 A != 0? A : -A same as A
5697 A >= 0? A : -A same as abs (A)
5698 A > 0? A : -A same as abs (A)
5699 A <= 0? A : -A same as -abs (A)
5700 A < 0? A : -A same as -abs (A)
5701
5702 None of these transformations work for modes with signed
5703 zeros. If A is +/-0, the first two transformations will
5704 change the sign of the result (from +0 to -0, or vice
5705 versa). The last four will fix the sign of the result,
5706 even though the original expressions could be positive or
5707 negative, depending on the sign of A.
5708
5709 Note that all these transformations are correct if A is
5710 NaN, since the two alternatives (A and -A) are also NaNs. */
5711 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5712 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5713 ? real_zerop (arg01)
5714 : integer_zerop (arg01))
5715 && ((TREE_CODE (arg2) == NEGATE_EXPR
5716 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5717 /* In the case that A is of the form X-Y, '-A' (arg2) may
5718 have already been folded to Y-X, check for that. */
5719 || (TREE_CODE (arg1) == MINUS_EXPR
5720 && TREE_CODE (arg2) == MINUS_EXPR
5721 && operand_equal_p (TREE_OPERAND (arg1, 0),
5722 TREE_OPERAND (arg2, 1), 0)
5723 && operand_equal_p (TREE_OPERAND (arg1, 1),
5724 TREE_OPERAND (arg2, 0), 0))))
5725 switch (comp_code)
5726 {
5727 case EQ_EXPR:
5728 case UNEQ_EXPR:
5729 tem = fold_convert_loc (loc, arg1_type, arg1);
5730 return fold_convert_loc (loc, type, negate_expr (tem));
5731 case NE_EXPR:
5732 case LTGT_EXPR:
5733 return fold_convert_loc (loc, type, arg1);
5734 case UNGE_EXPR:
5735 case UNGT_EXPR:
5736 if (flag_trapping_math)
5737 break;
5738 /* Fall through. */
5739 case GE_EXPR:
5740 case GT_EXPR:
5741 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5742 break;
5743 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5744 return fold_convert_loc (loc, type, tem);
5745 case UNLE_EXPR:
5746 case UNLT_EXPR:
5747 if (flag_trapping_math)
5748 break;
5749 /* FALLTHRU */
5750 case LE_EXPR:
5751 case LT_EXPR:
5752 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5753 break;
5754 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5755 return negate_expr (fold_convert_loc (loc, type, tem));
5756 default:
5757 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5758 break;
5759 }
5760
5761 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5762 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5763 both transformations are correct when A is NaN: A != 0
5764 is then true, and A == 0 is false. */
5765
5766 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5767 && integer_zerop (arg01) && integer_zerop (arg2))
5768 {
5769 if (comp_code == NE_EXPR)
5770 return fold_convert_loc (loc, type, arg1);
5771 else if (comp_code == EQ_EXPR)
5772 return build_zero_cst (type);
5773 }
5774
5775 /* Try some transformations of A op B ? A : B.
5776
5777 A == B? A : B same as B
5778 A != B? A : B same as A
5779 A >= B? A : B same as max (A, B)
5780 A > B? A : B same as max (B, A)
5781 A <= B? A : B same as min (A, B)
5782 A < B? A : B same as min (B, A)
5783
5784 As above, these transformations don't work in the presence
5785 of signed zeros. For example, if A and B are zeros of
5786 opposite sign, the first two transformations will change
5787 the sign of the result. In the last four, the original
5788 expressions give different results for (A=+0, B=-0) and
5789 (A=-0, B=+0), but the transformed expressions do not.
5790
5791 The first two transformations are correct if either A or B
5792 is a NaN. In the first transformation, the condition will
5793 be false, and B will indeed be chosen. In the case of the
5794 second transformation, the condition A != B will be true,
5795 and A will be chosen.
5796
5797 The conversions to max() and min() are not correct if B is
5798 a number and A is not. The conditions in the original
5799 expressions will be false, so all four give B. The min()
5800 and max() versions would give a NaN instead. */
5801 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5802 && operand_equal_for_comparison_p (arg01, arg2)
5803 /* Avoid these transformations if the COND_EXPR may be used
5804 as an lvalue in the C++ front-end. PR c++/19199. */
5805 && (in_gimple_form
5806 || VECTOR_TYPE_P (type)
5807 || (! lang_GNU_CXX ()
5808 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5809 || ! maybe_lvalue_p (arg1)
5810 || ! maybe_lvalue_p (arg2)))
5811 {
5812 tree comp_op0 = arg00;
5813 tree comp_op1 = arg01;
5814 tree comp_type = TREE_TYPE (comp_op0);
5815
5816 switch (comp_code)
5817 {
5818 case EQ_EXPR:
5819 return fold_convert_loc (loc, type, arg2);
5820 case NE_EXPR:
5821 return fold_convert_loc (loc, type, arg1);
5822 case LE_EXPR:
5823 case LT_EXPR:
5824 case UNLE_EXPR:
5825 case UNLT_EXPR:
5826 /* In C++ a ?: expression can be an lvalue, so put the
5827 operand which will be used if they are equal first
5828 so that we can convert this back to the
5829 corresponding COND_EXPR. */
5830 if (!HONOR_NANS (arg1))
5831 {
5832 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5833 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5834 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5835 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5836 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5837 comp_op1, comp_op0);
5838 return fold_convert_loc (loc, type, tem);
5839 }
5840 break;
5841 case GE_EXPR:
5842 case GT_EXPR:
5843 case UNGE_EXPR:
5844 case UNGT_EXPR:
5845 if (!HONOR_NANS (arg1))
5846 {
5847 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5848 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5849 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5850 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5851 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5852 comp_op1, comp_op0);
5853 return fold_convert_loc (loc, type, tem);
5854 }
5855 break;
5856 case UNEQ_EXPR:
5857 if (!HONOR_NANS (arg1))
5858 return fold_convert_loc (loc, type, arg2);
5859 break;
5860 case LTGT_EXPR:
5861 if (!HONOR_NANS (arg1))
5862 return fold_convert_loc (loc, type, arg1);
5863 break;
5864 default:
5865 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5866 break;
5867 }
5868 }
5869
5870 return NULL_TREE;
5871 }
5872
5873
5874 \f
5875 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5876 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5877 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5878 false) >= 2)
5879 #endif
5880
5881 /* EXP is some logical combination of boolean tests. See if we can
5882 merge it into some range test. Return the new tree if so. */
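/* Illustrative example (added): for an unsigned 'a', 'a == 0 || a == 1' can
   be merged into the single range test 'a <= 1'.  */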
5883
5884 static tree
5885 fold_range_test (location_t loc, enum tree_code code, tree type,
5886 tree op0, tree op1)
5887 {
5888 int or_op = (code == TRUTH_ORIF_EXPR
5889 || code == TRUTH_OR_EXPR);
5890 int in0_p, in1_p, in_p;
5891 tree low0, low1, low, high0, high1, high;
5892 bool strict_overflow_p = false;
5893 tree tem, lhs, rhs;
5894 const char * const warnmsg = G_("assuming signed overflow does not occur "
5895 "when simplifying range test");
5896
5897 if (!INTEGRAL_TYPE_P (type))
5898 return 0;
5899
5900 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5901 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5902
5903 /* If this is an OR operation, invert both sides; we will invert
5904 again at the end. */
5905 if (or_op)
5906 in0_p = ! in0_p, in1_p = ! in1_p;
5907
5908 /* If both expressions are the same, if we can merge the ranges, and we
5909 can build the range test, return it or its inverse. If one of the
5910 ranges is always true or always false, consider it to be the same
5911 expression as the other. */
5912 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5913 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5914 in1_p, low1, high1)
5915 && (tem = (build_range_check (loc, type,
5916 lhs != 0 ? lhs
5917 : rhs != 0 ? rhs : integer_zero_node,
5918 in_p, low, high))) != 0)
5919 {
5920 if (strict_overflow_p)
5921 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5922 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5923 }
5924
5925 /* On machines where the branch cost is expensive, if this is a
5926 short-circuited branch and the underlying object on both sides
5927 is the same, make a non-short-circuit operation. */
5928 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
5929 if (PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT) != -1)
5930 logical_op_non_short_circuit
5931 = PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT);
5932 if (logical_op_non_short_circuit
5933 && !flag_sanitize_coverage
5934 && lhs != 0 && rhs != 0
5935 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5936 && operand_equal_p (lhs, rhs, 0))
5937 {
5938 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5939 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5940 which case we can't do this. */
5941 if (simple_operand_p (lhs))
5942 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5943 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5944 type, op0, op1);
5945
5946 else if (!lang_hooks.decls.global_bindings_p ()
5947 && !CONTAINS_PLACEHOLDER_P (lhs))
5948 {
5949 tree common = save_expr (lhs);
5950
5951 if ((lhs = build_range_check (loc, type, common,
5952 or_op ? ! in0_p : in0_p,
5953 low0, high0)) != 0
5954 && (rhs = build_range_check (loc, type, common,
5955 or_op ? ! in1_p : in1_p,
5956 low1, high1)) != 0)
5957 {
5958 if (strict_overflow_p)
5959 fold_overflow_warning (warnmsg,
5960 WARN_STRICT_OVERFLOW_COMPARISON);
5961 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5962 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5963 type, lhs, rhs);
5964 }
5965 }
5966 }
5967
5968 return 0;
5969 }
5970 \f
5971 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5972 bit value. Arrange things so the extra bits will be set to zero if and
5973 only if C is sign-extended to its full width. If MASK is nonzero,
5974 it is an INTEGER_CST that should be AND'ed with the extra bits. */
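/* Worked example (added, assuming no MASK): for P = 4 in an 8-bit mode,
   C = 0xfa (the sign-extension of the 4-bit value -6) yields 0x0a, whose
   extra bits are clear, while C = 0x0a yields 0xfa, whose extra bits are
   set.  */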
5975
5976 static tree
5977 unextend (tree c, int p, int unsignedp, tree mask)
5978 {
5979 tree type = TREE_TYPE (c);
5980 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5981 tree temp;
5982
5983 if (p == modesize || unsignedp)
5984 return c;
5985
5986 /* We work by getting just the sign bit into the low-order bit, then
5987 into the high-order bit, then sign-extend. We then XOR that value
5988 with C. */
5989 temp = build_int_cst (TREE_TYPE (c),
5990 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
5991
5992 /* We must use a signed type in order to get an arithmetic right shift.
5993 However, we must also avoid introducing accidental overflows, so that
5994 a subsequent call to integer_zerop will work. Hence we must
5995 do the type conversion here. At this point, the constant is either
5996 zero or one, and the conversion to a signed type can never overflow.
5997 We could get an overflow if this conversion is done anywhere else. */
5998 if (TYPE_UNSIGNED (type))
5999 temp = fold_convert (signed_type_for (type), temp);
6000
6001 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6002 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6003 if (mask != 0)
6004 temp = const_binop (BIT_AND_EXPR, temp,
6005 fold_convert (TREE_TYPE (c), mask));
6006 /* If necessary, convert the type back to match the type of C. */
6007 if (TYPE_UNSIGNED (type))
6008 temp = fold_convert (type, temp);
6009
6010 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6011 }
6012 \f
6013 /* For an expression that has the form
6014 (A && B) || ~B
6015 or
6016 (A || B) && ~B,
6017 we can drop one of the inner expressions and simplify to
6018 A || ~B
6019 or
6020 A && ~B
6021 LOC is the location of the resulting expression. OP is the inner
6022 logical operation (the left-hand side in the examples above), while CMPOP
6023 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6024 removing a condition that guards another, as in
6025 (A != NULL && A->...) || A == NULL
6026 which we must not transform. If RHS_ONLY is true, only eliminate the
6027 right-most operand of the inner logical operation. */
6028
6029 static tree
6030 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6031 bool rhs_only)
6032 {
6033 tree type = TREE_TYPE (cmpop);
6034 enum tree_code code = TREE_CODE (cmpop);
6035 enum tree_code truthop_code = TREE_CODE (op);
6036 tree lhs = TREE_OPERAND (op, 0);
6037 tree rhs = TREE_OPERAND (op, 1);
6038 tree orig_lhs = lhs, orig_rhs = rhs;
6039 enum tree_code rhs_code = TREE_CODE (rhs);
6040 enum tree_code lhs_code = TREE_CODE (lhs);
6041 enum tree_code inv_code;
6042
6043 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6044 return NULL_TREE;
6045
6046 if (TREE_CODE_CLASS (code) != tcc_comparison)
6047 return NULL_TREE;
6048
6049 if (rhs_code == truthop_code)
6050 {
6051 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6052 if (newrhs != NULL_TREE)
6053 {
6054 rhs = newrhs;
6055 rhs_code = TREE_CODE (rhs);
6056 }
6057 }
6058 if (lhs_code == truthop_code && !rhs_only)
6059 {
6060 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6061 if (newlhs != NULL_TREE)
6062 {
6063 lhs = newlhs;
6064 lhs_code = TREE_CODE (lhs);
6065 }
6066 }
6067
6068 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6069 if (inv_code == rhs_code
6070 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6071 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6072 return lhs;
6073 if (!rhs_only && inv_code == lhs_code
6074 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6075 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6076 return rhs;
6077 if (rhs != orig_rhs || lhs != orig_lhs)
6078 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6079 lhs, rhs);
6080 return NULL_TREE;
6081 }
6082
6083 /* Find ways of folding logical expressions of LHS and RHS:
6084 Try to merge two comparisons to the same innermost item.
6085 Look for range tests like "ch >= '0' && ch <= '9'".
6086 Look for combinations of simple terms on machines with expensive branches
6087 and evaluate the RHS unconditionally.
6088
6089 For example, if we have p->a == 2 && p->b == 4 and we can make an
6090 object large enough to span both A and B, we can do this with a comparison
6091 against the object ANDed with the a mask.
6092
6093 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6094 operations to do this with one comparison.
6095
6096 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6097 function and the one above.
6098
6099 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6100 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6101
6102 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6103 two operands.
6104
6105 We return the simplified tree or 0 if no optimization is possible. */
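/* Illustrative example (added): given 'struct { unsigned a : 4; unsigned b : 4; } x;',
   'x.a == 2 && x.b == 3' may be turned into a single load of the byte that
   contains both fields, masked and compared against one merged constant
   whose exact value depends on the target's bit ordering.  */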
6106
6107 static tree
6108 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6109 tree lhs, tree rhs)
6110 {
6111 /* If this is the "or" of two comparisons, we can do something if
6112 the comparisons are NE_EXPR. If this is the "and", we can do something
6113 if the comparisons are EQ_EXPR. I.e.,
6114 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6115
6116 WANTED_CODE is this operation code. For single bit fields, we can
6117 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6118 comparison for one-bit fields. */
6119
6120 enum tree_code wanted_code;
6121 enum tree_code lcode, rcode;
6122 tree ll_arg, lr_arg, rl_arg, rr_arg;
6123 tree ll_inner, lr_inner, rl_inner, rr_inner;
6124 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6125 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6126 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6127 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6128 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6129 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6130 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6131 scalar_int_mode lnmode, rnmode;
6132 tree ll_mask, lr_mask, rl_mask, rr_mask;
6133 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6134 tree l_const, r_const;
6135 tree lntype, rntype, result;
6136 HOST_WIDE_INT first_bit, end_bit;
6137 int volatilep;
6138
6139 /* Start by getting the comparison codes. Fail if anything is volatile.
6140 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6141 it were surrounded with a NE_EXPR. */
6142
6143 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6144 return 0;
6145
6146 lcode = TREE_CODE (lhs);
6147 rcode = TREE_CODE (rhs);
6148
6149 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6150 {
6151 lhs = build2 (NE_EXPR, truth_type, lhs,
6152 build_int_cst (TREE_TYPE (lhs), 0));
6153 lcode = NE_EXPR;
6154 }
6155
6156 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6157 {
6158 rhs = build2 (NE_EXPR, truth_type, rhs,
6159 build_int_cst (TREE_TYPE (rhs), 0));
6160 rcode = NE_EXPR;
6161 }
6162
6163 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6164 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6165 return 0;
6166
6167 ll_arg = TREE_OPERAND (lhs, 0);
6168 lr_arg = TREE_OPERAND (lhs, 1);
6169 rl_arg = TREE_OPERAND (rhs, 0);
6170 rr_arg = TREE_OPERAND (rhs, 1);
6171
6172 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6173 if (simple_operand_p (ll_arg)
6174 && simple_operand_p (lr_arg))
6175 {
6176 if (operand_equal_p (ll_arg, rl_arg, 0)
6177 && operand_equal_p (lr_arg, rr_arg, 0))
6178 {
6179 result = combine_comparisons (loc, code, lcode, rcode,
6180 truth_type, ll_arg, lr_arg);
6181 if (result)
6182 return result;
6183 }
6184 else if (operand_equal_p (ll_arg, rr_arg, 0)
6185 && operand_equal_p (lr_arg, rl_arg, 0))
6186 {
6187 result = combine_comparisons (loc, code, lcode,
6188 swap_tree_comparison (rcode),
6189 truth_type, ll_arg, lr_arg);
6190 if (result)
6191 return result;
6192 }
6193 }
6194
6195 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6196 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6197
6198 /* If the RHS can be evaluated unconditionally and its operands are
6199 simple, it wins to evaluate the RHS unconditionally on machines
6200 with expensive branches. In this case, this isn't a comparison
6201 that can be merged. */
6202
6203 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6204 false) >= 2
6205 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6206 && simple_operand_p (rl_arg)
6207 && simple_operand_p (rr_arg))
6208 {
6209 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6210 if (code == TRUTH_OR_EXPR
6211 && lcode == NE_EXPR && integer_zerop (lr_arg)
6212 && rcode == NE_EXPR && integer_zerop (rr_arg)
6213 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6214 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6215 return build2_loc (loc, NE_EXPR, truth_type,
6216 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6217 ll_arg, rl_arg),
6218 build_int_cst (TREE_TYPE (ll_arg), 0));
6219
6220 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6221 if (code == TRUTH_AND_EXPR
6222 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6223 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6224 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6225 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6226 return build2_loc (loc, EQ_EXPR, truth_type,
6227 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6228 ll_arg, rl_arg),
6229 build_int_cst (TREE_TYPE (ll_arg), 0));
6230 }
6231
6232 /* See if the comparisons can be merged. Then get all the parameters for
6233 each side. */
6234
6235 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6236 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6237 return 0;
6238
6239 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6240 volatilep = 0;
6241 ll_inner = decode_field_reference (loc, &ll_arg,
6242 &ll_bitsize, &ll_bitpos, &ll_mode,
6243 &ll_unsignedp, &ll_reversep, &volatilep,
6244 &ll_mask, &ll_and_mask);
6245 lr_inner = decode_field_reference (loc, &lr_arg,
6246 &lr_bitsize, &lr_bitpos, &lr_mode,
6247 &lr_unsignedp, &lr_reversep, &volatilep,
6248 &lr_mask, &lr_and_mask);
6249 rl_inner = decode_field_reference (loc, &rl_arg,
6250 &rl_bitsize, &rl_bitpos, &rl_mode,
6251 &rl_unsignedp, &rl_reversep, &volatilep,
6252 &rl_mask, &rl_and_mask);
6253 rr_inner = decode_field_reference (loc, &rr_arg,
6254 &rr_bitsize, &rr_bitpos, &rr_mode,
6255 &rr_unsignedp, &rr_reversep, &volatilep,
6256 &rr_mask, &rr_and_mask);
6257
6258 /* The inner operation on the lhs of each comparison must be the same
6259 if we are to be able to do anything.
6260 Then see if we have constants. If not, the same must be true for
6261 the rhs's. */
6262 if (volatilep
6263 || ll_reversep != rl_reversep
6264 || ll_inner == 0 || rl_inner == 0
6265 || ! operand_equal_p (ll_inner, rl_inner, 0))
6266 return 0;
6267
6268 if (TREE_CODE (lr_arg) == INTEGER_CST
6269 && TREE_CODE (rr_arg) == INTEGER_CST)
6270 {
6271 l_const = lr_arg, r_const = rr_arg;
6272 lr_reversep = ll_reversep;
6273 }
6274 else if (lr_reversep != rr_reversep
6275 || lr_inner == 0 || rr_inner == 0
6276 || ! operand_equal_p (lr_inner, rr_inner, 0))
6277 return 0;
6278 else
6279 l_const = r_const = 0;
6280
6281 /* If either comparison code is not correct for our logical operation,
6282 fail. However, we can convert a one-bit comparison against zero into
6283 the opposite comparison against that bit being set in the field. */
6284
6285 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6286 if (lcode != wanted_code)
6287 {
6288 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6289 {
6290 /* Make the left operand unsigned, since we are only interested
6291 in the value of one bit. Otherwise we are doing the wrong
6292 thing below. */
6293 ll_unsignedp = 1;
6294 l_const = ll_mask;
6295 }
6296 else
6297 return 0;
6298 }
6299
6300 /* This is analogous to the code for l_const above. */
6301 if (rcode != wanted_code)
6302 {
6303 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6304 {
6305 rl_unsignedp = 1;
6306 r_const = rl_mask;
6307 }
6308 else
6309 return 0;
6310 }
6311
6312 /* See if we can find a mode that contains both fields being compared on
6313 the left. If we can't, fail. Otherwise, update all constants and masks
6314 to be relative to a field of that size. */
6315 first_bit = MIN (ll_bitpos, rl_bitpos);
6316 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6317 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6318 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6319 volatilep, &lnmode))
6320 return 0;
6321
6322 lnbitsize = GET_MODE_BITSIZE (lnmode);
6323 lnbitpos = first_bit & ~ (lnbitsize - 1);
6324 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6325 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6326
6327 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6328 {
6329 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6330 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6331 }
6332
6333 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6334 size_int (xll_bitpos));
6335 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6336 size_int (xrl_bitpos));
6337
6338 if (l_const)
6339 {
6340 l_const = fold_convert_loc (loc, lntype, l_const);
6341 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6342 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6343 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6344 fold_build1_loc (loc, BIT_NOT_EXPR,
6345 lntype, ll_mask))))
6346 {
6347 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6348
6349 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6350 }
6351 }
6352 if (r_const)
6353 {
6354 r_const = fold_convert_loc (loc, lntype, r_const);
6355 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6356 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6357 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6358 fold_build1_loc (loc, BIT_NOT_EXPR,
6359 lntype, rl_mask))))
6360 {
6361 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6362
6363 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6364 }
6365 }
6366
6367 /* If the right sides are not constant, do the same for them. Also,
6368 disallow this optimization if a size, signedness or storage order
6369 mismatch occurs between the left and right sides. */
6370 if (l_const == 0)
6371 {
6372 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6373 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6374 || ll_reversep != lr_reversep
6375 /* Make sure the two fields on the right
6376 correspond to the left without being swapped. */
6377 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6378 return 0;
6379
6380 first_bit = MIN (lr_bitpos, rr_bitpos);
6381 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6382 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6383 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6384 volatilep, &rnmode))
6385 return 0;
6386
6387 rnbitsize = GET_MODE_BITSIZE (rnmode);
6388 rnbitpos = first_bit & ~ (rnbitsize - 1);
6389 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6390 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6391
6392 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6393 {
6394 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6395 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6396 }
6397
6398 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6399 rntype, lr_mask),
6400 size_int (xlr_bitpos));
6401 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6402 rntype, rr_mask),
6403 size_int (xrr_bitpos));
6404
6405 /* Make a mask that corresponds to both fields being compared.
6406 Do this for both items being compared. If the operands are the
6407 same size and the bits being compared are in the same position
6408 then we can do this by masking both and comparing the masked
6409 results. */
6410 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6411 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6412 if (lnbitsize == rnbitsize
6413 && xll_bitpos == xlr_bitpos
6414 && lnbitpos >= 0
6415 && rnbitpos >= 0)
6416 {
6417 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6418 lntype, lnbitsize, lnbitpos,
6419 ll_unsignedp || rl_unsignedp, ll_reversep);
6420 if (! all_ones_mask_p (ll_mask, lnbitsize))
6421 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6422
6423 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6424 rntype, rnbitsize, rnbitpos,
6425 lr_unsignedp || rr_unsignedp, lr_reversep);
6426 if (! all_ones_mask_p (lr_mask, rnbitsize))
6427 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6428
6429 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6430 }
6431
6432 /* There is still another way we can do something: If both pairs of
6433 fields being compared are adjacent, we may be able to make a wider
6434 field containing them both.
6435
6436 Note that we still must mask the lhs/rhs expressions. Furthermore,
6437 the mask must be shifted to account for the shift done by
6438 make_bit_field_ref. */
6439 if (((ll_bitsize + ll_bitpos == rl_bitpos
6440 && lr_bitsize + lr_bitpos == rr_bitpos)
6441 || (ll_bitpos == rl_bitpos + rl_bitsize
6442 && lr_bitpos == rr_bitpos + rr_bitsize))
6443 && ll_bitpos >= 0
6444 && rl_bitpos >= 0
6445 && lr_bitpos >= 0
6446 && rr_bitpos >= 0)
6447 {
6448 tree type;
6449
6450 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6451 ll_bitsize + rl_bitsize,
6452 MIN (ll_bitpos, rl_bitpos),
6453 ll_unsignedp, ll_reversep);
6454 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6455 lr_bitsize + rr_bitsize,
6456 MIN (lr_bitpos, rr_bitpos),
6457 lr_unsignedp, lr_reversep);
6458
6459 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6460 size_int (MIN (xll_bitpos, xrl_bitpos)));
6461 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6462 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6463
6464 /* Convert to the smaller type before masking out unwanted bits. */
6465 type = lntype;
6466 if (lntype != rntype)
6467 {
6468 if (lnbitsize > rnbitsize)
6469 {
6470 lhs = fold_convert_loc (loc, rntype, lhs);
6471 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6472 type = rntype;
6473 }
6474 else if (lnbitsize < rnbitsize)
6475 {
6476 rhs = fold_convert_loc (loc, lntype, rhs);
6477 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6478 type = lntype;
6479 }
6480 }
6481
6482 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6483 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6484
6485 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6486 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6487
6488 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6489 }
6490
6491 return 0;
6492 }
6493
6494 /* Handle the case of comparisons with constants. If there is something in
6495 common between the masks, those bits of the constants must be the same.
6496 If not, the condition is always false. Test for this to avoid generating
6497 incorrect code below. */
6498 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6499 if (! integer_zerop (result)
6500 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6501 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6502 {
6503 if (wanted_code == NE_EXPR)
6504 {
6505 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6506 return constant_boolean_node (true, truth_type);
6507 }
6508 else
6509 {
6510 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6511 return constant_boolean_node (false, truth_type);
6512 }
6513 }
6514
6515 if (lnbitpos < 0)
6516 return 0;
6517
6518 /* Construct the expression we will return. First get the component
6519 reference we will make. Unless the mask is all ones the width of
6520 that field, perform the mask operation. Then compare with the
6521 merged constant. */
6522 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6523 lntype, lnbitsize, lnbitpos,
6524 ll_unsignedp || rl_unsignedp, ll_reversep);
6525
6526 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6527 if (! all_ones_mask_p (ll_mask, lnbitsize))
6528 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6529
6530 return build2_loc (loc, wanted_code, truth_type, result,
6531 const_binop (BIT_IOR_EXPR, l_const, r_const));
6532 }
6533 \f
6534 /* T is an integer expression that is being multiplied by, divided by, or
6535 taken modulo a constant C (CODE says which operation and what kind of
6536 divide or modulus). See if we can eliminate that operation by folding it with
6537 other operations already in T. WIDE_TYPE, if non-null, is a type that
6538 should be used for the computation if wider than our type.
6539
6540 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6541 (X * 2) + (Y * 4). We must, however, be assured that either the original
6542 expression would not overflow or that overflow is undefined for the type
6543 in the language in question.
6544
6545 If we return a non-null expression, it is an equivalent form of the
6546 original computation, but need not be in the original type.
6547
6548 We set *STRICT_OVERFLOW_P to true if the return value depends on
6549 signed overflow being undefined. Otherwise we do not change
6550 *STRICT_OVERFLOW_P. */
6551
6552 static tree
6553 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6554 bool *strict_overflow_p)
6555 {
6556 /* To avoid exponential search depth, refuse to allow recursion past
6557 three levels. Beyond that (1) it's highly unlikely that we'll find
6558 something interesting and (2) we've probably processed it before
6559 when we built the inner expression. */
6560
6561 static int depth;
6562 tree ret;
6563
6564 if (depth > 3)
6565 return NULL;
6566
6567 depth++;
6568 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6569 depth--;
6570
6571 return ret;
6572 }
6573
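/* Worker for extract_muldiv; the operands and return value are the same as
   for extract_muldiv above.  */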
6574 static tree
6575 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6576 bool *strict_overflow_p)
6577 {
6578 tree type = TREE_TYPE (t);
6579 enum tree_code tcode = TREE_CODE (t);
6580 tree ctype = (wide_type != 0
6581 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6582 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6583 ? wide_type : type);
6584 tree t1, t2;
6585 int same_p = tcode == code;
6586 tree op0 = NULL_TREE, op1 = NULL_TREE;
6587 bool sub_strict_overflow_p;
6588
6589 /* Don't deal with constants of zero here; they confuse the code below. */
6590 if (integer_zerop (c))
6591 return NULL_TREE;
6592
6593 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6594 op0 = TREE_OPERAND (t, 0);
6595
6596 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6597 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6598
6599 /* Note that we need not handle conditional operations here since fold
6600 already handles those cases. So just do arithmetic here. */
6601 switch (tcode)
6602 {
6603 case INTEGER_CST:
6604 /* For a constant, we can always simplify if we are a multiply
6605 or (for divide and modulus) if it is a multiple of our constant. */
6606 if (code == MULT_EXPR
6607 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6608 TYPE_SIGN (type)))
6609 {
6610 tree tem = const_binop (code, fold_convert (ctype, t),
6611 fold_convert (ctype, c));
6612 /* If the multiplication overflowed, we lost information on it.
6613 See PR68142 and PR69845. */
6614 if (TREE_OVERFLOW (tem))
6615 return NULL_TREE;
6616 return tem;
6617 }
6618 break;
6619
6620 CASE_CONVERT: case NON_LVALUE_EXPR:
6621 /* If op0 is an expression ... */
6622 if ((COMPARISON_CLASS_P (op0)
6623 || UNARY_CLASS_P (op0)
6624 || BINARY_CLASS_P (op0)
6625 || VL_EXP_CLASS_P (op0)
6626 || EXPRESSION_CLASS_P (op0))
6627 /* ... and has wrapping overflow, and its type is smaller
6628 than ctype, then we cannot pass through as widening. */
6629 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6630 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6631 && (TYPE_PRECISION (ctype)
6632 > TYPE_PRECISION (TREE_TYPE (op0))))
6633 /* ... or this is a truncation (t is narrower than op0),
6634 then we cannot pass through this narrowing. */
6635 || (TYPE_PRECISION (type)
6636 < TYPE_PRECISION (TREE_TYPE (op0)))
6637 /* ... or signedness changes for division or modulus,
6638 then we cannot pass through this conversion. */
6639 || (code != MULT_EXPR
6640 && (TYPE_UNSIGNED (ctype)
6641 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6642 /* ... or has undefined overflow while the converted to
6643 type has not, we cannot do the operation in the inner type
6644 as that would introduce undefined overflow. */
6645 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6646 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6647 && !TYPE_OVERFLOW_UNDEFINED (type))))
6648 break;
6649
6650 /* Pass the constant down and see if we can make a simplification. If
6651 we can, replace this expression with the inner simplification for
6652 possible later conversion to our or some other type. */
6653 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6654 && TREE_CODE (t2) == INTEGER_CST
6655 && !TREE_OVERFLOW (t2)
6656 && (t1 = extract_muldiv (op0, t2, code,
6657 code == MULT_EXPR ? ctype : NULL_TREE,
6658 strict_overflow_p)) != 0)
6659 return t1;
6660 break;
6661
6662 case ABS_EXPR:
6663 /* If widening the type changes it from signed to unsigned, then we
6664 must avoid building ABS_EXPR itself as unsigned. */
6665 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6666 {
6667 tree cstype = (*signed_type_for) (ctype);
6668 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6669 != 0)
6670 {
6671 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6672 return fold_convert (ctype, t1);
6673 }
6674 break;
6675 }
6676 /* If the constant is negative, we cannot simplify this. */
6677 if (tree_int_cst_sgn (c) == -1)
6678 break;
6679 /* FALLTHROUGH */
6680 case NEGATE_EXPR:
6681 /* For division and modulus, type can't be unsigned, as e.g.
6682 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6683 For signed types, even with wrapping overflow, this is fine. */
6684 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6685 break;
6686 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6687 != 0)
6688 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6689 break;
6690
6691 case MIN_EXPR: case MAX_EXPR:
6692 /* If widening the type changes the signedness, then we can't perform
6693 this optimization as that changes the result. */
6694 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6695 break;
6696
6697 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6698 sub_strict_overflow_p = false;
6699 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6700 &sub_strict_overflow_p)) != 0
6701 && (t2 = extract_muldiv (op1, c, code, wide_type,
6702 &sub_strict_overflow_p)) != 0)
6703 {
6704 if (tree_int_cst_sgn (c) < 0)
6705 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6706 if (sub_strict_overflow_p)
6707 *strict_overflow_p = true;
6708 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6709 fold_convert (ctype, t2));
6710 }
6711 break;
6712
6713 case LSHIFT_EXPR: case RSHIFT_EXPR:
6714 /* If the second operand is constant, this is a multiplication
6715 or floor division, by a power of two, so we can treat it that
6716 way unless the multiplier or divisor overflows. Signed
6717 left-shift overflow is implementation-defined rather than
6718 undefined in C90, so do not convert signed left shift into
6719 multiplication. */
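/* Illustrative example (added): 'x >> 2' is handled here as the floor
   division 'x / 4', and, for unsigned X, 'x << 3' as the multiplication
   'x * 8'.  */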
6720 if (TREE_CODE (op1) == INTEGER_CST
6721 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6722 /* const_binop may not detect overflow correctly,
6723 so check for it explicitly here. */
6724 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6725 wi::to_wide (op1))
6726 && (t1 = fold_convert (ctype,
6727 const_binop (LSHIFT_EXPR, size_one_node,
6728 op1))) != 0
6729 && !TREE_OVERFLOW (t1))
6730 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6731 ? MULT_EXPR : FLOOR_DIV_EXPR,
6732 ctype,
6733 fold_convert (ctype, op0),
6734 t1),
6735 c, code, wide_type, strict_overflow_p);
6736 break;
6737
6738 case PLUS_EXPR: case MINUS_EXPR:
6739 /* See if we can eliminate the operation on both sides. If we can, we
6740 can return a new PLUS or MINUS. If we can't, the only remaining
6741 cases where we can do anything are if the second operand is a
6742 constant. */
6743 sub_strict_overflow_p = false;
6744 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6745 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6746 if (t1 != 0 && t2 != 0
6747 && TYPE_OVERFLOW_WRAPS (ctype)
6748 && (code == MULT_EXPR
6749 /* If not multiplication, we can only do this if both operands
6750 are divisible by c. */
6751 || (multiple_of_p (ctype, op0, c)
6752 && multiple_of_p (ctype, op1, c))))
6753 {
6754 if (sub_strict_overflow_p)
6755 *strict_overflow_p = true;
6756 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6757 fold_convert (ctype, t2));
6758 }
6759
6760 /* If this was a subtraction, negate OP1 and set it to be an addition.
6761 This simplifies the logic below. */
6762 if (tcode == MINUS_EXPR)
6763 {
6764 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6765 /* If OP1 was not easily negatable, the constant may be OP0. */
6766 if (TREE_CODE (op0) == INTEGER_CST)
6767 {
6768 std::swap (op0, op1);
6769 std::swap (t1, t2);
6770 }
6771 }
6772
6773 if (TREE_CODE (op1) != INTEGER_CST)
6774 break;
6775
6776 /* If either OP1 or C is negative, this optimization is not safe for
6777 some of the division and remainder types while for others we need
6778 to change the code. */
6779 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6780 {
6781 if (code == CEIL_DIV_EXPR)
6782 code = FLOOR_DIV_EXPR;
6783 else if (code == FLOOR_DIV_EXPR)
6784 code = CEIL_DIV_EXPR;
6785 else if (code != MULT_EXPR
6786 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6787 break;
6788 }
6789
6790 /* If it's a multiply or a division/modulus operation of a multiple
6791 of our constant, do the operation and verify it doesn't overflow. */
6792 if (code == MULT_EXPR
6793 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6794 TYPE_SIGN (type)))
6795 {
6796 op1 = const_binop (code, fold_convert (ctype, op1),
6797 fold_convert (ctype, c));
6798 /* We allow the constant to overflow with wrapping semantics. */
6799 if (op1 == 0
6800 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6801 break;
6802 }
6803 else
6804 break;
6805
6806 /* If we have an unsigned type, we cannot widen the operation since it
6807 will change the result if the original computation overflowed. */
6808 if (TYPE_UNSIGNED (ctype) && ctype != type)
6809 break;
6810
6811 /* The last case is if we are a multiply. In that case, we can
6812 apply the distributive law to commute the multiply and addition
6813 if the multiplication of the constants doesn't overflow
6814 and overflow is defined. With undefined overflow
6815 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6816 But fold_plusminus_mult_expr would factor back any power-of-two
6817 value so do not distribute in the first place in this case. */
6818 if (code == MULT_EXPR
6819 && TYPE_OVERFLOW_WRAPS (ctype)
6820 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6821 return fold_build2 (tcode, ctype,
6822 fold_build2 (code, ctype,
6823 fold_convert (ctype, op0),
6824 fold_convert (ctype, c)),
6825 op1);
6826
6827 break;
6828
6829 case MULT_EXPR:
6830 /* We have a special case here if we are doing something like
6831 (C * 8) % 4 since we know that's zero. */
6832 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6833 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6834 /* If the multiplication can overflow we cannot optimize this. */
6835 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6836 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6837 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6838 TYPE_SIGN (type)))
6839 {
6840 *strict_overflow_p = true;
6841 return omit_one_operand (type, integer_zero_node, op0);
6842 }
6843
6844 /* ... fall through ... */
6845
6846 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6847 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6848 /* If we can extract our operation from the LHS, do so and return a
6849 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6850 do something only if the second operand is a constant. */
6851 if (same_p
6852 && TYPE_OVERFLOW_WRAPS (ctype)
6853 && (t1 = extract_muldiv (op0, c, code, wide_type,
6854 strict_overflow_p)) != 0)
6855 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6856 fold_convert (ctype, op1));
6857 else if (tcode == MULT_EXPR && code == MULT_EXPR
6858 && TYPE_OVERFLOW_WRAPS (ctype)
6859 && (t1 = extract_muldiv (op1, c, code, wide_type,
6860 strict_overflow_p)) != 0)
6861 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6862 fold_convert (ctype, t1));
6863 else if (TREE_CODE (op1) != INTEGER_CST)
6864 return 0;
6865
6866 /* If these are the same operation types, we can associate them
6867 assuming no overflow. */
6868 if (tcode == code)
6869 {
6870 bool overflow_p = false;
6871 wi::overflow_type overflow_mul;
6872 signop sign = TYPE_SIGN (ctype);
6873 unsigned prec = TYPE_PRECISION (ctype);
6874 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6875 wi::to_wide (c, prec),
6876 sign, &overflow_mul);
6877 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6878 if (overflow_mul
6879 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6880 overflow_p = true;
6881 if (!overflow_p)
6882 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6883 wide_int_to_tree (ctype, mul));
6884 }
6885
6886 /* If these operations "cancel" each other, we have the main
6887 optimizations of this pass, which occur when either constant is a
6888 multiple of the other, in which case we replace this with an operation
6889 of either CODE or TCODE.
6890
6891 If we have an unsigned type, we cannot do this since it will change
6892 the result if the original computation overflowed. */
6893 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6894 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6895 || (tcode == MULT_EXPR
6896 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6897 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6898 && code != MULT_EXPR)))
6899 {
6900 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6901 TYPE_SIGN (type)))
6902 {
6903 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6904 *strict_overflow_p = true;
6905 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6906 fold_convert (ctype,
6907 const_binop (TRUNC_DIV_EXPR,
6908 op1, c)));
6909 }
6910 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6911 TYPE_SIGN (type)))
6912 {
6913 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6914 *strict_overflow_p = true;
6915 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6916 fold_convert (ctype,
6917 const_binop (TRUNC_DIV_EXPR,
6918 c, op1)));
6919 }
6920 }
6921 break;
6922
6923 default:
6924 break;
6925 }
6926
6927 return 0;
6928 }
6929 \f
6930 /* Return a node which has the indicated constant VALUE (either 0 or
6931 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6932 and is of the indicated TYPE. */
6933
6934 tree
6935 constant_boolean_node (bool value, tree type)
6936 {
6937 if (type == integer_type_node)
6938 return value ? integer_one_node : integer_zero_node;
6939 else if (type == boolean_type_node)
6940 return value ? boolean_true_node : boolean_false_node;
6941 else if (TREE_CODE (type) == VECTOR_TYPE)
6942 return build_vector_from_val (type,
6943 build_int_cst (TREE_TYPE (type),
6944 value ? -1 : 0));
6945 else
6946 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6947 }
6948
6949
6950 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6951 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6952 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6953 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6954 COND is the first argument to CODE; otherwise (as in the example
6955 given here), it is the second argument. TYPE is the type of the
6956 original expression. Return NULL_TREE if no simplification is
6957 possible. */
6958
6959 static tree
6960 fold_binary_op_with_conditional_arg (location_t loc,
6961 enum tree_code code,
6962 tree type, tree op0, tree op1,
6963 tree cond, tree arg, int cond_first_p)
6964 {
6965 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6966 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6967 tree test, true_value, false_value;
6968 tree lhs = NULL_TREE;
6969 tree rhs = NULL_TREE;
6970 enum tree_code cond_code = COND_EXPR;
6971
6972 /* Do not move possibly trapping operations into the conditional as this
6973 pessimizes code and causes gimplification issues when applied late. */
6974 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
6975 ANY_INTEGRAL_TYPE_P (type)
6976 && TYPE_OVERFLOW_TRAPS (type), op1))
6977 return NULL_TREE;
6978
6979 if (TREE_CODE (cond) == COND_EXPR
6980 || TREE_CODE (cond) == VEC_COND_EXPR)
6981 {
6982 test = TREE_OPERAND (cond, 0);
6983 true_value = TREE_OPERAND (cond, 1);
6984 false_value = TREE_OPERAND (cond, 2);
6985 /* If this operand throws an exception, then it does not make
6986 sense to try to perform a logical or arithmetic operation
6987 involving it. */
6988 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6989 lhs = true_value;
6990 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6991 rhs = false_value;
6992 }
6993 else if (!(TREE_CODE (type) != VECTOR_TYPE
6994 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6995 {
6996 tree testtype = TREE_TYPE (cond);
6997 test = cond;
6998 true_value = constant_boolean_node (true, testtype);
6999 false_value = constant_boolean_node (false, testtype);
7000 }
7001 else
7002 /* Detect the case of mixing vector and scalar types - bail out. */
7003 return NULL_TREE;
7004
7005 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7006 cond_code = VEC_COND_EXPR;
7007
7008 /* This transformation is only worthwhile if we don't have to wrap ARG
7009 in a SAVE_EXPR and the operation can be simplified without recursing
7010 on at least one of the branches once it's pushed inside the COND_EXPR. */
7011 if (!TREE_CONSTANT (arg)
7012 && (TREE_SIDE_EFFECTS (arg)
7013 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7014 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7015 return NULL_TREE;
7016
7017 arg = fold_convert_loc (loc, arg_type, arg);
7018 if (lhs == 0)
7019 {
7020 true_value = fold_convert_loc (loc, cond_type, true_value);
7021 if (cond_first_p)
7022 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7023 else
7024 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7025 }
7026 if (rhs == 0)
7027 {
7028 false_value = fold_convert_loc (loc, cond_type, false_value);
7029 if (cond_first_p)
7030 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7031 else
7032 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7033 }
7034
7035 /* Check that we have simplified at least one of the branches. */
7036 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7037 return NULL_TREE;
7038
7039 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7040 }
7041
7042 \f
7043 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7044
7045 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7046 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
7047 ADDEND is the same as X.
7048
7049 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7050 and finite. The problematic cases are when X is zero, and its mode
7051 has signed zeros. In the case of rounding towards -infinity,
7052 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7053 modes, X + 0 is not the same as X because -0 + 0 is 0. */
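/* Concretely, with default rounding and signed zeros honored, `x - 0.0'
   may still be folded to `x' (the NEGATE case), whereas `x + 0.0' may not,
   because (-0.0) + 0.0 yields +0.0 and would lose the sign of a zero x.  */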
7054
7055 bool
7056 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
7057 {
7058 if (!real_zerop (addend))
7059 return false;
7060
7061 /* Don't allow the fold with -fsignaling-nans. */
7062 if (HONOR_SNANS (type))
7063 return false;
7064
7065 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7066 if (!HONOR_SIGNED_ZEROS (type))
7067 return true;
7068
7069 /* There is no case that is safe for all rounding modes. */
7070 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7071 return false;
7072
7073 /* In a vector or complex, we would need to check the sign of all zeros. */
7074 if (TREE_CODE (addend) == VECTOR_CST)
7075 addend = uniform_vector_p (addend);
7076 if (!addend || TREE_CODE (addend) != REAL_CST)
7077 return false;
7078
7079 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7080 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
7081 negate = !negate;
7082
7083 /* The mode has signed zeros, and we have to honor their sign.
7084 In this situation, there is only one case we can return true for.
7085 X - 0 is the same as X with default rounding. */
7086 return negate;
7087 }
7088
7089 /* Subroutine of match.pd that optimizes comparisons of a division by
7090 a nonzero integer constant against an integer constant, i.e.
7091 X/C1 op C2.
7092
7093 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7094 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
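/* Worked example: for unsigned X, `X / 3 == 2' holds exactly when
   6 <= X && X <= 8, so with C1 = 3 and C2 = 2 the code below computes
   PROD = 6, TMP = 2, *LO = 6 and *HI = 8.  */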
7095
7096 enum tree_code
7097 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7098 tree *hi, bool *neg_overflow)
7099 {
7100 tree prod, tmp, type = TREE_TYPE (c1);
7101 signop sign = TYPE_SIGN (type);
7102 wi::overflow_type overflow;
7103
7104 /* We have to do this the hard way to detect unsigned overflow.
7105 prod = int_const_binop (MULT_EXPR, c1, c2); */
7106 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7107 prod = force_fit_type (type, val, -1, overflow);
7108 *neg_overflow = false;
7109
7110 if (sign == UNSIGNED)
7111 {
7112 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7113 *lo = prod;
7114
7115 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7116 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7117 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7118 }
7119 else if (tree_int_cst_sgn (c1) >= 0)
7120 {
7121 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7122 switch (tree_int_cst_sgn (c2))
7123 {
7124 case -1:
7125 *neg_overflow = true;
7126 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7127 *hi = prod;
7128 break;
7129
7130 case 0:
7131 *lo = fold_negate_const (tmp, type);
7132 *hi = tmp;
7133 break;
7134
7135 case 1:
7136 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7137 *lo = prod;
7138 break;
7139
7140 default:
7141 gcc_unreachable ();
7142 }
7143 }
7144 else
7145 {
7146 /* A negative divisor reverses the relational operators. */
7147 code = swap_tree_comparison (code);
7148
7149 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7150 switch (tree_int_cst_sgn (c2))
7151 {
7152 case -1:
7153 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7154 *lo = prod;
7155 break;
7156
7157 case 0:
7158 *hi = fold_negate_const (tmp, type);
7159 *lo = tmp;
7160 break;
7161
7162 case 1:
7163 *neg_overflow = true;
7164 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7165 *hi = prod;
7166 break;
7167
7168 default:
7169 gcc_unreachable ();
7170 }
7171 }
7172
7173 if (code != EQ_EXPR && code != NE_EXPR)
7174 return code;
7175
7176 if (TREE_OVERFLOW (*lo)
7177 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7178 *lo = NULL_TREE;
7179 if (TREE_OVERFLOW (*hi)
7180 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7181 *hi = NULL_TREE;
7182
7183 return code;
7184 }
7185
7186
7187 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7188 equality/inequality test, then return a simplified form of the test
7189 using a sign testing. Otherwise return NULL. TYPE is the desired
7190 result type. */
7191
7192 static tree
7193 fold_single_bit_test_into_sign_test (location_t loc,
7194 enum tree_code code, tree arg0, tree arg1,
7195 tree result_type)
7196 {
7197 /* If this is testing a single bit, we can optimize the test. */
7198 if ((code == NE_EXPR || code == EQ_EXPR)
7199 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7200 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7201 {
7202 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7203 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7204 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7205
7206 if (arg00 != NULL_TREE
7207 /* This is only a win if casting to a signed type is cheap,
7208 i.e. when arg00's type is not a partial mode. */
7209 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7210 {
7211 tree stype = signed_type_for (TREE_TYPE (arg00));
7212 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7213 result_type,
7214 fold_convert_loc (loc, stype, arg00),
7215 build_int_cst (stype, 0));
7216 }
7217 }
7218
7219 return NULL_TREE;
7220 }
7221
7222 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7223 equality/inequality test, then return a simplified form of
7224 the test using shifts and logical operations. Otherwise return
7225 NULL. TYPE is the desired result type. */
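/* For example, `(A & 8) != 0' becomes `((A >> 3) & 1)' (converted to the
   result type), and `(A & 8) == 0' becomes `(((A >> 3) ^ 1) & 1)', since
   for the EQ case the extracted bit is inverted before the final AND.  */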
7226
7227 tree
7228 fold_single_bit_test (location_t loc, enum tree_code code,
7229 tree arg0, tree arg1, tree result_type)
7230 {
7231 /* If this is testing a single bit, we can optimize the test. */
7232 if ((code == NE_EXPR || code == EQ_EXPR)
7233 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7234 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7235 {
7236 tree inner = TREE_OPERAND (arg0, 0);
7237 tree type = TREE_TYPE (arg0);
7238 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7239 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7240 int ops_unsigned;
7241 tree signed_type, unsigned_type, intermediate_type;
7242 tree tem, one;
7243
7244 /* First, see if we can fold the single bit test into a sign-bit
7245 test. */
7246 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7247 result_type);
7248 if (tem)
7249 return tem;
7250
7251 /* Otherwise we have (A & C) != 0 where C is a single bit,
7252 convert that into ((A >> C2) & 1), where C2 = log2(C).
7253 Similarly for (A & C) == 0. */
7254
7255 /* If INNER is a right shift of a constant and it plus BITNUM does
7256 not overflow, adjust BITNUM and INNER. */
7257 if (TREE_CODE (inner) == RSHIFT_EXPR
7258 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7259 && bitnum < TYPE_PRECISION (type)
7260 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7261 TYPE_PRECISION (type) - bitnum))
7262 {
7263 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7264 inner = TREE_OPERAND (inner, 0);
7265 }
7266
7267 /* If we are going to be able to omit the AND below, we must do our
7268 operations as unsigned. If we must use the AND, we have a choice.
7269 Normally unsigned is faster, but for some machines signed is. */
7270 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7271 && !flag_syntax_only) ? 0 : 1;
7272
7273 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7274 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7275 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7276 inner = fold_convert_loc (loc, intermediate_type, inner);
7277
7278 if (bitnum != 0)
7279 inner = build2 (RSHIFT_EXPR, intermediate_type,
7280 inner, size_int (bitnum));
7281
7282 one = build_int_cst (intermediate_type, 1);
7283
7284 if (code == EQ_EXPR)
7285 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7286
7287 /* Put the AND last so it can combine with more things. */
7288 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7289
7290 /* Make sure to return the proper type. */
7291 inner = fold_convert_loc (loc, result_type, inner);
7292
7293 return inner;
7294 }
7295 return NULL_TREE;
7296 }
7297
7298 /* Test whether it is preferable to swap two operands, ARG0 and
7299 ARG1, for example because ARG0 is an integer constant and ARG1
7300 isn't. */
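/* For example, it returns true for (5, x), since constants are canonically
   placed second; a caller folding `5 < x' can then swap the operands (and
   the comparison code) to get `x > 5'.  */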
7301
7302 bool
7303 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7304 {
7305 if (CONSTANT_CLASS_P (arg1))
7306 return 0;
7307 if (CONSTANT_CLASS_P (arg0))
7308 return 1;
7309
7310 STRIP_NOPS (arg0);
7311 STRIP_NOPS (arg1);
7312
7313 if (TREE_CONSTANT (arg1))
7314 return 0;
7315 if (TREE_CONSTANT (arg0))
7316 return 1;
7317
7318 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
7319 for commutative and comparison operators. Ensuring a canonical
7320 form allows the optimizers to find additional redundancies without
7321 having to explicitly check for both orderings. */
7322 if (TREE_CODE (arg0) == SSA_NAME
7323 && TREE_CODE (arg1) == SSA_NAME
7324 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7325 return 1;
7326
7327 /* Put SSA_NAMEs last. */
7328 if (TREE_CODE (arg1) == SSA_NAME)
7329 return 0;
7330 if (TREE_CODE (arg0) == SSA_NAME)
7331 return 1;
7332
7333 /* Put variables last. */
7334 if (DECL_P (arg1))
7335 return 0;
7336 if (DECL_P (arg0))
7337 return 1;
7338
7339 return 0;
7340 }
7341
7342
7343 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7344 means A >= Y && A != MAX, but in this case we know that
7345 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7346
7347 static tree
7348 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7349 {
7350 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7351
7352 if (TREE_CODE (bound) == LT_EXPR)
7353 a = TREE_OPERAND (bound, 0);
7354 else if (TREE_CODE (bound) == GT_EXPR)
7355 a = TREE_OPERAND (bound, 1);
7356 else
7357 return NULL_TREE;
7358
7359 typea = TREE_TYPE (a);
7360 if (!INTEGRAL_TYPE_P (typea)
7361 && !POINTER_TYPE_P (typea))
7362 return NULL_TREE;
7363
7364 if (TREE_CODE (ineq) == LT_EXPR)
7365 {
7366 a1 = TREE_OPERAND (ineq, 1);
7367 y = TREE_OPERAND (ineq, 0);
7368 }
7369 else if (TREE_CODE (ineq) == GT_EXPR)
7370 {
7371 a1 = TREE_OPERAND (ineq, 0);
7372 y = TREE_OPERAND (ineq, 1);
7373 }
7374 else
7375 return NULL_TREE;
7376
7377 if (TREE_TYPE (a1) != typea)
7378 return NULL_TREE;
7379
7380 if (POINTER_TYPE_P (typea))
7381 {
7382 /* Convert the pointer types into integer before taking the difference. */
7383 tree ta = fold_convert_loc (loc, ssizetype, a);
7384 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7385 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7386 }
7387 else
7388 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7389
7390 if (!diff || !integer_onep (diff))
7391 return NULL_TREE;
7392
7393 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7394 }
7395
7396 /* Fold a sum or difference of at least one multiplication.
7397 Returns the folded tree or NULL if no simplification could be made. */
7398
7399 static tree
7400 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7401 tree arg0, tree arg1)
7402 {
7403 tree arg00, arg01, arg10, arg11;
7404 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7405
7406 /* (A * C) +- (B * C) -> (A+-B) * C.
7407 (A * C) +- A -> A * (C+-1).
7408 We are most concerned about the case where C is a constant,
7409 but other combinations show up during loop reduction. Since
7410 it is not difficult, try all four possibilities. */
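  /* For instance, `x * 7 - x' is handled as (x * 7) - (x * 1) and factors
     into `(7 - 1) * x', i.e. `x * 6', and `i * 4 + j * 2' uses the common
     power-of-two factor below to become `(i * 2 + j) * 2'.  */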
7411
7412 if (TREE_CODE (arg0) == MULT_EXPR)
7413 {
7414 arg00 = TREE_OPERAND (arg0, 0);
7415 arg01 = TREE_OPERAND (arg0, 1);
7416 }
7417 else if (TREE_CODE (arg0) == INTEGER_CST)
7418 {
7419 arg00 = build_one_cst (type);
7420 arg01 = arg0;
7421 }
7422 else
7423 {
7424 /* We cannot generate constant 1 for fract. */
7425 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7426 return NULL_TREE;
7427 arg00 = arg0;
7428 arg01 = build_one_cst (type);
7429 }
7430 if (TREE_CODE (arg1) == MULT_EXPR)
7431 {
7432 arg10 = TREE_OPERAND (arg1, 0);
7433 arg11 = TREE_OPERAND (arg1, 1);
7434 }
7435 else if (TREE_CODE (arg1) == INTEGER_CST)
7436 {
7437 arg10 = build_one_cst (type);
7438 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7439 the purpose of this canonicalization. */
7440 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7441 && negate_expr_p (arg1)
7442 && code == PLUS_EXPR)
7443 {
7444 arg11 = negate_expr (arg1);
7445 code = MINUS_EXPR;
7446 }
7447 else
7448 arg11 = arg1;
7449 }
7450 else
7451 {
7452 /* We cannot generate constant 1 for fract. */
7453 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7454 return NULL_TREE;
7455 arg10 = arg1;
7456 arg11 = build_one_cst (type);
7457 }
7458 same = NULL_TREE;
7459
7460 /* Prefer factoring a common non-constant. */
7461 if (operand_equal_p (arg00, arg10, 0))
7462 same = arg00, alt0 = arg01, alt1 = arg11;
7463 else if (operand_equal_p (arg01, arg11, 0))
7464 same = arg01, alt0 = arg00, alt1 = arg10;
7465 else if (operand_equal_p (arg00, arg11, 0))
7466 same = arg00, alt0 = arg01, alt1 = arg10;
7467 else if (operand_equal_p (arg01, arg10, 0))
7468 same = arg01, alt0 = arg00, alt1 = arg11;
7469
7470 /* No identical multiplicands; see if we can find a common
7471 power-of-two factor in non-power-of-two multiplies. This
7472 can help in multi-dimensional array access. */
7473 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7474 {
7475 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7476 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7477 HOST_WIDE_INT tmp;
7478 bool swap = false;
7479 tree maybe_same;
7480
7481 /* Move min of absolute values to int11. */
7482 if (absu_hwi (int01) < absu_hwi (int11))
7483 {
7484 tmp = int01, int01 = int11, int11 = tmp;
7485 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7486 maybe_same = arg01;
7487 swap = true;
7488 }
7489 else
7490 maybe_same = arg11;
7491
7492 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7493 if (factor > 1
7494 && pow2p_hwi (factor)
7495 && (int01 & (factor - 1)) == 0
7496 /* The remainder should not be a constant, otherwise we
7497 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7498 increase the number of multiplications needed. */
7499 && TREE_CODE (arg10) != INTEGER_CST)
7500 {
7501 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7502 build_int_cst (TREE_TYPE (arg00),
7503 int01 / int11));
7504 alt1 = arg10;
7505 same = maybe_same;
7506 if (swap)
7507 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7508 }
7509 }
7510
7511 if (!same)
7512 return NULL_TREE;
7513
7514 if (! ANY_INTEGRAL_TYPE_P (type)
7515 || TYPE_OVERFLOW_WRAPS (type)
7516 /* We are neither factoring zero nor minus one. */
7517 || TREE_CODE (same) == INTEGER_CST)
7518 return fold_build2_loc (loc, MULT_EXPR, type,
7519 fold_build2_loc (loc, code, type,
7520 fold_convert_loc (loc, type, alt0),
7521 fold_convert_loc (loc, type, alt1)),
7522 fold_convert_loc (loc, type, same));
7523
7524 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7525 same may be minus one and thus the multiplication may overflow. Perform
7526 the sum operation in an unsigned type. */
7527 tree utype = unsigned_type_for (type);
7528 tree tem = fold_build2_loc (loc, code, utype,
7529 fold_convert_loc (loc, utype, alt0),
7530 fold_convert_loc (loc, utype, alt1));
7531 /* If the sum evaluated to a constant that is not -INF, the multiplication
7532 cannot overflow. */
7533 if (TREE_CODE (tem) == INTEGER_CST
7534 && (wi::to_wide (tem)
7535 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7536 return fold_build2_loc (loc, MULT_EXPR, type,
7537 fold_convert (type, tem), same);
7538
7539 /* Do not resort to unsigned multiplication because
7540 we lose the no-overflow property of the expression. */
7541 return NULL_TREE;
7542 }
7543
7544 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7545 specified by EXPR into the buffer PTR of length LEN bytes.
7546 Return the number of bytes placed in the buffer, or zero
7547 upon failure. */
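/* For example, on a typical little-endian target the 32-bit INTEGER_CST
   0x11223344 is stored into PTR as the bytes 44 33 22 11 (least significant
   byte first), while a big-endian target stores 11 22 33 44; with
   PTR == NULL the function only reports how many bytes would be written.  */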
7548
7549 static int
7550 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7551 {
7552 tree type = TREE_TYPE (expr);
7553 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7554 int byte, offset, word, words;
7555 unsigned char value;
7556
7557 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7558 return 0;
7559 if (off == -1)
7560 off = 0;
7561
7562 if (ptr == NULL)
7563 /* Dry run. */
7564 return MIN (len, total_bytes - off);
7565
7566 words = total_bytes / UNITS_PER_WORD;
7567
7568 for (byte = 0; byte < total_bytes; byte++)
7569 {
7570 int bitpos = byte * BITS_PER_UNIT;
7571 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7572 number of bytes. */
7573 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7574
7575 if (total_bytes > UNITS_PER_WORD)
7576 {
7577 word = byte / UNITS_PER_WORD;
7578 if (WORDS_BIG_ENDIAN)
7579 word = (words - 1) - word;
7580 offset = word * UNITS_PER_WORD;
7581 if (BYTES_BIG_ENDIAN)
7582 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7583 else
7584 offset += byte % UNITS_PER_WORD;
7585 }
7586 else
7587 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7588 if (offset >= off && offset - off < len)
7589 ptr[offset - off] = value;
7590 }
7591 return MIN (len, total_bytes - off);
7592 }
7593
7594
7595 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7596 specified by EXPR into the buffer PTR of length LEN bytes.
7597 Return the number of bytes placed in the buffer, or zero
7598 upon failure. */
7599
7600 static int
7601 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7602 {
7603 tree type = TREE_TYPE (expr);
7604 scalar_mode mode = SCALAR_TYPE_MODE (type);
7605 int total_bytes = GET_MODE_SIZE (mode);
7606 FIXED_VALUE_TYPE value;
7607 tree i_value, i_type;
7608
7609 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7610 return 0;
7611
7612 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7613
7614 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes)
7615 return 0;
7616
7617 value = TREE_FIXED_CST (expr);
7618 i_value = double_int_to_tree (i_type, value.data);
7619
7620 return native_encode_int (i_value, ptr, len, off);
7621 }
7622
7623
7624 /* Subroutine of native_encode_expr. Encode the REAL_CST
7625 specified by EXPR into the buffer PTR of length LEN bytes.
7626 Return the number of bytes placed in the buffer, or zero
7627 upon failure. */
7628
7629 static int
7630 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7631 {
7632 tree type = TREE_TYPE (expr);
7633 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7634 int byte, offset, word, words, bitpos;
7635 unsigned char value;
7636
7637 /* There are always 32 bits in each long, no matter the size of
7638 the host's long. We handle floating point representations with
7639 up to 192 bits. */
7640 long tmp[6];
7641
7642 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7643 return 0;
7644 if (off == -1)
7645 off = 0;
7646
7647 if (ptr == NULL)
7648 /* Dry run. */
7649 return MIN (len, total_bytes - off);
7650
7651 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7652
7653 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7654
7655 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7656 bitpos += BITS_PER_UNIT)
7657 {
7658 byte = (bitpos / BITS_PER_UNIT) & 3;
7659 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7660
7661 if (UNITS_PER_WORD < 4)
7662 {
7663 word = byte / UNITS_PER_WORD;
7664 if (WORDS_BIG_ENDIAN)
7665 word = (words - 1) - word;
7666 offset = word * UNITS_PER_WORD;
7667 if (BYTES_BIG_ENDIAN)
7668 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7669 else
7670 offset += byte % UNITS_PER_WORD;
7671 }
7672 else
7673 {
7674 offset = byte;
7675 if (BYTES_BIG_ENDIAN)
7676 {
7677 /* Reverse bytes within each long, or within the entire float
7678 if it's smaller than a long (for HFmode). */
7679 offset = MIN (3, total_bytes - 1) - offset;
7680 gcc_assert (offset >= 0);
7681 }
7682 }
7683 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7684 if (offset >= off
7685 && offset - off < len)
7686 ptr[offset - off] = value;
7687 }
7688 return MIN (len, total_bytes - off);
7689 }
7690
7691 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7692 specified by EXPR into the buffer PTR of length LEN bytes.
7693 Return the number of bytes placed in the buffer, or zero
7694 upon failure. */
7695
7696 static int
7697 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7698 {
7699 int rsize, isize;
7700 tree part;
7701
7702 part = TREE_REALPART (expr);
7703 rsize = native_encode_expr (part, ptr, len, off);
7704 if (off == -1 && rsize == 0)
7705 return 0;
7706 part = TREE_IMAGPART (expr);
7707 if (off != -1)
7708 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7709 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7710 len - rsize, off);
7711 if (off == -1 && isize != rsize)
7712 return 0;
7713 return rsize + isize;
7714 }
7715
7716
7717 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7718 specified by EXPR into the buffer PTR of length LEN bytes.
7719 Return the number of bytes placed in the buffer, or zero
7720 upon failure. */
7721
7722 static int
7723 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7724 {
7725 unsigned HOST_WIDE_INT i, count;
7726 int size, offset;
7727 tree itype, elem;
7728
7729 offset = 0;
7730 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7731 return 0;
7732 itype = TREE_TYPE (TREE_TYPE (expr));
7733 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7734 for (i = 0; i < count; i++)
7735 {
7736 if (off >= size)
7737 {
7738 off -= size;
7739 continue;
7740 }
7741 elem = VECTOR_CST_ELT (expr, i);
7742 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7743 len - offset, off);
7744 if ((off == -1 && res != size) || res == 0)
7745 return 0;
7746 offset += res;
7747 if (offset >= len)
7748 return (off == -1 && i < count - 1) ? 0 : offset;
7749 if (off != -1)
7750 off = 0;
7751 }
7752 return offset;
7753 }
7754
7755
7756 /* Subroutine of native_encode_expr. Encode the STRING_CST
7757 specified by EXPR into the buffer PTR of length LEN bytes.
7758 Return the number of bytes placed in the buffer, or zero
7759 upon failure. */
7760
7761 static int
7762 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7763 {
7764 tree type = TREE_TYPE (expr);
7765
7766 /* Wide-char strings are encoded in target byte order, so natively
7767 encoding them is trivial. */
7768 if (BITS_PER_UNIT != CHAR_BIT
7769 || TREE_CODE (type) != ARRAY_TYPE
7770 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7771 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7772 return 0;
7773
7774 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7775 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7776 return 0;
7777 if (off == -1)
7778 off = 0;
7779 if (ptr == NULL)
7780 /* Dry run. */;
7781 else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7782 {
7783 int written = 0;
7784 if (off < TREE_STRING_LENGTH (expr))
7785 {
7786 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7787 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7788 }
7789 memset (ptr + written, 0,
7790 MIN (total_bytes - written, len - written));
7791 }
7792 else
7793 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7794 return MIN (total_bytes - off, len);
7795 }
7796
7797
7798 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7799 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7800 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7801 anything, just do a dry run. If OFF is not -1 then start
7802 the encoding at byte offset OFF and encode at most LEN bytes.
7803 Return the number of bytes placed in the buffer, or zero upon failure. */
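/* One way to use this is to make a first call with PTR == NULL as a dry
   run to learn how many bytes the encoding needs, then call again with a
   suitably sized buffer; passing OFF = 4 with LEN = 4 extracts just bytes
   4..7 of the encoding.  */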
7804
7805 int
7806 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7807 {
7808 /* We don't support starting at negative offset and -1 is special. */
7809 if (off < -1)
7810 return 0;
7811
7812 switch (TREE_CODE (expr))
7813 {
7814 case INTEGER_CST:
7815 return native_encode_int (expr, ptr, len, off);
7816
7817 case REAL_CST:
7818 return native_encode_real (expr, ptr, len, off);
7819
7820 case FIXED_CST:
7821 return native_encode_fixed (expr, ptr, len, off);
7822
7823 case COMPLEX_CST:
7824 return native_encode_complex (expr, ptr, len, off);
7825
7826 case VECTOR_CST:
7827 return native_encode_vector (expr, ptr, len, off);
7828
7829 case STRING_CST:
7830 return native_encode_string (expr, ptr, len, off);
7831
7832 default:
7833 return 0;
7834 }
7835 }
7836
7837
7838 /* Subroutine of native_interpret_expr. Interpret the contents of
7839 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7840 If the buffer cannot be interpreted, return NULL_TREE. */
7841
7842 static tree
7843 native_interpret_int (tree type, const unsigned char *ptr, int len)
7844 {
7845 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7846
7847 if (total_bytes > len
7848 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7849 return NULL_TREE;
7850
7851 wide_int result = wi::from_buffer (ptr, total_bytes);
7852
7853 return wide_int_to_tree (type, result);
7854 }
7855
7856
7857 /* Subroutine of native_interpret_expr. Interpret the contents of
7858 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7859 If the buffer cannot be interpreted, return NULL_TREE. */
7860
7861 static tree
7862 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7863 {
7864 scalar_mode mode = SCALAR_TYPE_MODE (type);
7865 int total_bytes = GET_MODE_SIZE (mode);
7866 double_int result;
7867 FIXED_VALUE_TYPE fixed_value;
7868
7869 if (total_bytes > len
7870 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7871 return NULL_TREE;
7872
7873 result = double_int::from_buffer (ptr, total_bytes);
7874 fixed_value = fixed_from_double_int (result, mode);
7875
7876 return build_fixed (type, fixed_value);
7877 }
7878
7879
7880 /* Subroutine of native_interpret_expr. Interpret the contents of
7881 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7882 If the buffer cannot be interpreted, return NULL_TREE. */
7883
7884 static tree
7885 native_interpret_real (tree type, const unsigned char *ptr, int len)
7886 {
7887 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7888 int total_bytes = GET_MODE_SIZE (mode);
7889 unsigned char value;
7890 /* There are always 32 bits in each long, no matter the size of
7891 the host's long. We handle floating point representations with
7892 up to 192 bits. */
7893 REAL_VALUE_TYPE r;
7894 long tmp[6];
7895
7896 if (total_bytes > len || total_bytes > 24)
7897 return NULL_TREE;
7898 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7899
7900 memset (tmp, 0, sizeof (tmp));
7901 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7902 bitpos += BITS_PER_UNIT)
7903 {
7904 /* Both OFFSET and BYTE index within a long;
7905 bitpos indexes the whole float. */
7906 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7907 if (UNITS_PER_WORD < 4)
7908 {
7909 int word = byte / UNITS_PER_WORD;
7910 if (WORDS_BIG_ENDIAN)
7911 word = (words - 1) - word;
7912 offset = word * UNITS_PER_WORD;
7913 if (BYTES_BIG_ENDIAN)
7914 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7915 else
7916 offset += byte % UNITS_PER_WORD;
7917 }
7918 else
7919 {
7920 offset = byte;
7921 if (BYTES_BIG_ENDIAN)
7922 {
7923 /* Reverse bytes within each long, or within the entire float
7924 if it's smaller than a long (for HFmode). */
7925 offset = MIN (3, total_bytes - 1) - offset;
7926 gcc_assert (offset >= 0);
7927 }
7928 }
7929 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7930
7931 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7932 }
7933
7934 real_from_target (&r, tmp, mode);
7935 return build_real (type, r);
7936 }
7937
7938
7939 /* Subroutine of native_interpret_expr. Interpret the contents of
7940 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7941 If the buffer cannot be interpreted, return NULL_TREE. */
7942
7943 static tree
7944 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7945 {
7946 tree etype, rpart, ipart;
7947 int size;
7948
7949 etype = TREE_TYPE (type);
7950 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7951 if (size * 2 > len)
7952 return NULL_TREE;
7953 rpart = native_interpret_expr (etype, ptr, size);
7954 if (!rpart)
7955 return NULL_TREE;
7956 ipart = native_interpret_expr (etype, ptr+size, size);
7957 if (!ipart)
7958 return NULL_TREE;
7959 return build_complex (type, rpart, ipart);
7960 }
7961
7962
7963 /* Subroutine of native_interpret_expr. Interpret the contents of
7964 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7965 If the buffer cannot be interpreted, return NULL_TREE. */
7966
7967 static tree
7968 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
7969 {
7970 tree etype, elem;
7971 unsigned int i, size;
7972 unsigned HOST_WIDE_INT count;
7973
7974 etype = TREE_TYPE (type);
7975 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7976 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
7977 || size * count > len)
7978 return NULL_TREE;
7979
7980 tree_vector_builder elements (type, count, 1);
7981 for (i = 0; i < count; ++i)
7982 {
7983 elem = native_interpret_expr (etype, ptr+(i*size), size);
7984 if (!elem)
7985 return NULL_TREE;
7986 elements.quick_push (elem);
7987 }
7988 return elements.build ();
7989 }
7990
7991
7992 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7993 the buffer PTR of length LEN as a constant of type TYPE. For
7994 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7995 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7996 return NULL_TREE. */
7997
7998 tree
7999 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8000 {
8001 switch (TREE_CODE (type))
8002 {
8003 case INTEGER_TYPE:
8004 case ENUMERAL_TYPE:
8005 case BOOLEAN_TYPE:
8006 case POINTER_TYPE:
8007 case REFERENCE_TYPE:
8008 return native_interpret_int (type, ptr, len);
8009
8010 case REAL_TYPE:
8011 return native_interpret_real (type, ptr, len);
8012
8013 case FIXED_POINT_TYPE:
8014 return native_interpret_fixed (type, ptr, len);
8015
8016 case COMPLEX_TYPE:
8017 return native_interpret_complex (type, ptr, len);
8018
8019 case VECTOR_TYPE:
8020 return native_interpret_vector (type, ptr, len);
8021
8022 default:
8023 return NULL_TREE;
8024 }
8025 }
8026
8027 /* Returns true if we can interpret the contents of a native encoding
8028 as TYPE. */
8029
8030 static bool
8031 can_native_interpret_type_p (tree type)
8032 {
8033 switch (TREE_CODE (type))
8034 {
8035 case INTEGER_TYPE:
8036 case ENUMERAL_TYPE:
8037 case BOOLEAN_TYPE:
8038 case POINTER_TYPE:
8039 case REFERENCE_TYPE:
8040 case FIXED_POINT_TYPE:
8041 case REAL_TYPE:
8042 case COMPLEX_TYPE:
8043 case VECTOR_TYPE:
8044 return true;
8045 default:
8046 return false;
8047 }
8048 }
8049
8050
8051 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8052 TYPE at compile-time. If we're unable to perform the conversion
8053 return NULL_TREE. */
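/* For instance, on a target with IEEE single-precision floats,
   VIEW_CONVERT_EXPR<float>(0x3f800000) folds to 1.0f: the INTEGER_CST is
   first encoded into BUFFER by native_encode_expr and the bytes are then
   reinterpreted as a REAL_CST by native_interpret_expr.  */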
8054
8055 static tree
8056 fold_view_convert_expr (tree type, tree expr)
8057 {
8058 /* We support up to 512-bit values (for V8DFmode). */
8059 unsigned char buffer[64];
8060 int len;
8061
8062 /* Check that the host and target are sane. */
8063 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8064 return NULL_TREE;
8065
8066 len = native_encode_expr (expr, buffer, sizeof (buffer));
8067 if (len == 0)
8068 return NULL_TREE;
8069
8070 return native_interpret_expr (type, buffer, len);
8071 }
8072
8073 /* Build an expression for the address of T. Folds away INDIRECT_REF
8074 to avoid confusing the gimplify process. */
8075
8076 tree
8077 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8078 {
8079 /* The size of the object is not relevant when talking about its address. */
8080 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8081 t = TREE_OPERAND (t, 0);
8082
8083 if (TREE_CODE (t) == INDIRECT_REF)
8084 {
8085 t = TREE_OPERAND (t, 0);
8086
8087 if (TREE_TYPE (t) != ptrtype)
8088 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
8089 }
8090 else if (TREE_CODE (t) == MEM_REF
8091 && integer_zerop (TREE_OPERAND (t, 1)))
8092 return TREE_OPERAND (t, 0);
8093 else if (TREE_CODE (t) == MEM_REF
8094 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
8095 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
8096 TREE_OPERAND (t, 0),
8097 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
8098 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8099 {
8100 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8101
8102 if (TREE_TYPE (t) != ptrtype)
8103 t = fold_convert_loc (loc, ptrtype, t);
8104 }
8105 else
8106 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
8107
8108 return t;
8109 }
8110
8111 /* Build an expression for the address of T. */
8112
8113 tree
8114 build_fold_addr_expr_loc (location_t loc, tree t)
8115 {
8116 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8117
8118 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8119 }
8120
8121 /* Fold a unary expression of code CODE and type TYPE with operand
8122 OP0. Return the folded expression if folding is successful.
8123 Otherwise, return NULL_TREE. */
8124
8125 tree
8126 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8127 {
8128 tree tem;
8129 tree arg0;
8130 enum tree_code_class kind = TREE_CODE_CLASS (code);
8131
8132 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8133 && TREE_CODE_LENGTH (code) == 1);
8134
8135 arg0 = op0;
8136 if (arg0)
8137 {
8138 if (CONVERT_EXPR_CODE_P (code)
8139 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
8140 {
8141 /* Don't use STRIP_NOPS, because signedness of argument type
8142 matters. */
8143 STRIP_SIGN_NOPS (arg0);
8144 }
8145 else
8146 {
8147 /* Strip any conversions that don't change the mode. This
8148 is safe for every expression, except for a comparison
8149 expression because its signedness is derived from its
8150 operands.
8151
8152 Note that this is done as an internal manipulation within
8153 the constant folder, in order to find the simplest
8154 representation of the arguments so that their form can be
8155 studied. In any case, the appropriate type conversions
8156 should be put back in the tree that will get out of the
8157 constant folder. */
8158 STRIP_NOPS (arg0);
8159 }
8160
8161 if (CONSTANT_CLASS_P (arg0))
8162 {
8163 tree tem = const_unop (code, type, arg0);
8164 if (tem)
8165 {
8166 if (TREE_TYPE (tem) != type)
8167 tem = fold_convert_loc (loc, type, tem);
8168 return tem;
8169 }
8170 }
8171 }
8172
8173 tem = generic_simplify (loc, code, type, op0);
8174 if (tem)
8175 return tem;
8176
8177 if (TREE_CODE_CLASS (code) == tcc_unary)
8178 {
8179 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8180 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8181 fold_build1_loc (loc, code, type,
8182 fold_convert_loc (loc, TREE_TYPE (op0),
8183 TREE_OPERAND (arg0, 1))));
8184 else if (TREE_CODE (arg0) == COND_EXPR)
8185 {
8186 tree arg01 = TREE_OPERAND (arg0, 1);
8187 tree arg02 = TREE_OPERAND (arg0, 2);
8188 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8189 arg01 = fold_build1_loc (loc, code, type,
8190 fold_convert_loc (loc,
8191 TREE_TYPE (op0), arg01));
8192 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8193 arg02 = fold_build1_loc (loc, code, type,
8194 fold_convert_loc (loc,
8195 TREE_TYPE (op0), arg02));
8196 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8197 arg01, arg02);
8198
8199 /* If this was a conversion, and all we did was to move it
8200 inside the COND_EXPR, bring it back out. But leave it if
8201 it is a conversion from integer to integer and the
8202 result precision is no wider than a word since such a
8203 conversion is cheap and may be optimized away by combine,
8204 while it couldn't if it were outside the COND_EXPR. Then return
8205 so we don't get into an infinite recursion loop taking the
8206 conversion out and then back in. */
8207
8208 if ((CONVERT_EXPR_CODE_P (code)
8209 || code == NON_LVALUE_EXPR)
8210 && TREE_CODE (tem) == COND_EXPR
8211 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8212 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8213 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8214 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8215 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8216 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8217 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8218 && (INTEGRAL_TYPE_P
8219 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8220 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8221 || flag_syntax_only))
8222 tem = build1_loc (loc, code, type,
8223 build3 (COND_EXPR,
8224 TREE_TYPE (TREE_OPERAND
8225 (TREE_OPERAND (tem, 1), 0)),
8226 TREE_OPERAND (tem, 0),
8227 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8228 TREE_OPERAND (TREE_OPERAND (tem, 2),
8229 0)));
8230 return tem;
8231 }
8232 }
8233
8234 switch (code)
8235 {
8236 case NON_LVALUE_EXPR:
8237 if (!maybe_lvalue_p (op0))
8238 return fold_convert_loc (loc, type, op0);
8239 return NULL_TREE;
8240
8241 CASE_CONVERT:
8242 case FLOAT_EXPR:
8243 case FIX_TRUNC_EXPR:
8244 if (COMPARISON_CLASS_P (op0))
8245 {
8246 /* If we have (type) (a CMP b) and type is an integral type, return
8247 a new expression involving the new type. Canonicalize
8248 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
8249 non-integral type.
8250 Do not fold the result as that would not simplify further; also,
8251 folding again results in infinite recursion.
8252 if (TREE_CODE (type) == BOOLEAN_TYPE)
8253 return build2_loc (loc, TREE_CODE (op0), type,
8254 TREE_OPERAND (op0, 0),
8255 TREE_OPERAND (op0, 1));
8256 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
8257 && TREE_CODE (type) != VECTOR_TYPE)
8258 return build3_loc (loc, COND_EXPR, type, op0,
8259 constant_boolean_node (true, type),
8260 constant_boolean_node (false, type));
8261 }
8262
8263 /* Handle (T *)&A.B.C for A being of type T and B and C
8264 living at offset zero. This occurs frequently in
8265 C++ upcasting and then accessing the base. */
8266 if (TREE_CODE (op0) == ADDR_EXPR
8267 && POINTER_TYPE_P (type)
8268 && handled_component_p (TREE_OPERAND (op0, 0)))
8269 {
8270 poly_int64 bitsize, bitpos;
8271 tree offset;
8272 machine_mode mode;
8273 int unsignedp, reversep, volatilep;
8274 tree base
8275 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
8276 &offset, &mode, &unsignedp, &reversep,
8277 &volatilep);
8278 /* If the reference was to a (constant) zero offset, we can use
8279 the address of the base if it has the same base type
8280 as the result type and the pointer type is unqualified. */
8281 if (!offset
8282 && known_eq (bitpos, 0)
8283 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8284 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8285 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8286 return fold_convert_loc (loc, type,
8287 build_fold_addr_expr_loc (loc, base));
8288 }
8289
8290 if (TREE_CODE (op0) == MODIFY_EXPR
8291 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8292 /* Detect assigning a bitfield. */
8293 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8294 && DECL_BIT_FIELD
8295 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8296 {
8297 /* Don't leave an assignment inside a conversion
8298 unless assigning a bitfield. */
8299 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8300 /* First do the assignment, then return converted constant. */
8301 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8302 TREE_NO_WARNING (tem) = 1;
8303 TREE_USED (tem) = 1;
8304 return tem;
8305 }
8306
8307 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8308 constants (if x has signed type, the sign bit cannot be set
8309 in c). This folds extension into the BIT_AND_EXPR.
8310 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8311 very likely don't have maximal range for their precision and this
8312 transformation effectively doesn't preserve non-maximal ranges. */
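      /* For example, when X has a signed 8-bit type, `(int) (X & 0x7f)' can
	 be folded to `(int) X & 0x7f': the mask clears the sign bit, so
	 sign-extending X first does not change the result.  */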
8313 if (TREE_CODE (type) == INTEGER_TYPE
8314 && TREE_CODE (op0) == BIT_AND_EXPR
8315 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8316 {
8317 tree and_expr = op0;
8318 tree and0 = TREE_OPERAND (and_expr, 0);
8319 tree and1 = TREE_OPERAND (and_expr, 1);
8320 int change = 0;
8321
8322 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8323 || (TYPE_PRECISION (type)
8324 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8325 change = 1;
8326 else if (TYPE_PRECISION (TREE_TYPE (and1))
8327 <= HOST_BITS_PER_WIDE_INT
8328 && tree_fits_uhwi_p (and1))
8329 {
8330 unsigned HOST_WIDE_INT cst;
8331
8332 cst = tree_to_uhwi (and1);
8333 cst &= HOST_WIDE_INT_M1U
8334 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8335 change = (cst == 0);
8336 if (change
8337 && !flag_syntax_only
8338 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
8339 == ZERO_EXTEND))
8340 {
8341 tree uns = unsigned_type_for (TREE_TYPE (and0));
8342 and0 = fold_convert_loc (loc, uns, and0);
8343 and1 = fold_convert_loc (loc, uns, and1);
8344 }
8345 }
8346 if (change)
8347 {
8348 tem = force_fit_type (type, wi::to_widest (and1), 0,
8349 TREE_OVERFLOW (and1));
8350 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8351 fold_convert_loc (loc, type, and0), tem);
8352 }
8353 }
8354
8355 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
8356 cast (T1)X will fold away. We assume that this happens when X itself
8357 is a cast. */
8358 if (POINTER_TYPE_P (type)
8359 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8360 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
8361 {
8362 tree arg00 = TREE_OPERAND (arg0, 0);
8363 tree arg01 = TREE_OPERAND (arg0, 1);
8364
8365 return fold_build_pointer_plus_loc
8366 (loc, fold_convert_loc (loc, type, arg00), arg01);
8367 }
8368
8369 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8370 of the same precision, and X is an integer type not narrower than
8371 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8372 if (INTEGRAL_TYPE_P (type)
8373 && TREE_CODE (op0) == BIT_NOT_EXPR
8374 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8375 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8376 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8377 {
8378 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8379 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8380 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8381 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8382 fold_convert_loc (loc, type, tem));
8383 }
8384
8385 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8386 type of X and Y (integer types only). */
8387 if (INTEGRAL_TYPE_P (type)
8388 && TREE_CODE (op0) == MULT_EXPR
8389 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8390 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8391 {
8392 /* Be careful not to introduce new overflows. */
8393 tree mult_type;
8394 if (TYPE_OVERFLOW_WRAPS (type))
8395 mult_type = type;
8396 else
8397 mult_type = unsigned_type_for (type);
8398
8399 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8400 {
8401 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8402 fold_convert_loc (loc, mult_type,
8403 TREE_OPERAND (op0, 0)),
8404 fold_convert_loc (loc, mult_type,
8405 TREE_OPERAND (op0, 1)));
8406 return fold_convert_loc (loc, type, tem);
8407 }
8408 }
8409
8410 return NULL_TREE;
8411
8412 case VIEW_CONVERT_EXPR:
8413 if (TREE_CODE (op0) == MEM_REF)
8414 {
8415 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
8416 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
8417 tem = fold_build2_loc (loc, MEM_REF, type,
8418 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8419 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
8420 return tem;
8421 }
8422
8423 return NULL_TREE;
8424
8425 case NEGATE_EXPR:
8426 tem = fold_negate_expr (loc, arg0);
8427 if (tem)
8428 return fold_convert_loc (loc, type, tem);
8429 return NULL_TREE;
8430
8431 case ABS_EXPR:
8432 /* Convert fabs((double)float) into (double)fabsf(float). */
8433 if (TREE_CODE (arg0) == NOP_EXPR
8434 && TREE_CODE (type) == REAL_TYPE)
8435 {
8436 tree targ0 = strip_float_extensions (arg0);
8437 if (targ0 != arg0)
8438 return fold_convert_loc (loc, type,
8439 fold_build1_loc (loc, ABS_EXPR,
8440 TREE_TYPE (targ0),
8441 targ0));
8442 }
8443 return NULL_TREE;
8444
8445 case BIT_NOT_EXPR:
8446 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8447 if (TREE_CODE (arg0) == BIT_XOR_EXPR
8448 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8449 fold_convert_loc (loc, type,
8450 TREE_OPERAND (arg0, 0)))))
8451 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8452 fold_convert_loc (loc, type,
8453 TREE_OPERAND (arg0, 1)));
8454 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8455 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8456 fold_convert_loc (loc, type,
8457 TREE_OPERAND (arg0, 1)))))
8458 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8459 fold_convert_loc (loc, type,
8460 TREE_OPERAND (arg0, 0)), tem);
8461
8462 return NULL_TREE;
8463
8464 case TRUTH_NOT_EXPR:
8465 /* Note that the operand of this must be an int
8466 and its values must be 0 or 1.
8467 ("true" is a fixed value perhaps depending on the language,
8468 but we don't handle values other than 1 correctly yet.) */
8469 tem = fold_truth_not_expr (loc, arg0);
8470 if (!tem)
8471 return NULL_TREE;
8472 return fold_convert_loc (loc, type, tem);
8473
8474 case INDIRECT_REF:
8475 /* Fold *&X to X if X is an lvalue. */
8476 if (TREE_CODE (op0) == ADDR_EXPR)
8477 {
8478 tree op00 = TREE_OPERAND (op0, 0);
8479 if ((VAR_P (op00)
8480 || TREE_CODE (op00) == PARM_DECL
8481 || TREE_CODE (op00) == RESULT_DECL)
8482 && !TREE_READONLY (op00))
8483 return op00;
8484 }
8485 return NULL_TREE;
8486
8487 default:
8488 return NULL_TREE;
8489 } /* switch (code) */
8490 }
8491
8492
8493 /* If the operation was a conversion, do _not_ mark a resulting constant
8494 with TREE_OVERFLOW if the original constant was not. These conversions
8495 have implementation defined behavior and retaining the TREE_OVERFLOW
8496 flag here would confuse later passes such as VRP. */
8497 tree
8498 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8499 tree type, tree op0)
8500 {
8501 tree res = fold_unary_loc (loc, code, type, op0);
8502 if (res
8503 && TREE_CODE (res) == INTEGER_CST
8504 && TREE_CODE (op0) == INTEGER_CST
8505 && CONVERT_EXPR_CODE_P (code))
8506 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8507
8508 return res;
8509 }
8510
8511 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8512 operands OP0 and OP1. LOC is the location of the resulting expression.
8513 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8514 Return the folded expression if folding is successful. Otherwise,
8515 return NULL_TREE. */
8516 static tree
8517 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8518 tree arg0, tree arg1, tree op0, tree op1)
8519 {
8520 tree tem;
8521
8522 /* We only do these simplifications if we are optimizing. */
8523 if (!optimize)
8524 return NULL_TREE;
8525
8526 /* Check for things like (A || B) && (A || C). We can convert this
8527 to A || (B && C). Note that either operator can be any of the four
8528 truth and/or operations and the transformation will still be
8529 valid. Also note that we only care about order for the
8530 ANDIF and ORIF operators. If B contains side effects, this
8531 might change the truth-value of A. */
8532 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8533 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8534 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8535 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8536 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8537 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8538 {
8539 tree a00 = TREE_OPERAND (arg0, 0);
8540 tree a01 = TREE_OPERAND (arg0, 1);
8541 tree a10 = TREE_OPERAND (arg1, 0);
8542 tree a11 = TREE_OPERAND (arg1, 1);
8543 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8544 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8545 && (code == TRUTH_AND_EXPR
8546 || code == TRUTH_OR_EXPR));
8547
8548 if (operand_equal_p (a00, a10, 0))
8549 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8550 fold_build2_loc (loc, code, type, a01, a11));
8551 else if (commutative && operand_equal_p (a00, a11, 0))
8552 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8553 fold_build2_loc (loc, code, type, a01, a10));
8554 else if (commutative && operand_equal_p (a01, a10, 0))
8555 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8556 fold_build2_loc (loc, code, type, a00, a11));
8557
8558 /* This case is tricky because we must either have commutative
8559 operators or else A10 must not have side-effects. */
8560
8561 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8562 && operand_equal_p (a01, a11, 0))
8563 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8564 fold_build2_loc (loc, code, type, a00, a10),
8565 a01);
8566 }
8567
8568 /* See if we can build a range comparison. */
8569 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
8570 return tem;
8571
8572 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8573 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8574 {
8575 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8576 if (tem)
8577 return fold_build2_loc (loc, code, type, tem, arg1);
8578 }
8579
8580 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8581 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8582 {
8583 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8584 if (tem)
8585 return fold_build2_loc (loc, code, type, arg0, tem);
8586 }
8587
8588 /* Check for the possibility of merging component references. If our
8589 lhs is another similar operation, try to merge its rhs with our
8590 rhs. Then try to merge our lhs and rhs. */
8591 if (TREE_CODE (arg0) == code
8592 && (tem = fold_truth_andor_1 (loc, code, type,
8593 TREE_OPERAND (arg0, 1), arg1)) != 0)
8594 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8595
8596 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8597 return tem;
8598
8599 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
8600 if (PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT) != -1)
8601 logical_op_non_short_circuit
8602 = PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT);
8603 if (logical_op_non_short_circuit
8604 && !flag_sanitize_coverage
8605 && (code == TRUTH_AND_EXPR
8606 || code == TRUTH_ANDIF_EXPR
8607 || code == TRUTH_OR_EXPR
8608 || code == TRUTH_ORIF_EXPR))
8609 {
8610 enum tree_code ncode, icode;
8611
8612 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8613 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8614 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8615
8616 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8617 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8618 We don't want to pack more than two leaves into a non-IF AND/OR
8619 expression.
8620 If the tree code of the left-hand operand isn't an AND/OR-IF code and is
8621 not equal to IF-CODE, then we don't want to add the right-hand operand.
8622 If the inner right-hand side of left-hand operand has
8623 side-effects, or isn't simple, then we can't add to it,
8624 as otherwise we might destroy if-sequence. */
8625 if (TREE_CODE (arg0) == icode
8626 && simple_operand_p_2 (arg1)
8627 /* Needed for sequence points to handle trapping, and
8628 side-effects. */
8629 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8630 {
8631 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8632 arg1);
8633 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8634 tem);
8635 }
8636 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8637 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8638 else if (TREE_CODE (arg1) == icode
8639 && simple_operand_p_2 (arg0)
8640 /* Needed for sequence points to handle trapping, and
8641 side-effects. */
8642 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8643 {
8644 tem = fold_build2_loc (loc, ncode, type,
8645 arg0, TREE_OPERAND (arg1, 0));
8646 return fold_build2_loc (loc, icode, type, tem,
8647 TREE_OPERAND (arg1, 1));
8648 }
8649 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8650 into (A OR B).
8651 For sequence point consistency, we need to check for trapping,
8652 and side-effects. */
8653 else if (code == icode && simple_operand_p_2 (arg0)
8654 && simple_operand_p_2 (arg1))
8655 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8656 }
8657
8658 return NULL_TREE;
8659 }
8660
8661 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8662 by changing CODE to reduce the magnitude of constants involved in
8663 ARG0 of the comparison.
8664 Returns a canonicalized comparison tree if a simplification was
8665 possible, otherwise returns NULL_TREE.
8666 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8667 valid if signed overflow is undefined. */
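/* For example, assuming signed overflow is undefined, `X + 2 > Y' is
   canonicalized to `X + 1 >= Y', reducing the magnitude of the constant
   while preserving the value of the comparison.  */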
8668
8669 static tree
8670 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8671 tree arg0, tree arg1,
8672 bool *strict_overflow_p)
8673 {
8674 enum tree_code code0 = TREE_CODE (arg0);
8675 tree t, cst0 = NULL_TREE;
8676 int sgn0;
8677
8678 /* Match A +- CST code arg1. We can change this only if overflow
8679 is undefined. */
8680 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8681 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8682 /* In principle pointers also have undefined overflow behavior,
8683 but that causes problems elsewhere. */
8684 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8685 && (code0 == MINUS_EXPR
8686 || code0 == PLUS_EXPR)
8687 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8688 return NULL_TREE;
8689
8690 /* Identify the constant in arg0 and its sign. */
8691 cst0 = TREE_OPERAND (arg0, 1);
8692 sgn0 = tree_int_cst_sgn (cst0);
8693
8694 /* Overflowed constants and zero will cause problems. */
8695 if (integer_zerop (cst0)
8696 || TREE_OVERFLOW (cst0))
8697 return NULL_TREE;
8698
8699 /* See if we can reduce the magnitude of the constant in
8700 arg0 by changing the comparison code. */
8701 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8702 if (code == LT_EXPR
8703 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8704 code = LE_EXPR;
8705 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8706 else if (code == GT_EXPR
8707 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8708 code = GE_EXPR;
8709 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8710 else if (code == LE_EXPR
8711 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8712 code = LT_EXPR;
8713 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8714 else if (code == GE_EXPR
8715 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8716 code = GT_EXPR;
8717 else
8718 return NULL_TREE;
8719 *strict_overflow_p = true;
8720
8721 /* Now build the constant reduced in magnitude. But not if that
8722 would produce one outside of its type's range. */
8723 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8724 && ((sgn0 == 1
8725 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8726 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8727 || (sgn0 == -1
8728 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8729 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8730 return NULL_TREE;
8731
8732 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8733 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8734 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8735 t = fold_convert (TREE_TYPE (arg1), t);
8736
8737 return fold_build2_loc (loc, code, type, t, arg1);
8738 }
8739
8740 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8741 overflow further. Try to decrease the magnitude of constants involved
8742 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8743 and put sole constants at the second argument position.
8744 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8745
8746 static tree
8747 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8748 tree arg0, tree arg1)
8749 {
8750 tree t;
8751 bool strict_overflow_p;
8752 const char * const warnmsg = G_("assuming signed overflow does not occur "
8753 "when reducing constant in comparison");
8754
8755 /* Try canonicalization by simplifying arg0. */
8756 strict_overflow_p = false;
8757 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8758 &strict_overflow_p);
8759 if (t)
8760 {
8761 if (strict_overflow_p)
8762 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8763 return t;
8764 }
8765
8766 /* Try canonicalization by simplifying arg1 using the swapped
8767 comparison. */
8768 code = swap_tree_comparison (code);
8769 strict_overflow_p = false;
8770 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8771 &strict_overflow_p);
8772 if (t && strict_overflow_p)
8773 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8774 return t;
8775 }
8776
8777 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8778 space. This is used to avoid issuing overflow warnings for
8779 expressions like &p->x which cannot wrap. */
8780
8781 static bool
8782 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
8783 {
8784 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8785 return true;
8786
8787 if (maybe_lt (bitpos, 0))
8788 return true;
8789
8790 poly_wide_int wi_offset;
8791 int precision = TYPE_PRECISION (TREE_TYPE (base));
8792 if (offset == NULL_TREE)
8793 wi_offset = wi::zero (precision);
8794 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
8795 return true;
8796 else
8797 wi_offset = wi::to_poly_wide (offset);
8798
8799 wi::overflow_type overflow;
8800 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
8801 precision);
8802 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8803 if (overflow)
8804 return true;
8805
8806 poly_uint64 total_hwi, size;
8807 if (!total.to_uhwi (&total_hwi)
8808 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
8809 &size)
8810 || known_eq (size, 0U))
8811 return true;
8812
8813 if (known_le (total_hwi, size))
8814 return false;
8815
8816 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8817 array. */
8818 if (TREE_CODE (base) == ADDR_EXPR
8819 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
8820 &size)
8821 && maybe_ne (size, 0U)
8822 && known_le (total_hwi, size))
8823 return false;
8824
8825 return true;
8826 }
8827
8828 /* Return a positive integer when the symbol DECL is known to have
8829 a nonzero address, zero when it's known not to (e.g., it's a weak
8830 symbol), and a negative integer when the symbol is not yet in the
8831 symbol table and so whether or not its address is zero is unknown.
8832 For function-local objects, always return a positive integer. */
8833 static int
8834 maybe_nonzero_address (tree decl)
8835 {
8836 if (DECL_P (decl) && decl_in_symtab_p (decl))
8837 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8838 return symbol->nonzero_address ();
8839
8840 /* Function local objects are never NULL. */
8841 if (DECL_P (decl)
8842 && (DECL_CONTEXT (decl)
8843 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8844 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8845 return 1;
8846
8847 return -1;
8848 }
8849
8850 /* Subroutine of fold_binary. This routine performs all of the
8851 transformations that are common to the equality/inequality
8852 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8853 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8854 fold_binary should call fold_binary. Fold a comparison with
8855 tree code CODE and type TYPE with operands OP0 and OP1. Return
8856 the folded comparison or NULL_TREE. */
8857
8858 static tree
8859 fold_comparison (location_t loc, enum tree_code code, tree type,
8860 tree op0, tree op1)
8861 {
8862 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8863 tree arg0, arg1, tem;
8864
8865 arg0 = op0;
8866 arg1 = op1;
8867
8868 STRIP_SIGN_NOPS (arg0);
8869 STRIP_SIGN_NOPS (arg1);
8870
8871 /* For comparisons of pointers we can decompose it to a compile time
8872 comparison of the base objects and the offsets into the object.
8873 This requires at least one operand being an ADDR_EXPR or a
8874 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
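/* For example, &s.a < &s.b can be decided by comparing the bit offsets
of a and b within s, and a comparison of p + 4 with p + 8 reduces to
comparing the two offsets against the common base p. */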
8875 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8876 && (TREE_CODE (arg0) == ADDR_EXPR
8877 || TREE_CODE (arg1) == ADDR_EXPR
8878 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8879 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8880 {
8881 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8882 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
8883 machine_mode mode;
8884 int volatilep, reversep, unsignedp;
8885 bool indirect_base0 = false, indirect_base1 = false;
8886
8887 /* Get base and offset for the access. Strip ADDR_EXPR for
8888 get_inner_reference, but put it back by stripping INDIRECT_REF
8889 off the base object if possible. indirect_baseN will be true
8890 if baseN is not an address but refers to the object itself. */
8891 base0 = arg0;
8892 if (TREE_CODE (arg0) == ADDR_EXPR)
8893 {
8894 base0
8895 = get_inner_reference (TREE_OPERAND (arg0, 0),
8896 &bitsize, &bitpos0, &offset0, &mode,
8897 &unsignedp, &reversep, &volatilep);
8898 if (TREE_CODE (base0) == INDIRECT_REF)
8899 base0 = TREE_OPERAND (base0, 0);
8900 else
8901 indirect_base0 = true;
8902 }
8903 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8904 {
8905 base0 = TREE_OPERAND (arg0, 0);
8906 STRIP_SIGN_NOPS (base0);
8907 if (TREE_CODE (base0) == ADDR_EXPR)
8908 {
8909 base0
8910 = get_inner_reference (TREE_OPERAND (base0, 0),
8911 &bitsize, &bitpos0, &offset0, &mode,
8912 &unsignedp, &reversep, &volatilep);
8913 if (TREE_CODE (base0) == INDIRECT_REF)
8914 base0 = TREE_OPERAND (base0, 0);
8915 else
8916 indirect_base0 = true;
8917 }
8918 if (offset0 == NULL_TREE || integer_zerop (offset0))
8919 offset0 = TREE_OPERAND (arg0, 1);
8920 else
8921 offset0 = size_binop (PLUS_EXPR, offset0,
8922 TREE_OPERAND (arg0, 1));
8923 if (poly_int_tree_p (offset0))
8924 {
8925 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
8926 TYPE_PRECISION (sizetype));
8927 tem <<= LOG2_BITS_PER_UNIT;
8928 tem += bitpos0;
8929 if (tem.to_shwi (&bitpos0))
8930 offset0 = NULL_TREE;
8931 }
8932 }
8933
8934 base1 = arg1;
8935 if (TREE_CODE (arg1) == ADDR_EXPR)
8936 {
8937 base1
8938 = get_inner_reference (TREE_OPERAND (arg1, 0),
8939 &bitsize, &bitpos1, &offset1, &mode,
8940 &unsignedp, &reversep, &volatilep);
8941 if (TREE_CODE (base1) == INDIRECT_REF)
8942 base1 = TREE_OPERAND (base1, 0);
8943 else
8944 indirect_base1 = true;
8945 }
8946 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8947 {
8948 base1 = TREE_OPERAND (arg1, 0);
8949 STRIP_SIGN_NOPS (base1);
8950 if (TREE_CODE (base1) == ADDR_EXPR)
8951 {
8952 base1
8953 = get_inner_reference (TREE_OPERAND (base1, 0),
8954 &bitsize, &bitpos1, &offset1, &mode,
8955 &unsignedp, &reversep, &volatilep);
8956 if (TREE_CODE (base1) == INDIRECT_REF)
8957 base1 = TREE_OPERAND (base1, 0);
8958 else
8959 indirect_base1 = true;
8960 }
8961 if (offset1 == NULL_TREE || integer_zerop (offset1))
8962 offset1 = TREE_OPERAND (arg1, 1);
8963 else
8964 offset1 = size_binop (PLUS_EXPR, offset1,
8965 TREE_OPERAND (arg1, 1));
8966 if (poly_int_tree_p (offset1))
8967 {
8968 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
8969 TYPE_PRECISION (sizetype));
8970 tem <<= LOG2_BITS_PER_UNIT;
8971 tem += bitpos1;
8972 if (tem.to_shwi (&bitpos1))
8973 offset1 = NULL_TREE;
8974 }
8975 }
8976
8977 /* If we have equivalent bases we might be able to simplify. */
8978 if (indirect_base0 == indirect_base1
8979 && operand_equal_p (base0, base1,
8980 indirect_base0 ? OEP_ADDRESS_OF : 0))
8981 {
8982 /* We can fold this expression to a constant if the non-constant
8983 offset parts are equal. */
8984 if ((offset0 == offset1
8985 || (offset0 && offset1
8986 && operand_equal_p (offset0, offset1, 0)))
8987 && (equality_code
8988 || (indirect_base0
8989 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8990 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8991 {
8992 if (!equality_code
8993 && maybe_ne (bitpos0, bitpos1)
8994 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8995 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8996 fold_overflow_warning (("assuming pointer wraparound does not "
8997 "occur when comparing P +- C1 with "
8998 "P +- C2"),
8999 WARN_STRICT_OVERFLOW_CONDITIONAL);
9000
9001 switch (code)
9002 {
9003 case EQ_EXPR:
9004 if (known_eq (bitpos0, bitpos1))
9005 return constant_boolean_node (true, type);
9006 if (known_ne (bitpos0, bitpos1))
9007 return constant_boolean_node (false, type);
9008 break;
9009 case NE_EXPR:
9010 if (known_ne (bitpos0, bitpos1))
9011 return constant_boolean_node (true, type);
9012 if (known_eq (bitpos0, bitpos1))
9013 return constant_boolean_node (false, type);
9014 break;
9015 case LT_EXPR:
9016 if (known_lt (bitpos0, bitpos1))
9017 return constant_boolean_node (true, type);
9018 if (known_ge (bitpos0, bitpos1))
9019 return constant_boolean_node (false, type);
9020 break;
9021 case LE_EXPR:
9022 if (known_le (bitpos0, bitpos1))
9023 return constant_boolean_node (true, type);
9024 if (known_gt (bitpos0, bitpos1))
9025 return constant_boolean_node (false, type);
9026 break;
9027 case GE_EXPR:
9028 if (known_ge (bitpos0, bitpos1))
9029 return constant_boolean_node (true, type);
9030 if (known_lt (bitpos0, bitpos1))
9031 return constant_boolean_node (false, type);
9032 break;
9033 case GT_EXPR:
9034 if (known_gt (bitpos0, bitpos1))
9035 return constant_boolean_node (true, type);
9036 if (known_le (bitpos0, bitpos1))
9037 return constant_boolean_node (false, type);
9038 break;
9039 default:;
9040 }
9041 }
9042 /* We can simplify the comparison to a comparison of the variable
9043 offset parts if the constant offset parts are equal.
9044 Be careful to use signed sizetype here because otherwise we
9045 mess with array offsets in the wrong way. This is possible
9046 because pointer arithmetic is restricted to remain within an
9047 object and overflow on pointer differences is undefined as of
9048 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9049 else if (known_eq (bitpos0, bitpos1)
9050 && (equality_code
9051 || (indirect_base0
9052 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9053 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9054 {
9055 /* By converting to signed sizetype we cover middle-end pointer
9056 arithmetic which operates on unsigned pointer types of size
9057 type size and ARRAY_REF offsets which are properly sign or
9058 zero extended from their type in case it is narrower than
9059 sizetype. */
9060 if (offset0 == NULL_TREE)
9061 offset0 = build_int_cst (ssizetype, 0);
9062 else
9063 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9064 if (offset1 == NULL_TREE)
9065 offset1 = build_int_cst (ssizetype, 0);
9066 else
9067 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9068
9069 if (!equality_code
9070 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9071 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9072 fold_overflow_warning (("assuming pointer wraparound does not "
9073 "occur when comparing P +- C1 with "
9074 "P +- C2"),
9075 WARN_STRICT_OVERFLOW_COMPARISON);
9076
9077 return fold_build2_loc (loc, code, type, offset0, offset1);
9078 }
9079 }
9080 /* For equal offsets we can simplify to a comparison of the
9081 base addresses. */
9082 else if (known_eq (bitpos0, bitpos1)
9083 && (indirect_base0
9084 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9085 && (indirect_base1
9086 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9087 && ((offset0 == offset1)
9088 || (offset0 && offset1
9089 && operand_equal_p (offset0, offset1, 0))))
9090 {
9091 if (indirect_base0)
9092 base0 = build_fold_addr_expr_loc (loc, base0);
9093 if (indirect_base1)
9094 base1 = build_fold_addr_expr_loc (loc, base1);
9095 return fold_build2_loc (loc, code, type, base0, base1);
9096 }
9097 /* Comparison between an ordinary (non-weak) symbol and a null
9098 pointer can be eliminated since such symbols must have a
9099 non-null address. In C, relational expressions between pointers
9100 to objects and null pointers are undefined. The results
9101 below follow the C++ rules with the additional property that
9102 every object pointer compares greater than a null pointer.
9103 */
9104 else if (((DECL_P (base0)
9105 && maybe_nonzero_address (base0) > 0
9106 /* Avoid folding references to struct members at offset 0 to
9107 prevent tests like '&ptr->firstmember == 0' from getting
9108 eliminated. When ptr is null, although the -> expression
9109 is strictly speaking invalid, GCC retains it as a matter
9110 of QoI. See PR c/44555. */
9111 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
9112 || CONSTANT_CLASS_P (base0))
9113 && indirect_base0
9114 /* The caller guarantees that when one of the arguments is
9115 constant (i.e., null in this case) it is second. */
9116 && integer_zerop (arg1))
9117 {
9118 switch (code)
9119 {
9120 case EQ_EXPR:
9121 case LE_EXPR:
9122 case LT_EXPR:
9123 return constant_boolean_node (false, type);
9124 case GE_EXPR:
9125 case GT_EXPR:
9126 case NE_EXPR:
9127 return constant_boolean_node (true, type);
9128 default:
9129 gcc_unreachable ();
9130 }
9131 }
9132 }
9133
9134 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9135 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9136 the resulting offset is smaller in absolute value than the
9137 original one and has the same sign. */
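/* For example, X + 2 < Y + 5 can become X < Y + 3: the new constant 3 is
smaller in magnitude than 5 and has the same sign. */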
9138 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9139 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9140 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9141 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9142 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9143 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9144 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9145 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9146 {
9147 tree const1 = TREE_OPERAND (arg0, 1);
9148 tree const2 = TREE_OPERAND (arg1, 1);
9149 tree variable1 = TREE_OPERAND (arg0, 0);
9150 tree variable2 = TREE_OPERAND (arg1, 0);
9151 tree cst;
9152 const char * const warnmsg = G_("assuming signed overflow does not "
9153 "occur when combining constants around "
9154 "a comparison");
9155
9156 /* Put the constant on the side where it doesn't overflow and is
9157 of lower absolute value and of the same sign as before. */
9158 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9159 ? MINUS_EXPR : PLUS_EXPR,
9160 const2, const1);
9161 if (!TREE_OVERFLOW (cst)
9162 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9163 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9164 {
9165 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9166 return fold_build2_loc (loc, code, type,
9167 variable1,
9168 fold_build2_loc (loc, TREE_CODE (arg1),
9169 TREE_TYPE (arg1),
9170 variable2, cst));
9171 }
9172
9173 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9174 ? MINUS_EXPR : PLUS_EXPR,
9175 const1, const2);
9176 if (!TREE_OVERFLOW (cst)
9177 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9178 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9179 {
9180 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9181 return fold_build2_loc (loc, code, type,
9182 fold_build2_loc (loc, TREE_CODE (arg0),
9183 TREE_TYPE (arg0),
9184 variable1, cst),
9185 variable2);
9186 }
9187 }
9188
9189 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9190 if (tem)
9191 return tem;
9192
9193 /* If we are comparing an expression that just has comparisons
9194 of two integer values, arithmetic expressions of those comparisons,
9195 and constants, we can simplify it. There are only three cases
9196 to check: the two values can either be equal, the first can be
9197 greater, or the second can be greater. Fold the expression for
9198 those three values. Since each value must be 0 or 1, we have
9199 eight possibilities, each of which corresponds to the constant 0
9200 or 1 or one of the six possible comparisons.
9201
9202 This handles common cases like (a > b) == 0 but also handles
9203 expressions like ((x > y) - (y > x)) > 0, which supposedly
9204 occur in macroized code. */
9205
9206 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9207 {
9208 tree cval1 = 0, cval2 = 0;
9209
9210 if (twoval_comparison_p (arg0, &cval1, &cval2)
9211 /* Don't handle degenerate cases here; they should already
9212 have been handled anyway. */
9213 && cval1 != 0 && cval2 != 0
9214 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9215 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9216 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9217 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9218 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9219 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9220 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9221 {
9222 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9223 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9224
9225 /* We can't just pass T to eval_subst in case cval1 or cval2
9226 was the same as ARG1. */
9227
9228 tree high_result
9229 = fold_build2_loc (loc, code, type,
9230 eval_subst (loc, arg0, cval1, maxval,
9231 cval2, minval),
9232 arg1);
9233 tree equal_result
9234 = fold_build2_loc (loc, code, type,
9235 eval_subst (loc, arg0, cval1, maxval,
9236 cval2, maxval),
9237 arg1);
9238 tree low_result
9239 = fold_build2_loc (loc, code, type,
9240 eval_subst (loc, arg0, cval1, minval,
9241 cval2, maxval),
9242 arg1);
9243
9244 /* All three of these results should be 0 or 1. Confirm they are.
9245 Then use those values to select the proper code to use. */
9246
9247 if (TREE_CODE (high_result) == INTEGER_CST
9248 && TREE_CODE (equal_result) == INTEGER_CST
9249 && TREE_CODE (low_result) == INTEGER_CST)
9250 {
9251 /* Make a 3-bit mask with the high-order bit being the
9252 value for `>', the next for '=', and the low for '<'. */
9253 switch ((integer_onep (high_result) * 4)
9254 + (integer_onep (equal_result) * 2)
9255 + integer_onep (low_result))
9256 {
9257 case 0:
9258 /* Always false. */
9259 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9260 case 1:
9261 code = LT_EXPR;
9262 break;
9263 case 2:
9264 code = EQ_EXPR;
9265 break;
9266 case 3:
9267 code = LE_EXPR;
9268 break;
9269 case 4:
9270 code = GT_EXPR;
9271 break;
9272 case 5:
9273 code = NE_EXPR;
9274 break;
9275 case 6:
9276 code = GE_EXPR;
9277 break;
9278 case 7:
9279 /* Always true. */
9280 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9281 }
9282
9283 return fold_build2_loc (loc, code, type, cval1, cval2);
9284 }
9285 }
9286 }
9287
9288 return NULL_TREE;
9289 }
9290
9291
9292 /* Subroutine of fold_binary. Optimize complex multiplications of the
9293 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9294 argument EXPR represents the expression "z" of type TYPE. */
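/* For example, with z = a + b*i, z * conj(z) folds to (a*a + b*b) + 0*i. */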
9295
9296 static tree
9297 fold_mult_zconjz (location_t loc, tree type, tree expr)
9298 {
9299 tree itype = TREE_TYPE (type);
9300 tree rpart, ipart, tem;
9301
9302 if (TREE_CODE (expr) == COMPLEX_EXPR)
9303 {
9304 rpart = TREE_OPERAND (expr, 0);
9305 ipart = TREE_OPERAND (expr, 1);
9306 }
9307 else if (TREE_CODE (expr) == COMPLEX_CST)
9308 {
9309 rpart = TREE_REALPART (expr);
9310 ipart = TREE_IMAGPART (expr);
9311 }
9312 else
9313 {
9314 expr = save_expr (expr);
9315 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9316 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9317 }
9318
9319 rpart = save_expr (rpart);
9320 ipart = save_expr (ipart);
9321 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9322 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9323 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9324 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9325 build_zero_cst (itype));
9326 }
9327
9328
9329 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9330 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
9331 true if successful. */
9332
9333 static bool
9334 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
9335 {
9336 unsigned HOST_WIDE_INT i, nunits;
9337
9338 if (TREE_CODE (arg) == VECTOR_CST
9339 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
9340 {
9341 for (i = 0; i < nunits; ++i)
9342 elts[i] = VECTOR_CST_ELT (arg, i);
9343 }
9344 else if (TREE_CODE (arg) == CONSTRUCTOR)
9345 {
9346 constructor_elt *elt;
9347
9348 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9349 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9350 return false;
9351 else
9352 elts[i] = elt->value;
9353 }
9354 else
9355 return false;
9356 for (; i < nelts; i++)
9357 elts[i]
9358 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9359 return true;
9360 }
9361
9362 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9363 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9364 NULL_TREE otherwise. */
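/* For example, permuting arg0 = {1, 2, 3, 4} and arg1 = {5, 6, 7, 8} with
the selector {0, 4, 1, 5} yields {1, 5, 2, 6}: indices below the vector
length select from arg0, the remaining indices select from arg1. */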
9365
9366 tree
9367 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
9368 {
9369 unsigned int i;
9370 unsigned HOST_WIDE_INT nelts;
9371 bool need_ctor = false;
9372
9373 if (!sel.length ().is_constant (&nelts))
9374 return NULL_TREE;
9375 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
9376 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9377 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
9378 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9379 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9380 return NULL_TREE;
9381
9382 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
9383 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
9384 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
9385 return NULL_TREE;
9386
9387 tree_vector_builder out_elts (type, nelts, 1);
9388 for (i = 0; i < nelts; i++)
9389 {
9390 HOST_WIDE_INT index;
9391 if (!sel[i].is_constant (&index))
9392 return NULL_TREE;
9393 if (!CONSTANT_CLASS_P (in_elts[index]))
9394 need_ctor = true;
9395 out_elts.quick_push (unshare_expr (in_elts[index]));
9396 }
9397
9398 if (need_ctor)
9399 {
9400 vec<constructor_elt, va_gc> *v;
9401 vec_alloc (v, nelts);
9402 for (i = 0; i < nelts; i++)
9403 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
9404 return build_constructor (type, v);
9405 }
9406 else
9407 return out_elts.build ();
9408 }
9409
9410 /* Try to fold a pointer difference of type TYPE of two address expressions of
9411 array references AREF0 and AREF1 using location LOC. Return a
9412 simplified expression for the difference or NULL_TREE. */
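/* For example, the byte difference &a[i] - &a[j] folds to
(i - j) * sizeof (a[0]); the division by the element size that C pointer
subtraction performs then cancels, leaving i - j. */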
9413
9414 static tree
9415 fold_addr_of_array_ref_difference (location_t loc, tree type,
9416 tree aref0, tree aref1,
9417 bool use_pointer_diff)
9418 {
9419 tree base0 = TREE_OPERAND (aref0, 0);
9420 tree base1 = TREE_OPERAND (aref1, 0);
9421 tree base_offset = build_int_cst (type, 0);
9422
9423 /* If the bases are array references as well, recurse. If the bases
9424 are pointer indirections compute the difference of the pointers.
9425 If the bases are equal, we are set. */
9426 if ((TREE_CODE (base0) == ARRAY_REF
9427 && TREE_CODE (base1) == ARRAY_REF
9428 && (base_offset
9429 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
9430 use_pointer_diff)))
9431 || (INDIRECT_REF_P (base0)
9432 && INDIRECT_REF_P (base1)
9433 && (base_offset
9434 = use_pointer_diff
9435 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
9436 TREE_OPERAND (base0, 0),
9437 TREE_OPERAND (base1, 0))
9438 : fold_binary_loc (loc, MINUS_EXPR, type,
9439 fold_convert (type,
9440 TREE_OPERAND (base0, 0)),
9441 fold_convert (type,
9442 TREE_OPERAND (base1, 0)))))
9443 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9444 {
9445 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9446 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9447 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9448 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
9449 return fold_build2_loc (loc, PLUS_EXPR, type,
9450 base_offset,
9451 fold_build2_loc (loc, MULT_EXPR, type,
9452 diff, esz));
9453 }
9454 return NULL_TREE;
9455 }
9456
9457 /* If the real or vector real constant CST of type TYPE has an exact
9458 inverse, return it, else return NULL. */
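/* For example, 4.0 has the exact inverse 0.25, so a division by 4.0 can
become a multiplication by 0.25; 3.0 has no exact binary inverse, so
NULL is returned for it. */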
9459
9460 tree
9461 exact_inverse (tree type, tree cst)
9462 {
9463 REAL_VALUE_TYPE r;
9464 tree unit_type;
9465 machine_mode mode;
9466
9467 switch (TREE_CODE (cst))
9468 {
9469 case REAL_CST:
9470 r = TREE_REAL_CST (cst);
9471
9472 if (exact_real_inverse (TYPE_MODE (type), &r))
9473 return build_real (type, r);
9474
9475 return NULL_TREE;
9476
9477 case VECTOR_CST:
9478 {
9479 unit_type = TREE_TYPE (type);
9480 mode = TYPE_MODE (unit_type);
9481
9482 tree_vector_builder elts;
9483 if (!elts.new_unary_operation (type, cst, false))
9484 return NULL_TREE;
9485 unsigned int count = elts.encoded_nelts ();
9486 for (unsigned int i = 0; i < count; ++i)
9487 {
9488 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9489 if (!exact_real_inverse (mode, &r))
9490 return NULL_TREE;
9491 elts.quick_push (build_real (unit_type, r));
9492 }
9493
9494 return elts.build ();
9495 }
9496
9497 default:
9498 return NULL_TREE;
9499 }
9500 }
9501
9502 /* Mask out the tz least significant bits of X of type TYPE where
9503 tz is the number of trailing zeroes in Y. */
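/* For example, if Y is 8 (three trailing zero bits), the three least
significant bits of X are cleared. */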
9504 static wide_int
9505 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9506 {
9507 int tz = wi::ctz (y);
9508 if (tz > 0)
9509 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9510 return x;
9511 }
9512
9513 /* Return true when T is an address and is known to be nonzero.
9514 For floating point we further ensure that T is not denormal.
9515 Similar logic is present in nonzero_address in rtlanal.h.
9516
9517 If the return value is based on the assumption that signed overflow
9518 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9519 change *STRICT_OVERFLOW_P. */
9520
9521 static bool
9522 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9523 {
9524 tree type = TREE_TYPE (t);
9525 enum tree_code code;
9526
9527 /* Doing something useful for floating point would need more work. */
9528 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9529 return false;
9530
9531 code = TREE_CODE (t);
9532 switch (TREE_CODE_CLASS (code))
9533 {
9534 case tcc_unary:
9535 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9536 strict_overflow_p);
9537 case tcc_binary:
9538 case tcc_comparison:
9539 return tree_binary_nonzero_warnv_p (code, type,
9540 TREE_OPERAND (t, 0),
9541 TREE_OPERAND (t, 1),
9542 strict_overflow_p);
9543 case tcc_constant:
9544 case tcc_declaration:
9545 case tcc_reference:
9546 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9547
9548 default:
9549 break;
9550 }
9551
9552 switch (code)
9553 {
9554 case TRUTH_NOT_EXPR:
9555 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9556 strict_overflow_p);
9557
9558 case TRUTH_AND_EXPR:
9559 case TRUTH_OR_EXPR:
9560 case TRUTH_XOR_EXPR:
9561 return tree_binary_nonzero_warnv_p (code, type,
9562 TREE_OPERAND (t, 0),
9563 TREE_OPERAND (t, 1),
9564 strict_overflow_p);
9565
9566 case COND_EXPR:
9567 case CONSTRUCTOR:
9568 case OBJ_TYPE_REF:
9569 case ASSERT_EXPR:
9570 case ADDR_EXPR:
9571 case WITH_SIZE_EXPR:
9572 case SSA_NAME:
9573 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9574
9575 case COMPOUND_EXPR:
9576 case MODIFY_EXPR:
9577 case BIND_EXPR:
9578 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9579 strict_overflow_p);
9580
9581 case SAVE_EXPR:
9582 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9583 strict_overflow_p);
9584
9585 case CALL_EXPR:
9586 {
9587 tree fndecl = get_callee_fndecl (t);
9588 if (!fndecl) return false;
9589 if (flag_delete_null_pointer_checks && !flag_check_new
9590 && DECL_IS_OPERATOR_NEW_P (fndecl)
9591 && !TREE_NOTHROW (fndecl))
9592 return true;
9593 if (flag_delete_null_pointer_checks
9594 && lookup_attribute ("returns_nonnull",
9595 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9596 return true;
9597 return alloca_call_p (t);
9598 }
9599
9600 default:
9601 break;
9602 }
9603 return false;
9604 }
9605
9606 /* Return true when T is an address and is known to be nonzero.
9607 Handle warnings about undefined signed overflow. */
9608
9609 bool
9610 tree_expr_nonzero_p (tree t)
9611 {
9612 bool ret, strict_overflow_p;
9613
9614 strict_overflow_p = false;
9615 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9616 if (strict_overflow_p)
9617 fold_overflow_warning (("assuming signed overflow does not occur when "
9618 "determining that expression is always "
9619 "non-zero"),
9620 WARN_STRICT_OVERFLOW_MISC);
9621 return ret;
9622 }
9623
9624 /* Return true if T is known not to be equal to an integer W. */
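/* For example, an SSA name whose value range is [10, 20] is known not to
equal 5, and one whose low bit is known to be zero cannot equal an odd W. */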
9625
9626 bool
9627 expr_not_equal_to (tree t, const wide_int &w)
9628 {
9629 wide_int min, max, nz;
9630 value_range_kind rtype;
9631 switch (TREE_CODE (t))
9632 {
9633 case INTEGER_CST:
9634 return wi::to_wide (t) != w;
9635
9636 case SSA_NAME:
9637 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9638 return false;
9639 rtype = get_range_info (t, &min, &max);
9640 if (rtype == VR_RANGE)
9641 {
9642 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9643 return true;
9644 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9645 return true;
9646 }
9647 else if (rtype == VR_ANTI_RANGE
9648 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9649 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9650 return true;
9651 /* If T has some known zero bits and W has any of those bits set,
9652 then T is known not to be equal to W. */
9653 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9654 TYPE_PRECISION (TREE_TYPE (t))), 0))
9655 return true;
9656 return false;
9657
9658 default:
9659 return false;
9660 }
9661 }
9662
9663 /* Fold a binary expression of code CODE and type TYPE with operands
9664 OP0 and OP1. LOC is the location of the resulting expression.
9665 Return the folded expression if folding is successful. Otherwise,
9666 return NULL_TREE. */
9667
9668 tree
9669 fold_binary_loc (location_t loc, enum tree_code code, tree type,
9670 tree op0, tree op1)
9671 {
9672 enum tree_code_class kind = TREE_CODE_CLASS (code);
9673 tree arg0, arg1, tem;
9674 tree t1 = NULL_TREE;
9675 bool strict_overflow_p;
9676 unsigned int prec;
9677
9678 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9679 && TREE_CODE_LENGTH (code) == 2
9680 && op0 != NULL_TREE
9681 && op1 != NULL_TREE);
9682
9683 arg0 = op0;
9684 arg1 = op1;
9685
9686 /* Strip any conversions that don't change the mode. This is
9687 safe for every expression, except for a comparison expression
9688 because its signedness is derived from its operands. So, in
9689 the latter case, only strip conversions that don't change the
9690 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9691 preserved.
9692
9693 Note that this is done as an internal manipulation within the
9694 constant folder, in order to find the simplest representation
9695 of the arguments so that their form can be studied. In any
9696 case, the appropriate type conversions should be put back in
9697 the tree that will get out of the constant folder. */
9698
9699 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9700 {
9701 STRIP_SIGN_NOPS (arg0);
9702 STRIP_SIGN_NOPS (arg1);
9703 }
9704 else
9705 {
9706 STRIP_NOPS (arg0);
9707 STRIP_NOPS (arg1);
9708 }
9709
9710 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9711 constant but we can't do arithmetic on them. */
9712 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9713 {
9714 tem = const_binop (code, type, arg0, arg1);
9715 if (tem != NULL_TREE)
9716 {
9717 if (TREE_TYPE (tem) != type)
9718 tem = fold_convert_loc (loc, type, tem);
9719 return tem;
9720 }
9721 }
9722
9723 /* If this is a commutative operation, and ARG0 is a constant, move it
9724 to ARG1 to reduce the number of tests below. */
9725 if (commutative_tree_code (code)
9726 && tree_swap_operands_p (arg0, arg1))
9727 return fold_build2_loc (loc, code, type, op1, op0);
9728
9729 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9730 to ARG1 to reduce the number of tests below. */
9731 if (kind == tcc_comparison
9732 && tree_swap_operands_p (arg0, arg1))
9733 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9734
9735 tem = generic_simplify (loc, code, type, op0, op1);
9736 if (tem)
9737 return tem;
9738
9739 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9740
9741 First check for cases where an arithmetic operation is applied to a
9742 compound, conditional, or comparison operation. Push the arithmetic
9743 operation inside the compound or conditional to see if any folding
9744 can then be done. Convert comparison to conditional for this purpose.
9745 The also optimizes non-constant cases that used to be done in
9746 expand_expr.
9747
9748 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9749 where one of the operands is a comparison and the other is a comparison, a
9750 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9751 code below would make the expression more complex. Change it to a
9752 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9753 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9754
9755 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9756 || code == EQ_EXPR || code == NE_EXPR)
9757 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
9758 && ((truth_value_p (TREE_CODE (arg0))
9759 && (truth_value_p (TREE_CODE (arg1))
9760 || (TREE_CODE (arg1) == BIT_AND_EXPR
9761 && integer_onep (TREE_OPERAND (arg1, 1)))))
9762 || (truth_value_p (TREE_CODE (arg1))
9763 && (truth_value_p (TREE_CODE (arg0))
9764 || (TREE_CODE (arg0) == BIT_AND_EXPR
9765 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9766 {
9767 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9768 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9769 : TRUTH_XOR_EXPR,
9770 boolean_type_node,
9771 fold_convert_loc (loc, boolean_type_node, arg0),
9772 fold_convert_loc (loc, boolean_type_node, arg1));
9773
9774 if (code == EQ_EXPR)
9775 tem = invert_truthvalue_loc (loc, tem);
9776
9777 return fold_convert_loc (loc, type, tem);
9778 }
9779
9780 if (TREE_CODE_CLASS (code) == tcc_binary
9781 || TREE_CODE_CLASS (code) == tcc_comparison)
9782 {
9783 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9784 {
9785 tem = fold_build2_loc (loc, code, type,
9786 fold_convert_loc (loc, TREE_TYPE (op0),
9787 TREE_OPERAND (arg0, 1)), op1);
9788 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9789 tem);
9790 }
9791 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9792 {
9793 tem = fold_build2_loc (loc, code, type, op0,
9794 fold_convert_loc (loc, TREE_TYPE (op1),
9795 TREE_OPERAND (arg1, 1)));
9796 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9797 tem);
9798 }
9799
9800 if (TREE_CODE (arg0) == COND_EXPR
9801 || TREE_CODE (arg0) == VEC_COND_EXPR
9802 || COMPARISON_CLASS_P (arg0))
9803 {
9804 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9805 arg0, arg1,
9806 /*cond_first_p=*/1);
9807 if (tem != NULL_TREE)
9808 return tem;
9809 }
9810
9811 if (TREE_CODE (arg1) == COND_EXPR
9812 || TREE_CODE (arg1) == VEC_COND_EXPR
9813 || COMPARISON_CLASS_P (arg1))
9814 {
9815 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9816 arg1, arg0,
9817 /*cond_first_p=*/0);
9818 if (tem != NULL_TREE)
9819 return tem;
9820 }
9821 }
9822
9823 switch (code)
9824 {
9825 case MEM_REF:
9826 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9827 if (TREE_CODE (arg0) == ADDR_EXPR
9828 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9829 {
9830 tree iref = TREE_OPERAND (arg0, 0);
9831 return fold_build2 (MEM_REF, type,
9832 TREE_OPERAND (iref, 0),
9833 int_const_binop (PLUS_EXPR, arg1,
9834 TREE_OPERAND (iref, 1)));
9835 }
9836
9837 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9838 if (TREE_CODE (arg0) == ADDR_EXPR
9839 && handled_component_p (TREE_OPERAND (arg0, 0)))
9840 {
9841 tree base;
9842 poly_int64 coffset;
9843 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9844 &coffset);
9845 if (!base)
9846 return NULL_TREE;
9847 return fold_build2 (MEM_REF, type,
9848 build_fold_addr_expr (base),
9849 int_const_binop (PLUS_EXPR, arg1,
9850 size_int (coffset)));
9851 }
9852
9853 return NULL_TREE;
9854
9855 case POINTER_PLUS_EXPR:
9856 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9857 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9858 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9859 return fold_convert_loc (loc, type,
9860 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9861 fold_convert_loc (loc, sizetype,
9862 arg1),
9863 fold_convert_loc (loc, sizetype,
9864 arg0)));
9865
9866 return NULL_TREE;
9867
9868 case PLUS_EXPR:
9869 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9870 {
9871 /* X + (X / CST) * -CST is X % CST. */
9872 if (TREE_CODE (arg1) == MULT_EXPR
9873 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9874 && operand_equal_p (arg0,
9875 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9876 {
9877 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9878 tree cst1 = TREE_OPERAND (arg1, 1);
9879 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9880 cst1, cst0);
9881 if (sum && integer_zerop (sum))
9882 return fold_convert_loc (loc, type,
9883 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9884 TREE_TYPE (arg0), arg0,
9885 cst0));
9886 }
9887 }
9888
9889 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9890 one. Make sure the type is not saturating and has the signedness of
9891 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9892 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9893 if ((TREE_CODE (arg0) == MULT_EXPR
9894 || TREE_CODE (arg1) == MULT_EXPR)
9895 && !TYPE_SATURATING (type)
9896 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9897 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9898 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9899 {
9900 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9901 if (tem)
9902 return tem;
9903 }
9904
9905 if (! FLOAT_TYPE_P (type))
9906 {
9907 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9908 (plus (plus (mult) (mult)) (foo)) so that we can
9909 take advantage of the factoring cases below. */
9910 if (ANY_INTEGRAL_TYPE_P (type)
9911 && TYPE_OVERFLOW_WRAPS (type)
9912 && (((TREE_CODE (arg0) == PLUS_EXPR
9913 || TREE_CODE (arg0) == MINUS_EXPR)
9914 && TREE_CODE (arg1) == MULT_EXPR)
9915 || ((TREE_CODE (arg1) == PLUS_EXPR
9916 || TREE_CODE (arg1) == MINUS_EXPR)
9917 && TREE_CODE (arg0) == MULT_EXPR)))
9918 {
9919 tree parg0, parg1, parg, marg;
9920 enum tree_code pcode;
9921
9922 if (TREE_CODE (arg1) == MULT_EXPR)
9923 parg = arg0, marg = arg1;
9924 else
9925 parg = arg1, marg = arg0;
9926 pcode = TREE_CODE (parg);
9927 parg0 = TREE_OPERAND (parg, 0);
9928 parg1 = TREE_OPERAND (parg, 1);
9929 STRIP_NOPS (parg0);
9930 STRIP_NOPS (parg1);
9931
9932 if (TREE_CODE (parg0) == MULT_EXPR
9933 && TREE_CODE (parg1) != MULT_EXPR)
9934 return fold_build2_loc (loc, pcode, type,
9935 fold_build2_loc (loc, PLUS_EXPR, type,
9936 fold_convert_loc (loc, type,
9937 parg0),
9938 fold_convert_loc (loc, type,
9939 marg)),
9940 fold_convert_loc (loc, type, parg1));
9941 if (TREE_CODE (parg0) != MULT_EXPR
9942 && TREE_CODE (parg1) == MULT_EXPR)
9943 return
9944 fold_build2_loc (loc, PLUS_EXPR, type,
9945 fold_convert_loc (loc, type, parg0),
9946 fold_build2_loc (loc, pcode, type,
9947 fold_convert_loc (loc, type, marg),
9948 fold_convert_loc (loc, type,
9949 parg1)));
9950 }
9951 }
9952 else
9953 {
9954 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9955 to __complex__ ( x, y ). This is not the same for SNaNs or
9956 if signed zeros are involved. */
9957 if (!HONOR_SNANS (element_mode (arg0))
9958 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9959 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9960 {
9961 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9962 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9963 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9964 bool arg0rz = false, arg0iz = false;
9965 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9966 || (arg0i && (arg0iz = real_zerop (arg0i))))
9967 {
9968 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9969 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9970 if (arg0rz && arg1i && real_zerop (arg1i))
9971 {
9972 tree rp = arg1r ? arg1r
9973 : build1 (REALPART_EXPR, rtype, arg1);
9974 tree ip = arg0i ? arg0i
9975 : build1 (IMAGPART_EXPR, rtype, arg0);
9976 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9977 }
9978 else if (arg0iz && arg1r && real_zerop (arg1r))
9979 {
9980 tree rp = arg0r ? arg0r
9981 : build1 (REALPART_EXPR, rtype, arg0);
9982 tree ip = arg1i ? arg1i
9983 : build1 (IMAGPART_EXPR, rtype, arg1);
9984 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9985 }
9986 }
9987 }
9988
9989 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9990 We associate floats only if the user has specified
9991 -fassociative-math. */
9992 if (flag_associative_math
9993 && TREE_CODE (arg1) == PLUS_EXPR
9994 && TREE_CODE (arg0) != MULT_EXPR)
9995 {
9996 tree tree10 = TREE_OPERAND (arg1, 0);
9997 tree tree11 = TREE_OPERAND (arg1, 1);
9998 if (TREE_CODE (tree11) == MULT_EXPR
9999 && TREE_CODE (tree10) == MULT_EXPR)
10000 {
10001 tree tree0;
10002 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10003 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10004 }
10005 }
10006 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10007 We associate floats only if the user has specified
10008 -fassociative-math. */
10009 if (flag_associative_math
10010 && TREE_CODE (arg0) == PLUS_EXPR
10011 && TREE_CODE (arg1) != MULT_EXPR)
10012 {
10013 tree tree00 = TREE_OPERAND (arg0, 0);
10014 tree tree01 = TREE_OPERAND (arg0, 1);
10015 if (TREE_CODE (tree01) == MULT_EXPR
10016 && TREE_CODE (tree00) == MULT_EXPR)
10017 {
10018 tree tree0;
10019 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10020 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10021 }
10022 }
10023 }
10024
10025 bit_rotate:
10026 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10027 is a rotate of A by C1 bits. */
10028 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10029 is a rotate of A by B bits.
10030 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
10031 though in this case CODE must be | and not + or ^, otherwise
10032 it doesn't return A when B is 0. */
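/* For example, with a 32-bit unsigned A, (A << 3) + (A >> 29) is a rotate
left by 3, and (A << B) | (A >> (-B & 31)) is a rotate left by B; the
latter must use | so the result is still A when B is 0. */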
10033 {
10034 enum tree_code code0, code1;
10035 tree rtype;
10036 code0 = TREE_CODE (arg0);
10037 code1 = TREE_CODE (arg1);
10038 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10039 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10040 && operand_equal_p (TREE_OPERAND (arg0, 0),
10041 TREE_OPERAND (arg1, 0), 0)
10042 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10043 TYPE_UNSIGNED (rtype))
10044 /* Only create rotates in complete modes. Other cases are not
10045 expanded properly. */
10046 && (element_precision (rtype)
10047 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
10048 {
10049 tree tree01, tree11;
10050 tree orig_tree01, orig_tree11;
10051 enum tree_code code01, code11;
10052
10053 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
10054 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
10055 STRIP_NOPS (tree01);
10056 STRIP_NOPS (tree11);
10057 code01 = TREE_CODE (tree01);
10058 code11 = TREE_CODE (tree11);
10059 if (code11 != MINUS_EXPR
10060 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
10061 {
10062 std::swap (code0, code1);
10063 std::swap (code01, code11);
10064 std::swap (tree01, tree11);
10065 std::swap (orig_tree01, orig_tree11);
10066 }
10067 if (code01 == INTEGER_CST
10068 && code11 == INTEGER_CST
10069 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10070 == element_precision (rtype)))
10071 {
10072 tem = build2_loc (loc, LROTATE_EXPR,
10073 rtype, TREE_OPERAND (arg0, 0),
10074 code0 == LSHIFT_EXPR
10075 ? orig_tree01 : orig_tree11);
10076 return fold_convert_loc (loc, type, tem);
10077 }
10078 else if (code11 == MINUS_EXPR)
10079 {
10080 tree tree110, tree111;
10081 tree110 = TREE_OPERAND (tree11, 0);
10082 tree111 = TREE_OPERAND (tree11, 1);
10083 STRIP_NOPS (tree110);
10084 STRIP_NOPS (tree111);
10085 if (TREE_CODE (tree110) == INTEGER_CST
10086 && compare_tree_int (tree110,
10087 element_precision (rtype)) == 0
10088 && operand_equal_p (tree01, tree111, 0))
10089 {
10090 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10091 ? LROTATE_EXPR : RROTATE_EXPR),
10092 rtype, TREE_OPERAND (arg0, 0),
10093 orig_tree01);
10094 return fold_convert_loc (loc, type, tem);
10095 }
10096 }
10097 else if (code == BIT_IOR_EXPR
10098 && code11 == BIT_AND_EXPR
10099 && pow2p_hwi (element_precision (rtype)))
10100 {
10101 tree tree110, tree111;
10102 tree110 = TREE_OPERAND (tree11, 0);
10103 tree111 = TREE_OPERAND (tree11, 1);
10104 STRIP_NOPS (tree110);
10105 STRIP_NOPS (tree111);
10106 if (TREE_CODE (tree110) == NEGATE_EXPR
10107 && TREE_CODE (tree111) == INTEGER_CST
10108 && compare_tree_int (tree111,
10109 element_precision (rtype) - 1) == 0
10110 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
10111 {
10112 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10113 ? LROTATE_EXPR : RROTATE_EXPR),
10114 rtype, TREE_OPERAND (arg0, 0),
10115 orig_tree01);
10116 return fold_convert_loc (loc, type, tem);
10117 }
10118 }
10119 }
10120 }
10121
10122 associate:
10123 /* In most languages, we can't associate operations on floats through
10124 parentheses. Rather than remember where the parentheses were, we
10125 don't associate floats at all, unless the user has specified
10126 -fassociative-math.
10127 And, we need to make sure type is not saturating. */
10128
10129 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10130 && !TYPE_SATURATING (type))
10131 {
10132 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
10133 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
10134 tree atype = type;
10135 bool ok = true;
10136
10137 /* Split both trees into variables, constants, and literals. Then
10138 associate each group together, the constants with literals,
10139 then the result with variables. This increases the chances of
10140 literals being recombined later and of generating relocatable
10141 expressions for the sum of a constant and literal. */
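/* For example, (x + 1) + (y + 2) is split into the variables x and y and
the literals 1 and 2, which can then be recombined as (x + y) + 3. */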
10142 var0 = split_tree (arg0, type, code,
10143 &minus_var0, &con0, &minus_con0,
10144 &lit0, &minus_lit0, 0);
10145 var1 = split_tree (arg1, type, code,
10146 &minus_var1, &con1, &minus_con1,
10147 &lit1, &minus_lit1, code == MINUS_EXPR);
10148
10149 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10150 if (code == MINUS_EXPR)
10151 code = PLUS_EXPR;
10152
10153 /* With undefined overflow prefer doing association in a type
10154 which wraps on overflow, if that is one of the operand types. */
10155 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
10156 && !TYPE_OVERFLOW_WRAPS (type))
10157 {
10158 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10159 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10160 atype = TREE_TYPE (arg0);
10161 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10162 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10163 atype = TREE_TYPE (arg1);
10164 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10165 }
10166
10167 /* With undefined overflow we can only associate constants with one
10168 variable, and constants whose association doesn't overflow. */
10169 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
10170 && !TYPE_OVERFLOW_WRAPS (atype))
10171 {
10172 if ((var0 && var1) || (minus_var0 && minus_var1))
10173 {
10174 /* ??? If split_tree would handle NEGATE_EXPR we could
10175 simply reject these cases and the allowed cases would
10176 be the var0/minus_var1 ones. */
10177 tree tmp0 = var0 ? var0 : minus_var0;
10178 tree tmp1 = var1 ? var1 : minus_var1;
10179 bool one_neg = false;
10180
10181 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10182 {
10183 tmp0 = TREE_OPERAND (tmp0, 0);
10184 one_neg = !one_neg;
10185 }
10186 if (CONVERT_EXPR_P (tmp0)
10187 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10188 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10189 <= TYPE_PRECISION (atype)))
10190 tmp0 = TREE_OPERAND (tmp0, 0);
10191 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10192 {
10193 tmp1 = TREE_OPERAND (tmp1, 0);
10194 one_neg = !one_neg;
10195 }
10196 if (CONVERT_EXPR_P (tmp1)
10197 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10198 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10199 <= TYPE_PRECISION (atype)))
10200 tmp1 = TREE_OPERAND (tmp1, 0);
10201 /* The only case we can still associate with two variables
10202 is if they cancel out. */
10203 if (!one_neg
10204 || !operand_equal_p (tmp0, tmp1, 0))
10205 ok = false;
10206 }
10207 else if ((var0 && minus_var1
10208 && ! operand_equal_p (var0, minus_var1, 0))
10209 || (minus_var0 && var1
10210 && ! operand_equal_p (minus_var0, var1, 0)))
10211 ok = false;
10212 }
10213
10214 /* Only do something if we found more than two objects. Otherwise,
10215 nothing has changed and we risk infinite recursion. */
10216 if (ok
10217 && ((var0 != 0) + (var1 != 0)
10218 + (minus_var0 != 0) + (minus_var1 != 0)
10219 + (con0 != 0) + (con1 != 0)
10220 + (minus_con0 != 0) + (minus_con1 != 0)
10221 + (lit0 != 0) + (lit1 != 0)
10222 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
10223 {
10224 var0 = associate_trees (loc, var0, var1, code, atype);
10225 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
10226 code, atype);
10227 con0 = associate_trees (loc, con0, con1, code, atype);
10228 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
10229 code, atype);
10230 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10231 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10232 code, atype);
10233
10234 if (minus_var0 && var0)
10235 {
10236 var0 = associate_trees (loc, var0, minus_var0,
10237 MINUS_EXPR, atype);
10238 minus_var0 = 0;
10239 }
10240 if (minus_con0 && con0)
10241 {
10242 con0 = associate_trees (loc, con0, minus_con0,
10243 MINUS_EXPR, atype);
10244 minus_con0 = 0;
10245 }
10246
10247 /* Preserve the MINUS_EXPR if the negative part of the literal is
10248 greater than the positive part. Otherwise, the multiplicative
10249 folding code (i.e. extract_muldiv) may be fooled in case
10250 unsigned constants are subtracted, like in the following
10251 example: ((X*2 + 4) - 8U)/2. */
10252 if (minus_lit0 && lit0)
10253 {
10254 if (TREE_CODE (lit0) == INTEGER_CST
10255 && TREE_CODE (minus_lit0) == INTEGER_CST
10256 && tree_int_cst_lt (lit0, minus_lit0)
10257 /* But avoid ending up with only negated parts. */
10258 && (var0 || con0))
10259 {
10260 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10261 MINUS_EXPR, atype);
10262 lit0 = 0;
10263 }
10264 else
10265 {
10266 lit0 = associate_trees (loc, lit0, minus_lit0,
10267 MINUS_EXPR, atype);
10268 minus_lit0 = 0;
10269 }
10270 }
10271
10272 /* Don't introduce overflows through reassociation. */
10273 if ((lit0 && TREE_OVERFLOW_P (lit0))
10274 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
10275 return NULL_TREE;
10276
10277 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
10278 con0 = associate_trees (loc, con0, lit0, code, atype);
10279 lit0 = 0;
10280 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
10281 code, atype);
10282 minus_lit0 = 0;
10283
10284 /* Eliminate minus_con0. */
10285 if (minus_con0)
10286 {
10287 if (con0)
10288 con0 = associate_trees (loc, con0, minus_con0,
10289 MINUS_EXPR, atype);
10290 else if (var0)
10291 var0 = associate_trees (loc, var0, minus_con0,
10292 MINUS_EXPR, atype);
10293 else
10294 gcc_unreachable ();
10295 minus_con0 = 0;
10296 }
10297
10298 /* Eliminate minus_var0. */
10299 if (minus_var0)
10300 {
10301 if (con0)
10302 con0 = associate_trees (loc, con0, minus_var0,
10303 MINUS_EXPR, atype);
10304 else
10305 gcc_unreachable ();
10306 minus_var0 = 0;
10307 }
10308
10309 return
10310 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10311 code, atype));
10312 }
10313 }
10314
10315 return NULL_TREE;
10316
10317 case POINTER_DIFF_EXPR:
10318 case MINUS_EXPR:
10319 /* Fold &a[i] - &a[j] to i-j. */
10320 if (TREE_CODE (arg0) == ADDR_EXPR
10321 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10322 && TREE_CODE (arg1) == ADDR_EXPR
10323 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10324 {
10325 tree tem = fold_addr_of_array_ref_difference (loc, type,
10326 TREE_OPERAND (arg0, 0),
10327 TREE_OPERAND (arg1, 0),
10328 code
10329 == POINTER_DIFF_EXPR);
10330 if (tem)
10331 return tem;
10332 }
10333
10334 /* Further transformations are not for pointers. */
10335 if (code == POINTER_DIFF_EXPR)
10336 return NULL_TREE;
10337
10338 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10339 if (TREE_CODE (arg0) == NEGATE_EXPR
10340 && negate_expr_p (op1)
10341 /* If arg0 is e.g. unsigned int and type is int, then this could
10342 introduce UB, because if A is INT_MIN at runtime, the original
10343 expression can be well defined while the latter is not.
10344 See PR83269. */
10345 && !(ANY_INTEGRAL_TYPE_P (type)
10346 && TYPE_OVERFLOW_UNDEFINED (type)
10347 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10348 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10349 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
10350 fold_convert_loc (loc, type,
10351 TREE_OPERAND (arg0, 0)));
10352
10353 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10354 __complex__ ( x, -y ). This is not the same for SNaNs or if
10355 signed zeros are involved. */
10356 if (!HONOR_SNANS (element_mode (arg0))
10357 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10358 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10359 {
10360 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10361 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10362 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10363 bool arg0rz = false, arg0iz = false;
10364 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10365 || (arg0i && (arg0iz = real_zerop (arg0i))))
10366 {
10367 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10368 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10369 if (arg0rz && arg1i && real_zerop (arg1i))
10370 {
10371 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10372 arg1r ? arg1r
10373 : build1 (REALPART_EXPR, rtype, arg1));
10374 tree ip = arg0i ? arg0i
10375 : build1 (IMAGPART_EXPR, rtype, arg0);
10376 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10377 }
10378 else if (arg0iz && arg1r && real_zerop (arg1r))
10379 {
10380 tree rp = arg0r ? arg0r
10381 : build1 (REALPART_EXPR, rtype, arg0);
10382 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10383 arg1i ? arg1i
10384 : build1 (IMAGPART_EXPR, rtype, arg1));
10385 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10386 }
10387 }
10388 }
10389
10390 /* A - B -> A + (-B) if B is easily negatable. */
10391 if (negate_expr_p (op1)
10392 && ! TYPE_OVERFLOW_SANITIZED (type)
10393 && ((FLOAT_TYPE_P (type)
10394 /* Avoid this transformation if B is a positive REAL_CST. */
10395 && (TREE_CODE (op1) != REAL_CST
10396 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
10397 || INTEGRAL_TYPE_P (type)))
10398 return fold_build2_loc (loc, PLUS_EXPR, type,
10399 fold_convert_loc (loc, type, arg0),
10400 negate_expr (op1));
10401
10402 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10403 one. Make sure the type is not saturating and has the signedness of
10404 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10405 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10406 if ((TREE_CODE (arg0) == MULT_EXPR
10407 || TREE_CODE (arg1) == MULT_EXPR)
10408 && !TYPE_SATURATING (type)
10409 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10410 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10411 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10412 {
10413 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10414 if (tem)
10415 return tem;
10416 }
10417
10418 goto associate;
10419
10420 case MULT_EXPR:
10421 if (! FLOAT_TYPE_P (type))
10422 {
10423 /* Transform x * -C into -x * C if x is easily negatable. */
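/* E.g. (illustrative): x * -4 becomes (-x) * 4 whenever negating
   x is known to be safe.  */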
10424 if (TREE_CODE (op1) == INTEGER_CST
10425 && tree_int_cst_sgn (op1) == -1
10426 && negate_expr_p (op0)
10427 && negate_expr_p (op1)
10428 && (tem = negate_expr (op1)) != op1
10429 && ! TREE_OVERFLOW (tem))
10430 return fold_build2_loc (loc, MULT_EXPR, type,
10431 fold_convert_loc (loc, type,
10432 negate_expr (op0)), tem);
10433
10434 strict_overflow_p = false;
10435 if (TREE_CODE (arg1) == INTEGER_CST
10436 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10437 &strict_overflow_p)) != 0)
10438 {
10439 if (strict_overflow_p)
10440 fold_overflow_warning (("assuming signed overflow does not "
10441 "occur when simplifying "
10442 "multiplication"),
10443 WARN_STRICT_OVERFLOW_MISC);
10444 return fold_convert_loc (loc, type, tem);
10445 }
10446
10447 /* Optimize z * conj(z) for integer complex numbers. */
10448 if (TREE_CODE (arg0) == CONJ_EXPR
10449 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10450 return fold_mult_zconjz (loc, type, arg1);
10451 if (TREE_CODE (arg1) == CONJ_EXPR
10452 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10453 return fold_mult_zconjz (loc, type, arg0);
10454 }
10455 else
10456 {
10457 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10458 This is not the same for NaNs or if signed zeros are
10459 involved. */
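/* Illustration: (a + b*I) * I is -b + a*I, hence the real and
   imaginary parts are swapped and one of them negated.  */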
10460 if (!HONOR_NANS (arg0)
10461 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10462 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10463 && TREE_CODE (arg1) == COMPLEX_CST
10464 && real_zerop (TREE_REALPART (arg1)))
10465 {
10466 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10467 if (real_onep (TREE_IMAGPART (arg1)))
10468 return
10469 fold_build2_loc (loc, COMPLEX_EXPR, type,
10470 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10471 rtype, arg0)),
10472 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10473 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10474 return
10475 fold_build2_loc (loc, COMPLEX_EXPR, type,
10476 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10477 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10478 rtype, arg0)));
10479 }
10480
10481 /* Optimize z * conj(z) for floating point complex numbers.
10482 Guarded by flag_unsafe_math_optimizations as non-finite
10483 imaginary components don't produce scalar results. */
10484 if (flag_unsafe_math_optimizations
10485 && TREE_CODE (arg0) == CONJ_EXPR
10486 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10487 return fold_mult_zconjz (loc, type, arg1);
10488 if (flag_unsafe_math_optimizations
10489 && TREE_CODE (arg1) == CONJ_EXPR
10490 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10491 return fold_mult_zconjz (loc, type, arg0);
10492 }
10493 goto associate;
10494
10495 case BIT_IOR_EXPR:
10496 /* Canonicalize (X & C1) | C2. */
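/* For example (illustrative, 8-bit type): (X & 0x0C) | 0x0F becomes
   (X, 0x0F) because (C1 & C2) == C1, while (X & 0xF0) | 0x0F becomes
   X | 0x0F because C1 | C2 covers every bit of the mode.  */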
10497 if (TREE_CODE (arg0) == BIT_AND_EXPR
10498 && TREE_CODE (arg1) == INTEGER_CST
10499 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10500 {
10501 int width = TYPE_PRECISION (type), w;
10502 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
10503 wide_int c2 = wi::to_wide (arg1);
10504
10505 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10506 if ((c1 & c2) == c1)
10507 return omit_one_operand_loc (loc, type, arg1,
10508 TREE_OPERAND (arg0, 0));
10509
10510 wide_int msk = wi::mask (width, false,
10511 TYPE_PRECISION (TREE_TYPE (arg1)));
10512
10513 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10514 if (wi::bit_and_not (msk, c1 | c2) == 0)
10515 {
10516 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10517 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10518 }
10519
10520 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10521 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10522 mode which allows further optimizations. */
10523 c1 &= msk;
10524 c2 &= msk;
10525 wide_int c3 = wi::bit_and_not (c1, c2);
10526 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10527 {
10528 wide_int mask = wi::mask (w, false,
10529 TYPE_PRECISION (type));
10530 if (((c1 | c2) & mask) == mask
10531 && wi::bit_and_not (c1, mask) == 0)
10532 {
10533 c3 = mask;
10534 break;
10535 }
10536 }
10537
10538 if (c3 != c1)
10539 {
10540 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10541 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
10542 wide_int_to_tree (type, c3));
10543 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10544 }
10545 }
10546
10547 /* See if this can be simplified into a rotate first. If that
10548 is unsuccessful continue in the association code. */
10549 goto bit_rotate;
10550
10551 case BIT_XOR_EXPR:
10552 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10553 if (TREE_CODE (arg0) == BIT_AND_EXPR
10554 && INTEGRAL_TYPE_P (type)
10555 && integer_onep (TREE_OPERAND (arg0, 1))
10556 && integer_onep (arg1))
10557 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10558 build_zero_cst (TREE_TYPE (arg0)));
10559
10560 /* See if this can be simplified into a rotate first. If that
10561 is unsuccessful continue in the association code. */
10562 goto bit_rotate;
10563
10564 case BIT_AND_EXPR:
10565 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10566 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10567 && INTEGRAL_TYPE_P (type)
10568 && integer_onep (TREE_OPERAND (arg0, 1))
10569 && integer_onep (arg1))
10570 {
10571 tree tem2;
10572 tem = TREE_OPERAND (arg0, 0);
10573 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10574 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10575 tem, tem2);
10576 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10577 build_zero_cst (TREE_TYPE (tem)));
10578 }
10579 /* Fold ~X & 1 as (X & 1) == 0. */
10580 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10581 && INTEGRAL_TYPE_P (type)
10582 && integer_onep (arg1))
10583 {
10584 tree tem2;
10585 tem = TREE_OPERAND (arg0, 0);
10586 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10587 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10588 tem, tem2);
10589 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10590 build_zero_cst (TREE_TYPE (tem)));
10591 }
10592 /* Fold !X & 1 as X == 0. */
10593 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10594 && integer_onep (arg1))
10595 {
10596 tem = TREE_OPERAND (arg0, 0);
10597 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10598 build_zero_cst (TREE_TYPE (tem)));
10599 }
10600
10601 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10602 multiple of 1 << CST. */
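/* E.g. (illustrative): (X * 8) & -4 folds to X * 8, since 8 is a
   multiple of 4 and the low two bits of X * 8 are already zero.  */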
10603 if (TREE_CODE (arg1) == INTEGER_CST)
10604 {
10605 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10606 wide_int ncst1 = -cst1;
10607 if ((cst1 & ncst1) == ncst1
10608 && multiple_of_p (type, arg0,
10609 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10610 return fold_convert_loc (loc, type, arg0);
10611 }
10612
10613 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10614 bits from CST2. */
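/* Illustrative cases: (X * 8) & 7 is known to be zero, while
   (X * 4) & 0x0D becomes (X * 4) & 0x0C because X * 4 cannot set
   its low two bits.  */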
10615 if (TREE_CODE (arg1) == INTEGER_CST
10616 && TREE_CODE (arg0) == MULT_EXPR
10617 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10618 {
10619 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
10620 wide_int masked
10621 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
10622
10623 if (masked == 0)
10624 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10625 arg0, arg1);
10626 else if (masked != warg1)
10627 {
10628 /* Avoid the transform if arg1 is a mask of some
10629 mode which allows further optimizations. */
10630 int pop = wi::popcount (warg1);
10631 if (!(pop >= BITS_PER_UNIT
10632 && pow2p_hwi (pop)
10633 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10634 return fold_build2_loc (loc, code, type, op0,
10635 wide_int_to_tree (type, masked));
10636 }
10637 }
10638
10639 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10640 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10641 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10642 {
10643 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10644
10645 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10646 if (mask == -1)
10647 return
10648 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10649 }
10650
10651 goto associate;
10652
10653 case RDIV_EXPR:
10654 /* Don't touch a floating-point divide by zero unless the mode
10655 of the constant can represent infinity. */
10656 if (TREE_CODE (arg1) == REAL_CST
10657 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10658 && real_zerop (arg1))
10659 return NULL_TREE;
10660
10661 /* (-A) / (-B) -> A / B */
10662 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10663 return fold_build2_loc (loc, RDIV_EXPR, type,
10664 TREE_OPERAND (arg0, 0),
10665 negate_expr (arg1));
10666 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10667 return fold_build2_loc (loc, RDIV_EXPR, type,
10668 negate_expr (arg0),
10669 TREE_OPERAND (arg1, 0));
10670 return NULL_TREE;
10671
10672 case TRUNC_DIV_EXPR:
10673 /* Fall through */
10674
10675 case FLOOR_DIV_EXPR:
10676 /* Simplify A / (B << N) where A and B are positive and B is
10677 a power of 2, to A >> (N + log2(B)). */
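/* E.g. (illustrative): for unsigned A, A / (4 << N) becomes
   A >> (N + 2).  */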
10678 strict_overflow_p = false;
10679 if (TREE_CODE (arg1) == LSHIFT_EXPR
10680 && (TYPE_UNSIGNED (type)
10681 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10682 {
10683 tree sval = TREE_OPERAND (arg1, 0);
10684 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10685 {
10686 tree sh_cnt = TREE_OPERAND (arg1, 1);
10687 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10688 wi::exact_log2 (wi::to_wide (sval)));
10689
10690 if (strict_overflow_p)
10691 fold_overflow_warning (("assuming signed overflow does not "
10692 "occur when simplifying A / (B << N)"),
10693 WARN_STRICT_OVERFLOW_MISC);
10694
10695 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10696 sh_cnt, pow2);
10697 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10698 fold_convert_loc (loc, type, arg0), sh_cnt);
10699 }
10700 }
10701
10702 /* Fall through */
10703
10704 case ROUND_DIV_EXPR:
10705 case CEIL_DIV_EXPR:
10706 case EXACT_DIV_EXPR:
10707 if (integer_zerop (arg1))
10708 return NULL_TREE;
10709
10710 /* Convert -A / -B to A / B when the type is signed and overflow is
10711 undefined. */
10712 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10713 && TREE_CODE (op0) == NEGATE_EXPR
10714 && negate_expr_p (op1))
10715 {
10716 if (INTEGRAL_TYPE_P (type))
10717 fold_overflow_warning (("assuming signed overflow does not occur "
10718 "when distributing negation across "
10719 "division"),
10720 WARN_STRICT_OVERFLOW_MISC);
10721 return fold_build2_loc (loc, code, type,
10722 fold_convert_loc (loc, type,
10723 TREE_OPERAND (arg0, 0)),
10724 negate_expr (op1));
10725 }
10726 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10727 && TREE_CODE (arg1) == NEGATE_EXPR
10728 && negate_expr_p (op0))
10729 {
10730 if (INTEGRAL_TYPE_P (type))
10731 fold_overflow_warning (("assuming signed overflow does not occur "
10732 "when distributing negation across "
10733 "division"),
10734 WARN_STRICT_OVERFLOW_MISC);
10735 return fold_build2_loc (loc, code, type,
10736 negate_expr (op0),
10737 fold_convert_loc (loc, type,
10738 TREE_OPERAND (arg1, 0)));
10739 }
10740
10741 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10742 operation, EXACT_DIV_EXPR.
10743
10744 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10745 At one time others generated faster code, but it's not clear whether they
10746 still do after the last round of changes to the DIV code in expmed.c. */
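/* E.g. (illustrative): for FLOOR_DIV_EXPR, (X * 4) / 2 is rewritten
   as an EXACT_DIV_EXPR with the same operands.  */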
10747 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10748 && multiple_of_p (type, arg0, arg1))
10749 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10750 fold_convert (type, arg0),
10751 fold_convert (type, arg1));
10752
10753 strict_overflow_p = false;
10754 if (TREE_CODE (arg1) == INTEGER_CST
10755 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10756 &strict_overflow_p)) != 0)
10757 {
10758 if (strict_overflow_p)
10759 fold_overflow_warning (("assuming signed overflow does not occur "
10760 "when simplifying division"),
10761 WARN_STRICT_OVERFLOW_MISC);
10762 return fold_convert_loc (loc, type, tem);
10763 }
10764
10765 return NULL_TREE;
10766
10767 case CEIL_MOD_EXPR:
10768 case FLOOR_MOD_EXPR:
10769 case ROUND_MOD_EXPR:
10770 case TRUNC_MOD_EXPR:
10771 strict_overflow_p = false;
10772 if (TREE_CODE (arg1) == INTEGER_CST
10773 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10774 &strict_overflow_p)) != 0)
10775 {
10776 if (strict_overflow_p)
10777 fold_overflow_warning (("assuming signed overflow does not occur "
10778 "when simplifying modulus"),
10779 WARN_STRICT_OVERFLOW_MISC);
10780 return fold_convert_loc (loc, type, tem);
10781 }
10782
10783 return NULL_TREE;
10784
10785 case LROTATE_EXPR:
10786 case RROTATE_EXPR:
10787 case RSHIFT_EXPR:
10788 case LSHIFT_EXPR:
10789 /* Since negative shift count is not well-defined,
10790 don't try to compute it in the compiler. */
10791 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10792 return NULL_TREE;
10793
10794 prec = element_precision (type);
10795
10796 /* If we have a rotate of a bit operation with the rotate count and
10797 the second operand of the bit operation both constant,
10798 permute the two operations. */
10799 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10800 && (TREE_CODE (arg0) == BIT_AND_EXPR
10801 || TREE_CODE (arg0) == BIT_IOR_EXPR
10802 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10803 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10804 {
10805 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10806 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10807 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10808 fold_build2_loc (loc, code, type,
10809 arg00, arg1),
10810 fold_build2_loc (loc, code, type,
10811 arg01, arg1));
10812 }
10813
10814 /* Two consecutive rotates adding up to some integer
10815 multiple of the precision of the type can be ignored. */
10816 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10817 && TREE_CODE (arg0) == RROTATE_EXPR
10818 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10819 && wi::umod_trunc (wi::to_wide (arg1)
10820 + wi::to_wide (TREE_OPERAND (arg0, 1)),
10821 prec) == 0)
10822 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10823
10824 return NULL_TREE;
10825
10826 case MIN_EXPR:
10827 case MAX_EXPR:
10828 goto associate;
10829
10830 case TRUTH_ANDIF_EXPR:
10831 /* Note that the operands of this must be ints
10832 and their values must be 0 or 1.
10833 ("true" is a fixed value perhaps depending on the language.) */
10834 /* If first arg is constant zero, return it. */
10835 if (integer_zerop (arg0))
10836 return fold_convert_loc (loc, type, arg0);
10837 /* FALLTHRU */
10838 case TRUTH_AND_EXPR:
10839 /* If either arg is constant true, drop it. */
10840 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10841 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10842 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10843 /* Preserve sequence points. */
10844 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10845 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10846 /* If second arg is constant zero, result is zero, but first arg
10847 must be evaluated. */
10848 if (integer_zerop (arg1))
10849 return omit_one_operand_loc (loc, type, arg1, arg0);
10850 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10851 case will be handled here. */
10852 if (integer_zerop (arg0))
10853 return omit_one_operand_loc (loc, type, arg0, arg1);
10854
10855 /* !X && X is always false. */
10856 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10857 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10858 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10859 /* X && !X is always false. */
10860 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10861 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10862 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10863
10864 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10865 means A >= Y && A != MAX, but in this case we know that
10866 A < X <= MAX. */
10867
10868 if (!TREE_SIDE_EFFECTS (arg0)
10869 && !TREE_SIDE_EFFECTS (arg1))
10870 {
10871 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10872 if (tem && !operand_equal_p (tem, arg0, 0))
10873 return fold_build2_loc (loc, code, type, tem, arg1);
10874
10875 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10876 if (tem && !operand_equal_p (tem, arg1, 0))
10877 return fold_build2_loc (loc, code, type, arg0, tem);
10878 }
10879
10880 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10881 != NULL_TREE)
10882 return tem;
10883
10884 return NULL_TREE;
10885
10886 case TRUTH_ORIF_EXPR:
10887 /* Note that the operands of this must be ints
10888 and their values must be 0 or true.
10889 ("true" is a fixed value perhaps depending on the language.) */
10890 /* If first arg is constant true, return it. */
10891 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10892 return fold_convert_loc (loc, type, arg0);
10893 /* FALLTHRU */
10894 case TRUTH_OR_EXPR:
10895 /* If either arg is constant zero, drop it. */
10896 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10897 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10898 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10899 /* Preserve sequence points. */
10900 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10901 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10902 /* If second arg is constant true, result is true, but we must
10903 evaluate first arg. */
10904 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10905 return omit_one_operand_loc (loc, type, arg1, arg0);
10906 /* Likewise for first arg, but note this only occurs here for
10907 TRUTH_OR_EXPR. */
10908 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10909 return omit_one_operand_loc (loc, type, arg0, arg1);
10910
10911 /* !X || X is always true. */
10912 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10913 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10914 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10915 /* X || !X is always true. */
10916 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10917 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10918 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10919
10920 /* (X && !Y) || (!X && Y) is X ^ Y */
10921 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10922 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10923 {
10924 tree a0, a1, l0, l1, n0, n1;
10925
10926 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10927 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10928
10929 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10930 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10931
10932 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10933 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10934
10935 if ((operand_equal_p (n0, a0, 0)
10936 && operand_equal_p (n1, a1, 0))
10937 || (operand_equal_p (n0, a1, 0)
10938 && operand_equal_p (n1, a0, 0)))
10939 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10940 }
10941
10942 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10943 != NULL_TREE)
10944 return tem;
10945
10946 return NULL_TREE;
10947
10948 case TRUTH_XOR_EXPR:
10949 /* If the second arg is constant zero, drop it. */
10950 if (integer_zerop (arg1))
10951 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10952 /* If the second arg is constant true, this is a logical inversion. */
10953 if (integer_onep (arg1))
10954 {
10955 tem = invert_truthvalue_loc (loc, arg0);
10956 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10957 }
10958 /* Identical arguments cancel to zero. */
10959 if (operand_equal_p (arg0, arg1, 0))
10960 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10961
10962 /* !X ^ X is always true. */
10963 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10964 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10965 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10966
10967 /* X ^ !X is always true. */
10968 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10969 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10970 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10971
10972 return NULL_TREE;
10973
10974 case EQ_EXPR:
10975 case NE_EXPR:
10976 STRIP_NOPS (arg0);
10977 STRIP_NOPS (arg1);
10978
10979 tem = fold_comparison (loc, code, type, op0, op1);
10980 if (tem != NULL_TREE)
10981 return tem;
10982
10983 /* bool_var != 1 becomes !bool_var. */
10984 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10985 && code == NE_EXPR)
10986 return fold_convert_loc (loc, type,
10987 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10988 TREE_TYPE (arg0), arg0));
10989
10990 /* bool_var == 0 becomes !bool_var. */
10991 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10992 && code == EQ_EXPR)
10993 return fold_convert_loc (loc, type,
10994 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10995 TREE_TYPE (arg0), arg0));
10996
10997 /* !exp != 0 becomes !exp */
10998 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10999 && code == NE_EXPR)
11000 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11001
11002 /* If this is an EQ or NE comparison with zero and ARG0 is
11003 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11004 two operations, but the latter can be done in one less insn
11005 on machines that have only two-operand insns or on which a
11006 constant cannot be the first operand. */
11007 if (TREE_CODE (arg0) == BIT_AND_EXPR
11008 && integer_zerop (arg1))
11009 {
11010 tree arg00 = TREE_OPERAND (arg0, 0);
11011 tree arg01 = TREE_OPERAND (arg0, 1);
11012 if (TREE_CODE (arg00) == LSHIFT_EXPR
11013 && integer_onep (TREE_OPERAND (arg00, 0)))
11014 {
11015 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11016 arg01, TREE_OPERAND (arg00, 1));
11017 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11018 build_int_cst (TREE_TYPE (arg0), 1));
11019 return fold_build2_loc (loc, code, type,
11020 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11021 arg1);
11022 }
11023 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11024 && integer_onep (TREE_OPERAND (arg01, 0)))
11025 {
11026 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
11027 arg00, TREE_OPERAND (arg01, 1));
11028 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11029 build_int_cst (TREE_TYPE (arg0), 1));
11030 return fold_build2_loc (loc, code, type,
11031 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11032 arg1);
11033 }
11034 }
11035
11036 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11037 C1 is a valid shift constant, and C2 is a power of two, i.e.
11038 a single bit. */
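/* Worked examples (illustrative, 32-bit int): ((X >> 3) & 2) != 0
   becomes (X & 16) != 0 since 2 << 3 does not overflow, while for
   signed X, ((X >> 30) & 4) != 0 becomes X < 0 because the tested
   bit is a copy of the sign bit.  */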
11039 if (TREE_CODE (arg0) == BIT_AND_EXPR
11040 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11041 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11042 == INTEGER_CST
11043 && integer_pow2p (TREE_OPERAND (arg0, 1))
11044 && integer_zerop (arg1))
11045 {
11046 tree itype = TREE_TYPE (arg0);
11047 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11048 prec = TYPE_PRECISION (itype);
11049
11050 /* Check for a valid shift count. */
11051 if (wi::ltu_p (wi::to_wide (arg001), prec))
11052 {
11053 tree arg01 = TREE_OPERAND (arg0, 1);
11054 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11055 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11056 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11057 can be rewritten as (X & (C2 << C1)) != 0. */
11058 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11059 {
11060 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
11061 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
11062 return fold_build2_loc (loc, code, type, tem,
11063 fold_convert_loc (loc, itype, arg1));
11064 }
11065 /* Otherwise, for signed (arithmetic) shifts,
11066 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11067 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11068 else if (!TYPE_UNSIGNED (itype))
11069 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11070 arg000, build_int_cst (itype, 0));
11071 /* Otherwise, for unsigned (logical) shifts,
11072 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11073 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11074 else
11075 return omit_one_operand_loc (loc, type,
11076 code == EQ_EXPR ? integer_one_node
11077 : integer_zero_node,
11078 arg000);
11079 }
11080 }
11081
11082 /* If this is a comparison of a field, we may be able to simplify it. */
11083 if ((TREE_CODE (arg0) == COMPONENT_REF
11084 || TREE_CODE (arg0) == BIT_FIELD_REF)
11085 /* Handle the constant case even without -O
11086 to make sure the warnings are given. */
11087 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11088 {
11089 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
11090 if (t1)
11091 return t1;
11092 }
11093
11094 /* Optimize comparisons of strlen vs zero to a compare of the
11095 first character of the string vs zero. To wit,
11096 strlen(ptr) == 0 => *ptr == 0
11097 strlen(ptr) != 0 => *ptr != 0
11098 Other cases should reduce to one of these two (or a constant)
11099 due to the return value of strlen being unsigned. */
11100 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
11101 {
11102 tree fndecl = get_callee_fndecl (arg0);
11103
11104 if (fndecl
11105 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
11106 && call_expr_nargs (arg0) == 1
11107 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
11108 == POINTER_TYPE))
11109 {
11110 tree ptrtype
11111 = build_pointer_type (build_qualified_type (char_type_node,
11112 TYPE_QUAL_CONST));
11113 tree ptr = fold_convert_loc (loc, ptrtype,
11114 CALL_EXPR_ARG (arg0, 0));
11115 tree iref = build_fold_indirect_ref_loc (loc, ptr);
11116 return fold_build2_loc (loc, code, type, iref,
11117 build_int_cst (TREE_TYPE (iref), 0));
11118 }
11119 }
11120
11121 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11122 of X. Similarly fold (X >> C) == 0 into X >= 0. */
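/* E.g. (illustrative): for 32-bit X, (X >> 31) != 0 becomes X < 0,
   converting X to the corresponding signed type first if it is
   unsigned.  */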
11123 if (TREE_CODE (arg0) == RSHIFT_EXPR
11124 && integer_zerop (arg1)
11125 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11126 {
11127 tree arg00 = TREE_OPERAND (arg0, 0);
11128 tree arg01 = TREE_OPERAND (arg0, 1);
11129 tree itype = TREE_TYPE (arg00);
11130 if (wi::to_wide (arg01) == element_precision (itype) - 1)
11131 {
11132 if (TYPE_UNSIGNED (itype))
11133 {
11134 itype = signed_type_for (itype);
11135 arg00 = fold_convert_loc (loc, itype, arg00);
11136 }
11137 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11138 type, arg00, build_zero_cst (itype));
11139 }
11140 }
11141
11142 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11143 (X & C) == 0 when C is a single bit. */
11144 if (TREE_CODE (arg0) == BIT_AND_EXPR
11145 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11146 && integer_zerop (arg1)
11147 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11148 {
11149 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11150 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11151 TREE_OPERAND (arg0, 1));
11152 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11153 type, tem,
11154 fold_convert_loc (loc, TREE_TYPE (arg0),
11155 arg1));
11156 }
11157
11158 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11159 constant C is a power of two, i.e. a single bit. */
11160 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11161 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11162 && integer_zerop (arg1)
11163 && integer_pow2p (TREE_OPERAND (arg0, 1))
11164 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11165 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11166 {
11167 tree arg00 = TREE_OPERAND (arg0, 0);
11168 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11169 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11170 }
11171
11172 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11173 when C is a power of two, i.e. a single bit. */
11174 if (TREE_CODE (arg0) == BIT_AND_EXPR
11175 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11176 && integer_zerop (arg1)
11177 && integer_pow2p (TREE_OPERAND (arg0, 1))
11178 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11179 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11180 {
11181 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11182 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11183 arg000, TREE_OPERAND (arg0, 1));
11184 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11185 tem, build_int_cst (TREE_TYPE (tem), 0));
11186 }
11187
11188 if (integer_zerop (arg1)
11189 && tree_expr_nonzero_p (arg0))
11190 {
11191 tree res = constant_boolean_node (code==NE_EXPR, type);
11192 return omit_one_operand_loc (loc, type, res, arg0);
11193 }
11194
11195 /* Fold "(X & C) op (Y & C)" as "(X ^ Y) & C op 0", and symmetries. */
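/* This holds because (X & C) and (Y & C) are equal exactly when X and
   Y agree on every bit selected by C, i.e. when ((X ^ Y) & C) == 0.  */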
11196 if (TREE_CODE (arg0) == BIT_AND_EXPR
11197 && TREE_CODE (arg1) == BIT_AND_EXPR)
11198 {
11199 tree arg00 = TREE_OPERAND (arg0, 0);
11200 tree arg01 = TREE_OPERAND (arg0, 1);
11201 tree arg10 = TREE_OPERAND (arg1, 0);
11202 tree arg11 = TREE_OPERAND (arg1, 1);
11203 tree itype = TREE_TYPE (arg0);
11204
11205 if (operand_equal_p (arg01, arg11, 0))
11206 {
11207 tem = fold_convert_loc (loc, itype, arg10);
11208 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11209 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
11210 return fold_build2_loc (loc, code, type, tem,
11211 build_zero_cst (itype));
11212 }
11213 if (operand_equal_p (arg01, arg10, 0))
11214 {
11215 tem = fold_convert_loc (loc, itype, arg11);
11216 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11217 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
11218 return fold_build2_loc (loc, code, type, tem,
11219 build_zero_cst (itype));
11220 }
11221 if (operand_equal_p (arg00, arg11, 0))
11222 {
11223 tem = fold_convert_loc (loc, itype, arg10);
11224 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
11225 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
11226 return fold_build2_loc (loc, code, type, tem,
11227 build_zero_cst (itype));
11228 }
11229 if (operand_equal_p (arg00, arg10, 0))
11230 {
11231 tem = fold_convert_loc (loc, itype, arg11);
11232 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
11233 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
11234 return fold_build2_loc (loc, code, type, tem,
11235 build_zero_cst (itype));
11236 }
11237 }
11238
11239 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11240 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11241 {
11242 tree arg00 = TREE_OPERAND (arg0, 0);
11243 tree arg01 = TREE_OPERAND (arg0, 1);
11244 tree arg10 = TREE_OPERAND (arg1, 0);
11245 tree arg11 = TREE_OPERAND (arg1, 1);
11246 tree itype = TREE_TYPE (arg0);
11247
11248 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11249 operand_equal_p guarantees no side-effects so we don't need
11250 to use omit_one_operand on Z. */
11251 if (operand_equal_p (arg01, arg11, 0))
11252 return fold_build2_loc (loc, code, type, arg00,
11253 fold_convert_loc (loc, TREE_TYPE (arg00),
11254 arg10));
11255 if (operand_equal_p (arg01, arg10, 0))
11256 return fold_build2_loc (loc, code, type, arg00,
11257 fold_convert_loc (loc, TREE_TYPE (arg00),
11258 arg11));
11259 if (operand_equal_p (arg00, arg11, 0))
11260 return fold_build2_loc (loc, code, type, arg01,
11261 fold_convert_loc (loc, TREE_TYPE (arg01),
11262 arg10));
11263 if (operand_equal_p (arg00, arg10, 0))
11264 return fold_build2_loc (loc, code, type, arg01,
11265 fold_convert_loc (loc, TREE_TYPE (arg01),
11266 arg11));
11267
11268 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
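/* This follows from xoring both sides with C2: (X ^ C1) == (Y ^ C2)
   is equivalent to (X ^ C1 ^ C2) == Y.  */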
11269 if (TREE_CODE (arg01) == INTEGER_CST
11270 && TREE_CODE (arg11) == INTEGER_CST)
11271 {
11272 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11273 fold_convert_loc (loc, itype, arg11));
11274 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11275 return fold_build2_loc (loc, code, type, tem,
11276 fold_convert_loc (loc, itype, arg10));
11277 }
11278 }
11279
11280 /* Attempt to simplify equality/inequality comparisons of complex
11281 values. Only lower the comparison if the result is known or
11282 can be simplified to a single scalar comparison. */
11283 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11284 || TREE_CODE (arg0) == COMPLEX_CST)
11285 && (TREE_CODE (arg1) == COMPLEX_EXPR
11286 || TREE_CODE (arg1) == COMPLEX_CST))
11287 {
11288 tree real0, imag0, real1, imag1;
11289 tree rcond, icond;
11290
11291 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11292 {
11293 real0 = TREE_OPERAND (arg0, 0);
11294 imag0 = TREE_OPERAND (arg0, 1);
11295 }
11296 else
11297 {
11298 real0 = TREE_REALPART (arg0);
11299 imag0 = TREE_IMAGPART (arg0);
11300 }
11301
11302 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11303 {
11304 real1 = TREE_OPERAND (arg1, 0);
11305 imag1 = TREE_OPERAND (arg1, 1);
11306 }
11307 else
11308 {
11309 real1 = TREE_REALPART (arg1);
11310 imag1 = TREE_IMAGPART (arg1);
11311 }
11312
11313 rcond = fold_binary_loc (loc, code, type, real0, real1);
11314 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11315 {
11316 if (integer_zerop (rcond))
11317 {
11318 if (code == EQ_EXPR)
11319 return omit_two_operands_loc (loc, type, boolean_false_node,
11320 imag0, imag1);
11321 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11322 }
11323 else
11324 {
11325 if (code == NE_EXPR)
11326 return omit_two_operands_loc (loc, type, boolean_true_node,
11327 imag0, imag1);
11328 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11329 }
11330 }
11331
11332 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11333 if (icond && TREE_CODE (icond) == INTEGER_CST)
11334 {
11335 if (integer_zerop (icond))
11336 {
11337 if (code == EQ_EXPR)
11338 return omit_two_operands_loc (loc, type, boolean_false_node,
11339 real0, real1);
11340 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11341 }
11342 else
11343 {
11344 if (code == NE_EXPR)
11345 return omit_two_operands_loc (loc, type, boolean_true_node,
11346 real0, real1);
11347 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11348 }
11349 }
11350 }
11351
11352 return NULL_TREE;
11353
11354 case LT_EXPR:
11355 case GT_EXPR:
11356 case LE_EXPR:
11357 case GE_EXPR:
11358 tem = fold_comparison (loc, code, type, op0, op1);
11359 if (tem != NULL_TREE)
11360 return tem;
11361
11362 /* Transform comparisons of the form X +- C CMP X. */
11363 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11364 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11365 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11366 && !HONOR_SNANS (arg0))
11367 {
11368 tree arg01 = TREE_OPERAND (arg0, 1);
11369 enum tree_code code0 = TREE_CODE (arg0);
11370 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11371
11372 /* (X - c) > X becomes false. */
11373 if (code == GT_EXPR
11374 && ((code0 == MINUS_EXPR && is_positive >= 0)
11375 || (code0 == PLUS_EXPR && is_positive <= 0)))
11376 return constant_boolean_node (0, type);
11377
11378 /* Likewise (X + c) < X becomes false. */
11379 if (code == LT_EXPR
11380 && ((code0 == PLUS_EXPR && is_positive >= 0)
11381 || (code0 == MINUS_EXPR && is_positive <= 0)))
11382 return constant_boolean_node (0, type);
11383
11384 /* Convert (X - c) <= X to true. */
11385 if (!HONOR_NANS (arg1)
11386 && code == LE_EXPR
11387 && ((code0 == MINUS_EXPR && is_positive >= 0)
11388 || (code0 == PLUS_EXPR && is_positive <= 0)))
11389 return constant_boolean_node (1, type);
11390
11391 /* Convert (X + c) >= X to true. */
11392 if (!HONOR_NANS (arg1)
11393 && code == GE_EXPR
11394 && ((code0 == PLUS_EXPR && is_positive >= 0)
11395 || (code0 == MINUS_EXPR && is_positive <= 0)))
11396 return constant_boolean_node (1, type);
11397 }
11398
11399 /* If we are comparing an ABS_EXPR with a constant, we can
11400 convert all the cases into explicit comparisons, but they may
11401 well not be faster than doing the ABS and one comparison.
11402 But ABS (X) <= C is a range comparison, which becomes a subtraction
11403 and a comparison, and is probably faster. */
11404 if (code == LE_EXPR
11405 && TREE_CODE (arg1) == INTEGER_CST
11406 && TREE_CODE (arg0) == ABS_EXPR
11407 && ! TREE_SIDE_EFFECTS (arg0)
11408 && (tem = negate_expr (arg1)) != 0
11409 && TREE_CODE (tem) == INTEGER_CST
11410 && !TREE_OVERFLOW (tem))
11411 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11412 build2 (GE_EXPR, type,
11413 TREE_OPERAND (arg0, 0), tem),
11414 build2 (LE_EXPR, type,
11415 TREE_OPERAND (arg0, 0), arg1));
11416
11417 /* Convert ABS_EXPR<x> >= 0 to true. */
11418 strict_overflow_p = false;
11419 if (code == GE_EXPR
11420 && (integer_zerop (arg1)
11421 || (! HONOR_NANS (arg0)
11422 && real_zerop (arg1)))
11423 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11424 {
11425 if (strict_overflow_p)
11426 fold_overflow_warning (("assuming signed overflow does not occur "
11427 "when simplifying comparison of "
11428 "absolute value and zero"),
11429 WARN_STRICT_OVERFLOW_CONDITIONAL);
11430 return omit_one_operand_loc (loc, type,
11431 constant_boolean_node (true, type),
11432 arg0);
11433 }
11434
11435 /* Convert ABS_EXPR<x> < 0 to false. */
11436 strict_overflow_p = false;
11437 if (code == LT_EXPR
11438 && (integer_zerop (arg1) || real_zerop (arg1))
11439 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11440 {
11441 if (strict_overflow_p)
11442 fold_overflow_warning (("assuming signed overflow does not occur "
11443 "when simplifying comparison of "
11444 "absolute value and zero"),
11445 WARN_STRICT_OVERFLOW_CONDITIONAL);
11446 return omit_one_operand_loc (loc, type,
11447 constant_boolean_node (false, type),
11448 arg0);
11449 }
11450
11451 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11452 and similarly for >= into !=. */
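/* For unsigned X, X < (1 << Y) holds exactly when no bit of X at
   position Y or above is set, i.e. when X >> Y == 0; X >= (1 << Y)
   is the negation (assuming a valid shift count).  */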
11453 if ((code == LT_EXPR || code == GE_EXPR)
11454 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11455 && TREE_CODE (arg1) == LSHIFT_EXPR
11456 && integer_onep (TREE_OPERAND (arg1, 0)))
11457 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11458 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11459 TREE_OPERAND (arg1, 1)),
11460 build_zero_cst (TREE_TYPE (arg0)));
11461
11462 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11463 otherwise Y might be >= # of bits in X's type and thus e.g.
11464 (unsigned char) (1 << Y) for Y 15 might be 0.
11465 If the cast is widening, then 1 << Y should have unsigned type,
11466 otherwise if Y is number of bits in the signed shift type minus 1,
11467 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11468 31 might be 0xffffffff80000000. */
11469 if ((code == LT_EXPR || code == GE_EXPR)
11470 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11471 && CONVERT_EXPR_P (arg1)
11472 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11473 && (element_precision (TREE_TYPE (arg1))
11474 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11475 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11476 || (element_precision (TREE_TYPE (arg1))
11477 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11478 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11479 {
11480 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11481 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11482 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11483 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11484 build_zero_cst (TREE_TYPE (arg0)));
11485 }
11486
11487 return NULL_TREE;
11488
11489 case UNORDERED_EXPR:
11490 case ORDERED_EXPR:
11491 case UNLT_EXPR:
11492 case UNLE_EXPR:
11493 case UNGT_EXPR:
11494 case UNGE_EXPR:
11495 case UNEQ_EXPR:
11496 case LTGT_EXPR:
11497 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11498 {
11499 tree targ0 = strip_float_extensions (arg0);
11500 tree targ1 = strip_float_extensions (arg1);
11501 tree newtype = TREE_TYPE (targ0);
11502
11503 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11504 newtype = TREE_TYPE (targ1);
11505
11506 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11507 return fold_build2_loc (loc, code, type,
11508 fold_convert_loc (loc, newtype, targ0),
11509 fold_convert_loc (loc, newtype, targ1));
11510 }
11511
11512 return NULL_TREE;
11513
11514 case COMPOUND_EXPR:
11515 /* When pedantic, a compound expression can be neither an lvalue
11516 nor an integer constant expression. */
11517 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11518 return NULL_TREE;
11519 /* Don't let (0, 0) be null pointer constant. */
11520 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11521 : fold_convert_loc (loc, type, arg1);
11522 return pedantic_non_lvalue_loc (loc, tem);
11523
11524 case ASSERT_EXPR:
11525 /* An ASSERT_EXPR should never be passed to fold_binary. */
11526 gcc_unreachable ();
11527
11528 default:
11529 return NULL_TREE;
11530 } /* switch (code) */
11531 }
11532
11533 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11534 ((A & N) + B) & M -> (A + B) & M
11535 Similarly if (N & M) == 0,
11536 ((A | N) + B) & M -> (A + B) & M
11537 and for - instead of + (or unary - instead of +)
11538 and/or ^ instead of |.
11539 If B is constant and (B & M) == 0, fold into A & M.
11540
11541 This function is a helper for match.pd patterns. Return the non-NULL
11542 type in which the simplified operation should be performed, but only
11543 if some optimization is possible.
11544
11545 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
11546 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
11547 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
11548 +/-. */
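/* Worked example (illustrative): with M == 0xff and N == 0x1ff,
   ((A & 0x1ff) + B) & 0xff folds to (A + B) & 0xff, because masking A
   with 0x1ff only clears bits that cannot influence the low eight
   bits of the sum.  */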
11549 tree
11550 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
11551 tree arg00, enum tree_code code00, tree arg000, tree arg001,
11552 tree arg01, enum tree_code code01, tree arg010, tree arg011,
11553 tree *pmop)
11554 {
11555 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
11556 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
11557 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11558 if (~cst1 == 0
11559 || (cst1 & (cst1 + 1)) != 0
11560 || !INTEGRAL_TYPE_P (type)
11561 || (!TYPE_OVERFLOW_WRAPS (type)
11562 && TREE_CODE (type) != INTEGER_TYPE)
11563 || (wi::max_value (type) & cst1) != cst1)
11564 return NULL_TREE;
11565
11566 enum tree_code codes[2] = { code00, code01 };
11567 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
11568 int which = 0;
11569 wide_int cst0;
11570
11571 /* Now we know that arg0 is (C + D) or (C - D) or -C and
11572 arg1 (M) is == (1LL << cst) - 1.
11573 Store C into PMOP[0] and D into PMOP[1]. */
11574 pmop[0] = arg00;
11575 pmop[1] = arg01;
11576 which = code != NEGATE_EXPR;
11577
11578 for (; which >= 0; which--)
11579 switch (codes[which])
11580 {
11581 case BIT_AND_EXPR:
11582 case BIT_IOR_EXPR:
11583 case BIT_XOR_EXPR:
11584 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
11585 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
11586 if (codes[which] == BIT_AND_EXPR)
11587 {
11588 if (cst0 != cst1)
11589 break;
11590 }
11591 else if (cst0 != 0)
11592 break;
11593 /* If C or D is of the form (A & N) where
11594 (N & M) == M, or of the form (A | N) or
11595 (A ^ N) where (N & M) == 0, replace it with A. */
11596 pmop[which] = arg0xx[2 * which];
11597 break;
11598 case ERROR_MARK:
11599 if (TREE_CODE (pmop[which]) != INTEGER_CST)
11600 break;
11601 /* If C or D is a constant N where (N & M) == 0, it can be
11602 omitted (replaced with 0). */
11603 if ((code == PLUS_EXPR
11604 || (code == MINUS_EXPR && which == 0))
11605 && (cst1 & wi::to_wide (pmop[which])) == 0)
11606 pmop[which] = build_int_cst (type, 0);
11607 /* Similarly, with C - N where (-N & M) == 0. */
11608 if (code == MINUS_EXPR
11609 && which == 1
11610 && (cst1 & -wi::to_wide (pmop[which])) == 0)
11611 pmop[which] = build_int_cst (type, 0);
11612 break;
11613 default:
11614 gcc_unreachable ();
11615 }
11616
11617 /* Only build anything new if we optimized one or both arguments above. */
11618 if (pmop[0] == arg00 && pmop[1] == arg01)
11619 return NULL_TREE;
11620
11621 if (TYPE_OVERFLOW_WRAPS (type))
11622 return type;
11623 else
11624 return unsigned_type_for (type);
11625 }
11626
11627 /* Used by contains_label_p and contains_label_1. */
11628
11629 struct contains_label_data
11630 {
11631 hash_set<tree> *pset;
11632 bool inside_switch_p;
11633 };
11634
11635 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11636 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
11637 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
11638
11639 static tree
11640 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
11641 {
11642 contains_label_data *d = (contains_label_data *) data;
11643 switch (TREE_CODE (*tp))
11644 {
11645 case LABEL_EXPR:
11646 return *tp;
11647
11648 case CASE_LABEL_EXPR:
11649 if (!d->inside_switch_p)
11650 return *tp;
11651 return NULL_TREE;
11652
11653 case SWITCH_EXPR:
11654 if (!d->inside_switch_p)
11655 {
11656 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
11657 return *tp;
11658 d->inside_switch_p = true;
11659 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
11660 return *tp;
11661 d->inside_switch_p = false;
11662 *walk_subtrees = 0;
11663 }
11664 return NULL_TREE;
11665
11666 case GOTO_EXPR:
11667 *walk_subtrees = 0;
11668 return NULL_TREE;
11669
11670 default:
11671 return NULL_TREE;
11672 }
11673 }
11674
11675 /* Return whether the sub-tree ST contains a label which is accessible from
11676 outside the sub-tree. */
11677
11678 static bool
11679 contains_label_p (tree st)
11680 {
11681 hash_set<tree> pset;
11682 contains_label_data data = { &pset, false };
11683 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
11684 }
11685
11686 /* Fold a ternary expression of code CODE and type TYPE with operands
11687 OP0, OP1, and OP2. Return the folded expression if folding is
11688 successful. Otherwise, return NULL_TREE. */
11689
11690 tree
11691 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11692 tree op0, tree op1, tree op2)
11693 {
11694 tree tem;
11695 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11696 enum tree_code_class kind = TREE_CODE_CLASS (code);
11697
11698 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11699 && TREE_CODE_LENGTH (code) == 3);
11700
11701 /* If this is a commutative operation, and OP0 is a constant, move it
11702 to OP1 to reduce the number of tests below. */
11703 if (commutative_ternary_tree_code (code)
11704 && tree_swap_operands_p (op0, op1))
11705 return fold_build3_loc (loc, code, type, op1, op0, op2);
11706
11707 tem = generic_simplify (loc, code, type, op0, op1, op2);
11708 if (tem)
11709 return tem;
11710
11711 /* Strip any conversions that don't change the mode. This is safe
11712 for every expression, except for a comparison expression because
11713 its signedness is derived from its operands. So, in the latter
11714 case, only strip conversions that don't change the signedness.
11715
11716 Note that this is done as an internal manipulation within the
11717 constant folder, in order to find the simplest representation of
11718 the arguments so that their form can be studied. In any case,
11719 the appropriate type conversions should be put back in the tree
11720 that will get out of the constant folder. */
11721 if (op0)
11722 {
11723 arg0 = op0;
11724 STRIP_NOPS (arg0);
11725 }
11726
11727 if (op1)
11728 {
11729 arg1 = op1;
11730 STRIP_NOPS (arg1);
11731 }
11732
11733 if (op2)
11734 {
11735 arg2 = op2;
11736 STRIP_NOPS (arg2);
11737 }
11738
11739 switch (code)
11740 {
11741 case COMPONENT_REF:
11742 if (TREE_CODE (arg0) == CONSTRUCTOR
11743 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11744 {
11745 unsigned HOST_WIDE_INT idx;
11746 tree field, value;
11747 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11748 if (field == arg1)
11749 return value;
11750 }
11751 return NULL_TREE;
11752
11753 case COND_EXPR:
11754 case VEC_COND_EXPR:
11755 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11756 so all simple results must be passed through pedantic_non_lvalue. */
11757 if (TREE_CODE (arg0) == INTEGER_CST)
11758 {
11759 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11760 tem = integer_zerop (arg0) ? op2 : op1;
11761 /* Only optimize constant conditions when the selected branch
11762 has the same type as the COND_EXPR. This avoids optimizing
11763 away "c ? x : throw", where the throw has a void type.
11764 Also avoid throwing away an operand that contains a label. */
11765 if ((!TREE_SIDE_EFFECTS (unused_op)
11766 || !contains_label_p (unused_op))
11767 && (! VOID_TYPE_P (TREE_TYPE (tem))
11768 || VOID_TYPE_P (type)))
11769 return pedantic_non_lvalue_loc (loc, tem);
11770 return NULL_TREE;
11771 }
11772 else if (TREE_CODE (arg0) == VECTOR_CST)
11773 {
11774 unsigned HOST_WIDE_INT nelts;
11775 if ((TREE_CODE (arg1) == VECTOR_CST
11776 || TREE_CODE (arg1) == CONSTRUCTOR)
11777 && (TREE_CODE (arg2) == VECTOR_CST
11778 || TREE_CODE (arg2) == CONSTRUCTOR)
11779 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
11780 {
11781 vec_perm_builder sel (nelts, nelts, 1);
11782 for (unsigned int i = 0; i < nelts; i++)
11783 {
11784 tree val = VECTOR_CST_ELT (arg0, i);
11785 if (integer_all_onesp (val))
11786 sel.quick_push (i);
11787 else if (integer_zerop (val))
11788 sel.quick_push (nelts + i);
11789 else /* Currently unreachable. */
11790 return NULL_TREE;
11791 }
11792 vec_perm_indices indices (sel, 2, nelts);
11793 tree t = fold_vec_perm (type, arg1, arg2, indices);
11794 if (t != NULL_TREE)
11795 return t;
11796 }
11797 }
11798
11799 /* If we have A op B ? A : C, we may be able to convert this to a
11800 simpler expression, depending on the operation and the values
11801 of B and C. Signed zeros prevent all of these transformations,
11802 for reasons given above each one.
11803
11804 Also try swapping the arguments and inverting the conditional. */
11805 if (COMPARISON_CLASS_P (arg0)
11806 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11807 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11808 {
11809 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11810 if (tem)
11811 return tem;
11812 }
11813
11814 if (COMPARISON_CLASS_P (arg0)
11815 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11816 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11817 {
11818 location_t loc0 = expr_location_or (arg0, loc);
11819 tem = fold_invert_truthvalue (loc0, arg0);
11820 if (tem && COMPARISON_CLASS_P (tem))
11821 {
11822 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11823 if (tem)
11824 return tem;
11825 }
11826 }
11827
11828 /* If the second operand is simpler than the third, swap them
11829 since that produces better jump optimization results. */
11830 if (truth_value_p (TREE_CODE (arg0))
11831 && tree_swap_operands_p (op1, op2))
11832 {
11833 location_t loc0 = expr_location_or (arg0, loc);
11834 /* See if this can be inverted. If it can't, possibly because
11835 it was a floating-point inequality comparison, don't do
11836 anything. */
11837 tem = fold_invert_truthvalue (loc0, arg0);
11838 if (tem)
11839 return fold_build3_loc (loc, code, type, tem, op2, op1);
11840 }
11841
11842 /* Convert A ? 1 : 0 to simply A. */
11843 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11844 : (integer_onep (op1)
11845 && !VECTOR_TYPE_P (type)))
11846 && integer_zerop (op2)
11847 /* If we try to convert OP0 to our type, the
11848 call to fold will try to move the conversion inside
11849 a COND, which will recurse. In that case, the COND_EXPR
11850 is probably the best choice, so leave it alone. */
11851 && type == TREE_TYPE (arg0))
11852 return pedantic_non_lvalue_loc (loc, arg0);
11853
11854 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11855 over COND_EXPR in cases such as floating point comparisons. */
11856 if (integer_zerop (op1)
11857 && code == COND_EXPR
11858 && integer_onep (op2)
11859 && !VECTOR_TYPE_P (type)
11860 && truth_value_p (TREE_CODE (arg0)))
11861 return pedantic_non_lvalue_loc (loc,
11862 fold_convert_loc (loc, type,
11863 invert_truthvalue_loc (loc,
11864 arg0)));
11865
11866 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
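/* E.g. (illustrative): for 32-bit A, A < 0 ? INT_MIN : 0 is simply
   A & INT_MIN, i.e. A with every bit but the sign bit cleared.  */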
11867 if (TREE_CODE (arg0) == LT_EXPR
11868 && integer_zerop (TREE_OPERAND (arg0, 1))
11869 && integer_zerop (op2)
11870 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11871 {
11872 /* sign_bit_p looks through both zero and sign extensions,
11873 but for this optimization only sign extensions are
11874 usable. */
11875 tree tem2 = TREE_OPERAND (arg0, 0);
11876 while (tem != tem2)
11877 {
11878 if (TREE_CODE (tem2) != NOP_EXPR
11879 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11880 {
11881 tem = NULL_TREE;
11882 break;
11883 }
11884 tem2 = TREE_OPERAND (tem2, 0);
11885 }
11886 /* sign_bit_p only checks ARG1 bits within A's precision.
11887 If <sign bit of A> has wider type than A, bits outside
11888 of A's precision in <sign bit of A> need to be checked.
11889 If they are all 0, this optimization needs to be done
11890 in unsigned A's type; if they are all 1, in signed A's type;
11891 otherwise this can't be done. */
11892 if (tem
11893 && TYPE_PRECISION (TREE_TYPE (tem))
11894 < TYPE_PRECISION (TREE_TYPE (arg1))
11895 && TYPE_PRECISION (TREE_TYPE (tem))
11896 < TYPE_PRECISION (type))
11897 {
11898 int inner_width, outer_width;
11899 tree tem_type;
11900
11901 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11902 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11903 if (outer_width > TYPE_PRECISION (type))
11904 outer_width = TYPE_PRECISION (type);
11905
11906 wide_int mask = wi::shifted_mask
11907 (inner_width, outer_width - inner_width, false,
11908 TYPE_PRECISION (TREE_TYPE (arg1)));
11909
11910 wide_int common = mask & wi::to_wide (arg1);
11911 if (common == mask)
11912 {
11913 tem_type = signed_type_for (TREE_TYPE (tem));
11914 tem = fold_convert_loc (loc, tem_type, tem);
11915 }
11916 else if (common == 0)
11917 {
11918 tem_type = unsigned_type_for (TREE_TYPE (tem));
11919 tem = fold_convert_loc (loc, tem_type, tem);
11920 }
11921 else
11922 tem = NULL;
11923 }
11924
11925 if (tem)
11926 return
11927 fold_convert_loc (loc, type,
11928 fold_build2_loc (loc, BIT_AND_EXPR,
11929 TREE_TYPE (tem), tem,
11930 fold_convert_loc (loc,
11931 TREE_TYPE (tem),
11932 arg1)));
11933 }
11934
11935 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11936 already handled above. */
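 /* For instance, with N == 3 this folds "(A >> 3) & 1 ? 8 : 0" into
    "A & 8".  */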
11937 if (TREE_CODE (arg0) == BIT_AND_EXPR
11938 && integer_onep (TREE_OPERAND (arg0, 1))
11939 && integer_zerop (op2)
11940 && integer_pow2p (arg1))
11941 {
11942 tree tem = TREE_OPERAND (arg0, 0);
11943 STRIP_NOPS (tem);
11944 if (TREE_CODE (tem) == RSHIFT_EXPR
11945 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11946 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11947 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11948 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11949 fold_convert_loc (loc, type,
11950 TREE_OPERAND (tem, 0)),
11951 op1);
11952 }
11953
11954 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11955 is probably obsolete because the first operand should be a
11956 truth value (that's why we have the two cases above), but let's
11957 leave it in until we can confirm this for all front-ends. */
11958 if (integer_zerop (op2)
11959 && TREE_CODE (arg0) == NE_EXPR
11960 && integer_zerop (TREE_OPERAND (arg0, 1))
11961 && integer_pow2p (arg1)
11962 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11963 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11964 arg1, OEP_ONLY_CONST)
11965 /* operand_equal_p compares just value, not precision, so e.g.
11966 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
11967 second operand 32-bit -128, which is not a power of two (or vice
11968 versa). */
11969 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
11970 return pedantic_non_lvalue_loc (loc,
11971 fold_convert_loc (loc, type,
11972 TREE_OPERAND (arg0,
11973 0)));
11974
11975 /* Disable the transformations below for vectors, since
11976 fold_binary_op_with_conditional_arg may undo them immediately,
11977 yielding an infinite loop. */
11978 if (code == VEC_COND_EXPR)
11979 return NULL_TREE;
11980
11981 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11982 if (integer_zerop (op2)
11983 && truth_value_p (TREE_CODE (arg0))
11984 && truth_value_p (TREE_CODE (arg1))
11985 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11986 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11987 : TRUTH_ANDIF_EXPR,
11988 type, fold_convert_loc (loc, type, arg0), op1);
11989
11990 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11991 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11992 && truth_value_p (TREE_CODE (arg0))
11993 && truth_value_p (TREE_CODE (arg1))
11994 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11995 {
11996 location_t loc0 = expr_location_or (arg0, loc);
11997 /* Only perform transformation if ARG0 is easily inverted. */
11998 tem = fold_invert_truthvalue (loc0, arg0);
11999 if (tem)
12000 return fold_build2_loc (loc, code == VEC_COND_EXPR
12001 ? BIT_IOR_EXPR
12002 : TRUTH_ORIF_EXPR,
12003 type, fold_convert_loc (loc, type, tem),
12004 op1);
12005 }
12006
12007 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12008 if (integer_zerop (arg1)
12009 && truth_value_p (TREE_CODE (arg0))
12010 && truth_value_p (TREE_CODE (op2))
12011 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12012 {
12013 location_t loc0 = expr_location_or (arg0, loc);
12014 /* Only perform transformation if ARG0 is easily inverted. */
12015 tem = fold_invert_truthvalue (loc0, arg0);
12016 if (tem)
12017 return fold_build2_loc (loc, code == VEC_COND_EXPR
12018 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12019 type, fold_convert_loc (loc, type, tem),
12020 op2);
12021 }
12022
12023 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12024 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
12025 && truth_value_p (TREE_CODE (arg0))
12026 && truth_value_p (TREE_CODE (op2))
12027 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12028 return fold_build2_loc (loc, code == VEC_COND_EXPR
12029 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12030 type, fold_convert_loc (loc, type, arg0), op2);
12031
12032 return NULL_TREE;
12033
12034 case CALL_EXPR:
12035 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12036 of fold_ternary on them. */
12037 gcc_unreachable ();
12038
12039 case BIT_FIELD_REF:
12040 if (TREE_CODE (arg0) == VECTOR_CST
12041 && (type == TREE_TYPE (TREE_TYPE (arg0))
12042 || (VECTOR_TYPE_P (type)
12043 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
12044 && tree_fits_uhwi_p (op1)
12045 && tree_fits_uhwi_p (op2))
12046 {
12047 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12048 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
12049 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12050 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12051
12052 if (n != 0
12053 && (idx % width) == 0
12054 && (n % width) == 0
12055 && known_le ((idx + n) / width,
12056 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
12057 {
12058 idx = idx / width;
12059 n = n / width;
12060
12061 if (TREE_CODE (arg0) == VECTOR_CST)
12062 {
12063 if (n == 1)
12064 {
12065 tem = VECTOR_CST_ELT (arg0, idx);
12066 if (VECTOR_TYPE_P (type))
12067 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
12068 return tem;
12069 }
12070
12071 tree_vector_builder vals (type, n, 1);
12072 for (unsigned i = 0; i < n; ++i)
12073 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
12074 return vals.build ();
12075 }
12076 }
12077 }
12078
12079 /* On constants we can use native encode/interpret to constant
12080 fold (nearly) all BIT_FIELD_REFs. */
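 /* For example, a BIT_FIELD_REF that extracts bits 32..63 of a 64-bit
    INTEGER_CST can be folded by encoding the constant into its target
    byte representation and reinterpreting four of those bytes as the
    32-bit result type.  */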
12081 if (CONSTANT_CLASS_P (arg0)
12082 && can_native_interpret_type_p (type)
12083 && BITS_PER_UNIT == 8
12084 && tree_fits_uhwi_p (op1)
12085 && tree_fits_uhwi_p (op2))
12086 {
12087 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12088 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12089 /* Limit us to a reasonable amount of work. To relax the
12090 other limitations we need bit-shifting of the buffer
12091 and rounding up the size. */
12092 if (bitpos % BITS_PER_UNIT == 0
12093 && bitsize % BITS_PER_UNIT == 0
12094 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
12095 {
12096 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
12097 unsigned HOST_WIDE_INT len
12098 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
12099 bitpos / BITS_PER_UNIT);
12100 if (len > 0
12101 && len * BITS_PER_UNIT >= bitsize)
12102 {
12103 tree v = native_interpret_expr (type, b,
12104 bitsize / BITS_PER_UNIT);
12105 if (v)
12106 return v;
12107 }
12108 }
12109 }
12110
12111 return NULL_TREE;
12112
12113 case VEC_PERM_EXPR:
12114 /* Perform constant folding of VEC_PERM_EXPR. */
12115 if (TREE_CODE (arg2) == VECTOR_CST
12116 && TREE_CODE (op0) == VECTOR_CST
12117 && TREE_CODE (op1) == VECTOR_CST)
12118 {
12119 /* Build a vector of integers from the tree mask. */
12120 vec_perm_builder builder;
12121 if (!tree_to_vec_perm_builder (&builder, arg2))
12122 return NULL_TREE;
12123
12124 /* Create a vec_perm_indices for the integer vector. */
12125 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
12126 bool single_arg = (op0 == op1);
12127 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
12128 return fold_vec_perm (type, op0, op1, sel);
12129 }
12130 return NULL_TREE;
12131
12132 case BIT_INSERT_EXPR:
12133 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
12134 if (TREE_CODE (arg0) == INTEGER_CST
12135 && TREE_CODE (arg1) == INTEGER_CST)
12136 {
12137 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12138 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
12139 wide_int tem = (wi::to_wide (arg0)
12140 & wi::shifted_mask (bitpos, bitsize, true,
12141 TYPE_PRECISION (type)));
12142 wide_int tem2
12143 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
12144 bitsize), bitpos);
12145 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
12146 }
12147 else if (TREE_CODE (arg0) == VECTOR_CST
12148 && CONSTANT_CLASS_P (arg1)
12149 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
12150 TREE_TYPE (arg1)))
12151 {
12152 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12153 unsigned HOST_WIDE_INT elsize
12154 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
12155 if (bitpos % elsize == 0)
12156 {
12157 unsigned k = bitpos / elsize;
12158 unsigned HOST_WIDE_INT nelts;
12159 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
12160 return arg0;
12161 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
12162 {
12163 tree_vector_builder elts (type, nelts, 1);
12164 elts.quick_grow (nelts);
12165 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
12166 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
12167 return elts.build ();
12168 }
12169 }
12170 }
12171 return NULL_TREE;
12172
12173 default:
12174 return NULL_TREE;
12175 } /* switch (code) */
12176 }
12177
12178 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
12179 of an array (or vector). *CTOR_IDX, if non-NULL, is updated with the
12180 constructor element index of the value returned. If the element is
12181 not found, NULL_TREE is returned and *CTOR_IDX is updated to
12182 the index of the element after the ACCESS_INDEX position (which
12183 may be outside of the CTOR array). */
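 /* For example, given the constructor { [0] = 10, [2 ... 4] = 20 }, an
    ACCESS_INDEX of 3 falls inside the RANGE_EXPR entry, so 20 is returned
    and *CTOR_IDX (if supplied) is set to 1, while an ACCESS_INDEX of 1
    matches no element and NULL_TREE is returned.  */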
12184
12185 tree
12186 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
12187 unsigned *ctor_idx)
12188 {
12189 tree index_type = NULL_TREE;
12190 signop index_sgn = UNSIGNED;
12191 offset_int low_bound = 0;
12192
12193 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12194 {
12195 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12196 if (domain_type && TYPE_MIN_VALUE (domain_type))
12197 {
12198 /* Static constructors for variably sized objects make no sense. */
12199 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12200 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12201 /* ??? When it is obvious that the range is signed, treat it so. */
12202 if (TYPE_UNSIGNED (index_type)
12203 && TYPE_MAX_VALUE (domain_type)
12204 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
12205 TYPE_MIN_VALUE (domain_type)))
12206 {
12207 index_sgn = SIGNED;
12208 low_bound
12209 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
12210 SIGNED);
12211 }
12212 else
12213 {
12214 index_sgn = TYPE_SIGN (index_type);
12215 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12216 }
12217 }
12218 }
12219
12220 if (index_type)
12221 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12222 index_sgn);
12223
12224 offset_int index = low_bound;
12225 if (index_type)
12226 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12227
12228 offset_int max_index = index;
12229 unsigned cnt;
12230 tree cfield, cval;
12231 bool first_p = true;
12232
12233 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12234 {
12235 /* Array constructor might explicitly set index, or specify a range,
12236 or leave index NULL, meaning that it is the next index after the
12237 previous one. */
12238 if (cfield)
12239 {
12240 if (TREE_CODE (cfield) == INTEGER_CST)
12241 max_index = index
12242 = offset_int::from (wi::to_wide (cfield), index_sgn);
12243 else
12244 {
12245 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12246 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
12247 index_sgn);
12248 max_index
12249 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
12250 index_sgn);
12251 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
12252 }
12253 }
12254 else if (!first_p)
12255 {
12256 index = max_index + 1;
12257 if (index_type)
12258 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12259 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
12260 max_index = index;
12261 }
12262 else
12263 first_p = false;
12264
12265 /* Do we have a match? */
12266 if (wi::cmp (access_index, index, index_sgn) >= 0)
12267 {
12268 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
12269 {
12270 if (ctor_idx)
12271 *ctor_idx = cnt;
12272 return cval;
12273 }
12274 }
12275 else if (in_gimple_form)
12276 /* We're past the element we search for. Note that during parsing
12277 the elements might not be sorted.
12278 ??? We should use a binary search and a flag on the
12279 CONSTRUCTOR as to whether elements are sorted in declaration
12280 order. */
12281 break;
12282 }
12283 if (ctor_idx)
12284 *ctor_idx = cnt;
12285 return NULL_TREE;
12286 }
12287
12288 /* Perform constant folding and related simplification of EXPR.
12289 The related simplifications include x*1 => x, x*0 => 0, etc.,
12290 and application of the associative law.
12291 NOP_EXPR conversions may be removed freely (as long as we
12292 are careful not to change the type of the overall expression).
12293 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12294 but we can constant-fold them if they have constant operands. */
12295
12296 #ifdef ENABLE_FOLD_CHECKING
12297 # define fold(x) fold_1 (x)
12298 static tree fold_1 (tree);
12299 static
12300 #endif
12301 tree
12302 fold (tree expr)
12303 {
12304 const tree t = expr;
12305 enum tree_code code = TREE_CODE (t);
12306 enum tree_code_class kind = TREE_CODE_CLASS (code);
12307 tree tem;
12308 location_t loc = EXPR_LOCATION (expr);
12309
12310 /* Return right away if a constant. */
12311 if (kind == tcc_constant)
12312 return t;
12313
12314 /* CALL_EXPR-like objects with variable numbers of operands are
12315 treated specially. */
12316 if (kind == tcc_vl_exp)
12317 {
12318 if (code == CALL_EXPR)
12319 {
12320 tem = fold_call_expr (loc, expr, false);
12321 return tem ? tem : expr;
12322 }
12323 return expr;
12324 }
12325
12326 if (IS_EXPR_CODE_CLASS (kind))
12327 {
12328 tree type = TREE_TYPE (t);
12329 tree op0, op1, op2;
12330
12331 switch (TREE_CODE_LENGTH (code))
12332 {
12333 case 1:
12334 op0 = TREE_OPERAND (t, 0);
12335 tem = fold_unary_loc (loc, code, type, op0);
12336 return tem ? tem : expr;
12337 case 2:
12338 op0 = TREE_OPERAND (t, 0);
12339 op1 = TREE_OPERAND (t, 1);
12340 tem = fold_binary_loc (loc, code, type, op0, op1);
12341 return tem ? tem : expr;
12342 case 3:
12343 op0 = TREE_OPERAND (t, 0);
12344 op1 = TREE_OPERAND (t, 1);
12345 op2 = TREE_OPERAND (t, 2);
12346 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12347 return tem ? tem : expr;
12348 default:
12349 break;
12350 }
12351 }
12352
12353 switch (code)
12354 {
12355 case ARRAY_REF:
12356 {
12357 tree op0 = TREE_OPERAND (t, 0);
12358 tree op1 = TREE_OPERAND (t, 1);
12359
12360 if (TREE_CODE (op1) == INTEGER_CST
12361 && TREE_CODE (op0) == CONSTRUCTOR
12362 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12363 {
12364 tree val = get_array_ctor_element_at_index (op0,
12365 wi::to_offset (op1));
12366 if (val)
12367 return val;
12368 }
12369
12370 return t;
12371 }
12372
12373 /* Return a VECTOR_CST if possible. */
12374 case CONSTRUCTOR:
12375 {
12376 tree type = TREE_TYPE (t);
12377 if (TREE_CODE (type) != VECTOR_TYPE)
12378 return t;
12379
12380 unsigned i;
12381 tree val;
12382 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12383 if (! CONSTANT_CLASS_P (val))
12384 return t;
12385
12386 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12387 }
12388
12389 case CONST_DECL:
12390 return fold (DECL_INITIAL (t));
12391
12392 default:
12393 return t;
12394 } /* switch (code) */
12395 }
12396
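 /* A minimal usage sketch (an illustration only, relying on the generic
    tree-building routines build2 and build_int_cst): a caller hands fold
    an already-built expression and uses whatever comes back in place of
    the original, e.g.

        tree sum = build2 (PLUS_EXPR, integer_type_node,
                           build_int_cst (integer_type_node, 2),
                           build_int_cst (integer_type_node, 3));
        sum = fold (sum);

    after which SUM is an INTEGER_CST with value 5; fold either returns a
    simplified tree or the original expression unchanged.  */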
12397 #ifdef ENABLE_FOLD_CHECKING
12398 #undef fold
12399
12400 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12401 hash_table<nofree_ptr_hash<const tree_node> > *);
12402 static void fold_check_failed (const_tree, const_tree);
12403 void print_fold_checksum (const_tree);
12404
12405 /* When --enable-checking=fold, compute a digest of expr before
12406 and after the actual fold call to verify that fold did not
12407 accidentally change the original expr. */
12408
12409 tree
12410 fold (tree expr)
12411 {
12412 tree ret;
12413 struct md5_ctx ctx;
12414 unsigned char checksum_before[16], checksum_after[16];
12415 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12416
12417 md5_init_ctx (&ctx);
12418 fold_checksum_tree (expr, &ctx, &ht);
12419 md5_finish_ctx (&ctx, checksum_before);
12420 ht.empty ();
12421
12422 ret = fold_1 (expr);
12423
12424 md5_init_ctx (&ctx);
12425 fold_checksum_tree (expr, &ctx, &ht);
12426 md5_finish_ctx (&ctx, checksum_after);
12427
12428 if (memcmp (checksum_before, checksum_after, 16))
12429 fold_check_failed (expr, ret);
12430
12431 return ret;
12432 }
12433
12434 void
12435 print_fold_checksum (const_tree expr)
12436 {
12437 struct md5_ctx ctx;
12438 unsigned char checksum[16], cnt;
12439 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12440
12441 md5_init_ctx (&ctx);
12442 fold_checksum_tree (expr, &ctx, &ht);
12443 md5_finish_ctx (&ctx, checksum);
12444 for (cnt = 0; cnt < 16; ++cnt)
12445 fprintf (stderr, "%02x", checksum[cnt]);
12446 putc ('\n', stderr);
12447 }
12448
12449 static void
12450 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12451 {
12452 internal_error ("fold check: original tree changed by fold");
12453 }
12454
12455 static void
12456 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12457 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12458 {
12459 const tree_node **slot;
12460 enum tree_code code;
12461 union tree_node *buf;
12462 int i, len;
12463
12464 recursive_label:
12465 if (expr == NULL)
12466 return;
12467 slot = ht->find_slot (expr, INSERT);
12468 if (*slot != NULL)
12469 return;
12470 *slot = expr;
12471 code = TREE_CODE (expr);
12472 if (TREE_CODE_CLASS (code) == tcc_declaration
12473 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12474 {
12475 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12476 size_t sz = tree_size (expr);
12477 buf = XALLOCAVAR (union tree_node, sz);
12478 memcpy ((char *) buf, expr, sz);
12479 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
12480 buf->decl_with_vis.symtab_node = NULL;
12481 buf->base.nowarning_flag = 0;
12482 expr = (tree) buf;
12483 }
12484 else if (TREE_CODE_CLASS (code) == tcc_type
12485 && (TYPE_POINTER_TO (expr)
12486 || TYPE_REFERENCE_TO (expr)
12487 || TYPE_CACHED_VALUES_P (expr)
12488 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12489 || TYPE_NEXT_VARIANT (expr)
12490 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12491 {
12492 /* Allow these fields to be modified. */
12493 tree tmp;
12494 size_t sz = tree_size (expr);
12495 buf = XALLOCAVAR (union tree_node, sz);
12496 memcpy ((char *) buf, expr, sz);
12497 expr = tmp = (tree) buf;
12498 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12499 TYPE_POINTER_TO (tmp) = NULL;
12500 TYPE_REFERENCE_TO (tmp) = NULL;
12501 TYPE_NEXT_VARIANT (tmp) = NULL;
12502 TYPE_ALIAS_SET (tmp) = -1;
12503 if (TYPE_CACHED_VALUES_P (tmp))
12504 {
12505 TYPE_CACHED_VALUES_P (tmp) = 0;
12506 TYPE_CACHED_VALUES (tmp) = NULL;
12507 }
12508 }
12509 else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
12510 {
12511 /* Allow TREE_NO_WARNING to be set. Perhaps we shouldn't allow that
12512 and change builtins.c etc. instead - see PR89543. */
12513 size_t sz = tree_size (expr);
12514 buf = XALLOCAVAR (union tree_node, sz);
12515 memcpy ((char *) buf, expr, sz);
12516 buf->base.nowarning_flag = 0;
12517 expr = (tree) buf;
12518 }
12519 md5_process_bytes (expr, tree_size (expr), ctx);
12520 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12521 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12522 if (TREE_CODE_CLASS (code) != tcc_type
12523 && TREE_CODE_CLASS (code) != tcc_declaration
12524 && code != TREE_LIST
12525 && code != SSA_NAME
12526 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12527 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12528 switch (TREE_CODE_CLASS (code))
12529 {
12530 case tcc_constant:
12531 switch (code)
12532 {
12533 case STRING_CST:
12534 md5_process_bytes (TREE_STRING_POINTER (expr),
12535 TREE_STRING_LENGTH (expr), ctx);
12536 break;
12537 case COMPLEX_CST:
12538 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12539 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12540 break;
12541 case VECTOR_CST:
12542 len = vector_cst_encoded_nelts (expr);
12543 for (i = 0; i < len; ++i)
12544 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
12545 break;
12546 default:
12547 break;
12548 }
12549 break;
12550 case tcc_exceptional:
12551 switch (code)
12552 {
12553 case TREE_LIST:
12554 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12555 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12556 expr = TREE_CHAIN (expr);
12557 goto recursive_label;
12558 break;
12559 case TREE_VEC:
12560 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12561 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12562 break;
12563 default:
12564 break;
12565 }
12566 break;
12567 case tcc_expression:
12568 case tcc_reference:
12569 case tcc_comparison:
12570 case tcc_unary:
12571 case tcc_binary:
12572 case tcc_statement:
12573 case tcc_vl_exp:
12574 len = TREE_OPERAND_LENGTH (expr);
12575 for (i = 0; i < len; ++i)
12576 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12577 break;
12578 case tcc_declaration:
12579 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12580 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12581 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12582 {
12583 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12584 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12585 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12586 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12587 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12588 }
12589
12590 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12591 {
12592 if (TREE_CODE (expr) == FUNCTION_DECL)
12593 {
12594 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12595 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12596 }
12597 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12598 }
12599 break;
12600 case tcc_type:
12601 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12602 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12603 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12604 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12605 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12606 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12607 if (INTEGRAL_TYPE_P (expr)
12608 || SCALAR_FLOAT_TYPE_P (expr))
12609 {
12610 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12611 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12612 }
12613 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12614 if (TREE_CODE (expr) == RECORD_TYPE
12615 || TREE_CODE (expr) == UNION_TYPE
12616 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12617 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12618 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12619 break;
12620 default:
12621 break;
12622 }
12623 }
12624
12625 /* Helper function for outputting the checksum of a tree T. When
12626 debugging with gdb, you can "define mynext" to be "next" followed
12627 by "call debug_fold_checksum (op0)", then just trace down till the
12628 outputs differ. */
12629
12630 DEBUG_FUNCTION void
12631 debug_fold_checksum (const_tree t)
12632 {
12633 int i;
12634 unsigned char checksum[16];
12635 struct md5_ctx ctx;
12636 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12637
12638 md5_init_ctx (&ctx);
12639 fold_checksum_tree (t, &ctx, &ht);
12640 md5_finish_ctx (&ctx, checksum);
12641 ht.empty ();
12642
12643 for (i = 0; i < 16; i++)
12644 fprintf (stderr, "%d ", checksum[i]);
12645
12646 fprintf (stderr, "\n");
12647 }
12648
12649 #endif
12650
12651 /* Fold a unary tree expression with code CODE of type TYPE with an
12652 operand OP0. LOC is the location of the resulting expression.
12653 Return a folded expression if successful. Otherwise, return a tree
12654 expression with code CODE of type TYPE with an operand OP0. */
12655
12656 tree
12657 fold_build1_loc (location_t loc,
12658 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12659 {
12660 tree tem;
12661 #ifdef ENABLE_FOLD_CHECKING
12662 unsigned char checksum_before[16], checksum_after[16];
12663 struct md5_ctx ctx;
12664 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12665
12666 md5_init_ctx (&ctx);
12667 fold_checksum_tree (op0, &ctx, &ht);
12668 md5_finish_ctx (&ctx, checksum_before);
12669 ht.empty ();
12670 #endif
12671
12672 tem = fold_unary_loc (loc, code, type, op0);
12673 if (!tem)
12674 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12675
12676 #ifdef ENABLE_FOLD_CHECKING
12677 md5_init_ctx (&ctx);
12678 fold_checksum_tree (op0, &ctx, &ht);
12679 md5_finish_ctx (&ctx, checksum_after);
12680
12681 if (memcmp (checksum_before, checksum_after, 16))
12682 fold_check_failed (op0, tem);
12683 #endif
12684 return tem;
12685 }
12686
12687 /* Fold a binary tree expression with code CODE of type TYPE with
12688 operands OP0 and OP1. LOC is the location of the resulting
12689 expression. Return a folded expression if successful. Otherwise,
12690 return a tree expression with code CODE of type TYPE with operands
12691 OP0 and OP1. */
12692
12693 tree
12694 fold_build2_loc (location_t loc,
12695 enum tree_code code, tree type, tree op0, tree op1
12696 MEM_STAT_DECL)
12697 {
12698 tree tem;
12699 #ifdef ENABLE_FOLD_CHECKING
12700 unsigned char checksum_before_op0[16],
12701 checksum_before_op1[16],
12702 checksum_after_op0[16],
12703 checksum_after_op1[16];
12704 struct md5_ctx ctx;
12705 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12706
12707 md5_init_ctx (&ctx);
12708 fold_checksum_tree (op0, &ctx, &ht);
12709 md5_finish_ctx (&ctx, checksum_before_op0);
12710 ht.empty ();
12711
12712 md5_init_ctx (&ctx);
12713 fold_checksum_tree (op1, &ctx, &ht);
12714 md5_finish_ctx (&ctx, checksum_before_op1);
12715 ht.empty ();
12716 #endif
12717
12718 tem = fold_binary_loc (loc, code, type, op0, op1);
12719 if (!tem)
12720 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12721
12722 #ifdef ENABLE_FOLD_CHECKING
12723 md5_init_ctx (&ctx);
12724 fold_checksum_tree (op0, &ctx, &ht);
12725 md5_finish_ctx (&ctx, checksum_after_op0);
12726 ht.empty ();
12727
12728 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12729 fold_check_failed (op0, tem);
12730
12731 md5_init_ctx (&ctx);
12732 fold_checksum_tree (op1, &ctx, &ht);
12733 md5_finish_ctx (&ctx, checksum_after_op1);
12734
12735 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12736 fold_check_failed (op1, tem);
12737 #endif
12738 return tem;
12739 }
12740
12741 /* Fold a ternary tree expression with code CODE of type TYPE with
12742 operands OP0, OP1, and OP2. Return a folded expression if
12743 successful. Otherwise, return a tree expression with code CODE of
12744 type TYPE with operands OP0, OP1, and OP2. */
12745
12746 tree
12747 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12748 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12749 {
12750 tree tem;
12751 #ifdef ENABLE_FOLD_CHECKING
12752 unsigned char checksum_before_op0[16],
12753 checksum_before_op1[16],
12754 checksum_before_op2[16],
12755 checksum_after_op0[16],
12756 checksum_after_op1[16],
12757 checksum_after_op2[16];
12758 struct md5_ctx ctx;
12759 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12760
12761 md5_init_ctx (&ctx);
12762 fold_checksum_tree (op0, &ctx, &ht);
12763 md5_finish_ctx (&ctx, checksum_before_op0);
12764 ht.empty ();
12765
12766 md5_init_ctx (&ctx);
12767 fold_checksum_tree (op1, &ctx, &ht);
12768 md5_finish_ctx (&ctx, checksum_before_op1);
12769 ht.empty ();
12770
12771 md5_init_ctx (&ctx);
12772 fold_checksum_tree (op2, &ctx, &ht);
12773 md5_finish_ctx (&ctx, checksum_before_op2);
12774 ht.empty ();
12775 #endif
12776
12777 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12778 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12779 if (!tem)
12780 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12781
12782 #ifdef ENABLE_FOLD_CHECKING
12783 md5_init_ctx (&ctx);
12784 fold_checksum_tree (op0, &ctx, &ht);
12785 md5_finish_ctx (&ctx, checksum_after_op0);
12786 ht.empty ();
12787
12788 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12789 fold_check_failed (op0, tem);
12790
12791 md5_init_ctx (&ctx);
12792 fold_checksum_tree (op1, &ctx, &ht);
12793 md5_finish_ctx (&ctx, checksum_after_op1);
12794 ht.empty ();
12795
12796 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12797 fold_check_failed (op1, tem);
12798
12799 md5_init_ctx (&ctx);
12800 fold_checksum_tree (op2, &ctx, &ht);
12801 md5_finish_ctx (&ctx, checksum_after_op2);
12802
12803 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12804 fold_check_failed (op2, tem);
12805 #endif
12806 return tem;
12807 }
12808
12809 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
12810 arguments in ARGARRAY, and a null static chain.
12811 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12812 of type TYPE from the given operands as constructed by build_call_array. */
12813
12814 tree
12815 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12816 int nargs, tree *argarray)
12817 {
12818 tree tem;
12819 #ifdef ENABLE_FOLD_CHECKING
12820 unsigned char checksum_before_fn[16],
12821 checksum_before_arglist[16],
12822 checksum_after_fn[16],
12823 checksum_after_arglist[16];
12824 struct md5_ctx ctx;
12825 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12826 int i;
12827
12828 md5_init_ctx (&ctx);
12829 fold_checksum_tree (fn, &ctx, &ht);
12830 md5_finish_ctx (&ctx, checksum_before_fn);
12831 ht.empty ();
12832
12833 md5_init_ctx (&ctx);
12834 for (i = 0; i < nargs; i++)
12835 fold_checksum_tree (argarray[i], &ctx, &ht);
12836 md5_finish_ctx (&ctx, checksum_before_arglist);
12837 ht.empty ();
12838 #endif
12839
12840 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12841 if (!tem)
12842 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12843
12844 #ifdef ENABLE_FOLD_CHECKING
12845 md5_init_ctx (&ctx);
12846 fold_checksum_tree (fn, &ctx, &ht);
12847 md5_finish_ctx (&ctx, checksum_after_fn);
12848 ht.empty ();
12849
12850 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12851 fold_check_failed (fn, tem);
12852
12853 md5_init_ctx (&ctx);
12854 for (i = 0; i < nargs; i++)
12855 fold_checksum_tree (argarray[i], &ctx, &ht);
12856 md5_finish_ctx (&ctx, checksum_after_arglist);
12857
12858 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12859 fold_check_failed (NULL_TREE, tem);
12860 #endif
12861 return tem;
12862 }
12863
12864 /* Perform constant folding and related simplification of initializer
12865 expression EXPR. These behave identically to "fold_buildN" but ignore
12866 potential run-time traps and exceptions that fold must preserve. */
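 /* For example, with flag_trapping_math temporarily cleared by
    START_FOLD_INIT below, a floating-point operation that could raise an
    exception at run time may still be folded when it appears in a static
    initializer, where no run-time evaluation ever takes place.  */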
12867
12868 #define START_FOLD_INIT \
12869 int saved_signaling_nans = flag_signaling_nans;\
12870 int saved_trapping_math = flag_trapping_math;\
12871 int saved_rounding_math = flag_rounding_math;\
12872 int saved_trapv = flag_trapv;\
12873 int saved_folding_initializer = folding_initializer;\
12874 flag_signaling_nans = 0;\
12875 flag_trapping_math = 0;\
12876 flag_rounding_math = 0;\
12877 flag_trapv = 0;\
12878 folding_initializer = 1;
12879
12880 #define END_FOLD_INIT \
12881 flag_signaling_nans = saved_signaling_nans;\
12882 flag_trapping_math = saved_trapping_math;\
12883 flag_rounding_math = saved_rounding_math;\
12884 flag_trapv = saved_trapv;\
12885 folding_initializer = saved_folding_initializer;
12886
12887 tree
12888 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12889 tree type, tree op)
12890 {
12891 tree result;
12892 START_FOLD_INIT;
12893
12894 result = fold_build1_loc (loc, code, type, op);
12895
12896 END_FOLD_INIT;
12897 return result;
12898 }
12899
12900 tree
12901 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12902 tree type, tree op0, tree op1)
12903 {
12904 tree result;
12905 START_FOLD_INIT;
12906
12907 result = fold_build2_loc (loc, code, type, op0, op1);
12908
12909 END_FOLD_INIT;
12910 return result;
12911 }
12912
12913 tree
12914 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12915 int nargs, tree *argarray)
12916 {
12917 tree result;
12918 START_FOLD_INIT;
12919
12920 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12921
12922 END_FOLD_INIT;
12923 return result;
12924 }
12925
12926 #undef START_FOLD_INIT
12927 #undef END_FOLD_INIT
12928
12929 /* Determine if first argument is a multiple of second argument. Return 0 if
12930 it is not, or if we cannot easily determine it to be.
12931
12932 An example of the sort of thing we care about (at this point; this routine
12933 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12934 fold cases do now) is discovering that
12935
12936 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12937
12938 is a multiple of
12939
12940 SAVE_EXPR (J * 8)
12941
12942 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12943
12944 This code also handles discovering that
12945
12946 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12947
12948 is a multiple of 8 so we don't have to worry about dealing with a
12949 possible remainder.
12950
12951 Note that we *look* inside a SAVE_EXPR only to determine how it was
12952 calculated; it is not safe for fold to do much of anything else with the
12953 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12954 at run time. For example, the latter example above *cannot* be implemented
12955 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12956 evaluation time of the original SAVE_EXPR is not necessarily the same at
12957 the time the new expression is evaluated. The only optimization of this
12958 sort that would be valid is changing
12959
12960 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12961
12962 divided by 8 to
12963
12964 SAVE_EXPR (I) * SAVE_EXPR (J)
12965
12966 (where the same SAVE_EXPR (J) is used in the original and the
12967 transformed version). */
12968
12969 int
12970 multiple_of_p (tree type, const_tree top, const_tree bottom)
12971 {
12972 gimple *stmt;
12973 tree t1, op1, op2;
12974
12975 if (operand_equal_p (top, bottom, 0))
12976 return 1;
12977
12978 if (TREE_CODE (type) != INTEGER_TYPE)
12979 return 0;
12980
12981 switch (TREE_CODE (top))
12982 {
12983 case BIT_AND_EXPR:
12984 /* Bitwise and provides a power of two multiple. If the mask is
12985 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12986 if (!integer_pow2p (bottom))
12987 return 0;
12988 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12989 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12990
12991 case MULT_EXPR:
12992 if (TREE_CODE (bottom) == INTEGER_CST)
12993 {
12994 op1 = TREE_OPERAND (top, 0);
12995 op2 = TREE_OPERAND (top, 1);
12996 if (TREE_CODE (op1) == INTEGER_CST)
12997 std::swap (op1, op2);
12998 if (TREE_CODE (op2) == INTEGER_CST)
12999 {
13000 if (multiple_of_p (type, op2, bottom))
13001 return 1;
13002 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
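 /* If BOTTOM is itself a multiple of the constant factor OP2, it is
    enough that the other factor is a multiple of BOTTOM / OP2; in the
    example above 8 is a multiple of 4, so x * 2 + 2 only needs to be a
    multiple of 8 / 4 == 2.  */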
13003 if (multiple_of_p (type, bottom, op2))
13004 {
13005 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
13006 wi::to_widest (op2));
13007 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
13008 {
13009 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
13010 return multiple_of_p (type, op1, op2);
13011 }
13012 }
13013 return multiple_of_p (type, op1, bottom);
13014 }
13015 }
13016 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13017 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13018
13019 case MINUS_EXPR:
13020 /* It is impossible to prove precisely whether op0 - op1 is a multiple
13021 of bottom, so be conservative here and check whether both op0 and op1
13022 are multiples of bottom. Note we check the second operand first
13023 since it's usually simpler. */
13024 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13025 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13026
13027 case PLUS_EXPR:
13028 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
13029 as op0 - 3 if the expression has unsigned type. For example,
13030 (X / 3) * 3 + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
13031 op1 = TREE_OPERAND (top, 1);
13032 if (TYPE_UNSIGNED (type)
13033 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
13034 op1 = fold_build1 (NEGATE_EXPR, type, op1);
13035 return (multiple_of_p (type, op1, bottom)
13036 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13037
13038 case LSHIFT_EXPR:
13039 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13040 {
13041 op1 = TREE_OPERAND (top, 1);
13042 /* const_binop may not detect overflow correctly,
13043 so check for it explicitly here. */
13044 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
13045 wi::to_wide (op1))
13046 && (t1 = fold_convert (type,
13047 const_binop (LSHIFT_EXPR, size_one_node,
13048 op1))) != 0
13049 && !TREE_OVERFLOW (t1))
13050 return multiple_of_p (type, t1, bottom);
13051 }
13052 return 0;
13053
13054 case NOP_EXPR:
13055 /* Can't handle conversions from non-integral or wider integral type. */
13056 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13057 || (TYPE_PRECISION (type)
13058 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13059 return 0;
13060
13061 /* fall through */
13062
13063 case SAVE_EXPR:
13064 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13065
13066 case COND_EXPR:
13067 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13068 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
13069
13070 case INTEGER_CST:
13071 if (TREE_CODE (bottom) != INTEGER_CST
13072 || integer_zerop (bottom)
13073 || (TYPE_UNSIGNED (type)
13074 && (tree_int_cst_sgn (top) < 0
13075 || tree_int_cst_sgn (bottom) < 0)))
13076 return 0;
13077 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
13078 SIGNED);
13079
13080 case SSA_NAME:
13081 if (TREE_CODE (bottom) == INTEGER_CST
13082 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
13083 && gimple_code (stmt) == GIMPLE_ASSIGN)
13084 {
13085 enum tree_code code = gimple_assign_rhs_code (stmt);
13086
13087 /* Check for special cases to see if top is defined as a multiple
13088 of bottom:
13089
13090 top = X & ~(bottom - 1) ; bottom is a power of 2
13091
13092 or
13093
13094 Y = X % bottom
13095 top = X - Y. */
13096 if (code == BIT_AND_EXPR
13097 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13098 && TREE_CODE (op2) == INTEGER_CST
13099 && integer_pow2p (bottom)
13100 && wi::multiple_of_p (wi::to_widest (op2),
13101 wi::to_widest (bottom), UNSIGNED))
13102 return 1;
13103
13104 op1 = gimple_assign_rhs1 (stmt);
13105 if (code == MINUS_EXPR
13106 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13107 && TREE_CODE (op2) == SSA_NAME
13108 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
13109 && gimple_code (stmt) == GIMPLE_ASSIGN
13110 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
13111 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
13112 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
13113 return 1;
13114 }
13115
13116 /* fall through */
13117
13118 default:
13119 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
13120 return multiple_p (wi::to_poly_widest (top),
13121 wi::to_poly_widest (bottom));
13122
13123 return 0;
13124 }
13125 }
13126
13127 #define tree_expr_nonnegative_warnv_p(X, Y) \
13128 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13129
13130 #define RECURSE(X) \
13131 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
13132
13133 /* Return true if CODE or TYPE is known to be non-negative. */
13134
13135 static bool
13136 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13137 {
13138 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13139 && truth_value_p (code))
13140 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13141 have a signed:1 type (where the values are -1 and 0). */
13142 return true;
13143 return false;
13144 }
13145
13146 /* Return true if (CODE OP0) is known to be non-negative. If the return
13147 value is based on the assumption that signed overflow is undefined,
13148 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13149 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13150
13151 bool
13152 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13153 bool *strict_overflow_p, int depth)
13154 {
13155 if (TYPE_UNSIGNED (type))
13156 return true;
13157
13158 switch (code)
13159 {
13160 case ABS_EXPR:
13161 /* We can't return 1 if flag_wrapv is set because
13162 ABS_EXPR<INT_MIN> = INT_MIN. */
13163 if (!ANY_INTEGRAL_TYPE_P (type))
13164 return true;
13165 if (TYPE_OVERFLOW_UNDEFINED (type))
13166 {
13167 *strict_overflow_p = true;
13168 return true;
13169 }
13170 break;
13171
13172 case NON_LVALUE_EXPR:
13173 case FLOAT_EXPR:
13174 case FIX_TRUNC_EXPR:
13175 return RECURSE (op0);
13176
13177 CASE_CONVERT:
13178 {
13179 tree inner_type = TREE_TYPE (op0);
13180 tree outer_type = type;
13181
13182 if (TREE_CODE (outer_type) == REAL_TYPE)
13183 {
13184 if (TREE_CODE (inner_type) == REAL_TYPE)
13185 return RECURSE (op0);
13186 if (INTEGRAL_TYPE_P (inner_type))
13187 {
13188 if (TYPE_UNSIGNED (inner_type))
13189 return true;
13190 return RECURSE (op0);
13191 }
13192 }
13193 else if (INTEGRAL_TYPE_P (outer_type))
13194 {
13195 if (TREE_CODE (inner_type) == REAL_TYPE)
13196 return RECURSE (op0);
13197 if (INTEGRAL_TYPE_P (inner_type))
13198 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13199 && TYPE_UNSIGNED (inner_type);
13200 }
13201 }
13202 break;
13203
13204 default:
13205 return tree_simple_nonnegative_warnv_p (code, type);
13206 }
13207
13208 /* We don't know the sign of `t', so be conservative and return false. */
13209 return false;
13210 }
13211
13212 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13213 value is based on the assumption that signed overflow is undefined,
13214 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13215 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13216
13217 bool
13218 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13219 tree op1, bool *strict_overflow_p,
13220 int depth)
13221 {
13222 if (TYPE_UNSIGNED (type))
13223 return true;
13224
13225 switch (code)
13226 {
13227 case POINTER_PLUS_EXPR:
13228 case PLUS_EXPR:
13229 if (FLOAT_TYPE_P (type))
13230 return RECURSE (op0) && RECURSE (op1);
13231
13232 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13233 both unsigned and at least 2 bits shorter than the result. */
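 /* For instance, two unsigned chars zero-extended to a 32-bit int sum to
    at most 255 + 255 = 510, which needs only 9 bits, so the sign bit of
    the result can never be set.  */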
13234 if (TREE_CODE (type) == INTEGER_TYPE
13235 && TREE_CODE (op0) == NOP_EXPR
13236 && TREE_CODE (op1) == NOP_EXPR)
13237 {
13238 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13239 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13240 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13241 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13242 {
13243 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13244 TYPE_PRECISION (inner2)) + 1;
13245 return prec < TYPE_PRECISION (type);
13246 }
13247 }
13248 break;
13249
13250 case MULT_EXPR:
13251 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13252 {
13253 /* x * x is always non-negative for floating point x
13254 or without overflow. */
13255 if (operand_equal_p (op0, op1, 0)
13256 || (RECURSE (op0) && RECURSE (op1)))
13257 {
13258 if (ANY_INTEGRAL_TYPE_P (type)
13259 && TYPE_OVERFLOW_UNDEFINED (type))
13260 *strict_overflow_p = true;
13261 return true;
13262 }
13263 }
13264
13265 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13266 both unsigned and their combined precision is smaller than that of the result. */
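 /* For instance, two unsigned chars (8 bits each) multiplied in a 32-bit
    int yield at most 255 * 255 = 65025, which fits in 16 bits, so the
    result is always non-negative.  */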
13267 if (TREE_CODE (type) == INTEGER_TYPE
13268 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13269 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13270 {
13271 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13272 ? TREE_TYPE (TREE_OPERAND (op0, 0))
13273 : TREE_TYPE (op0);
13274 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13275 ? TREE_TYPE (TREE_OPERAND (op1, 0))
13276 : TREE_TYPE (op1);
13277
13278 bool unsigned0 = TYPE_UNSIGNED (inner0);
13279 bool unsigned1 = TYPE_UNSIGNED (inner1);
13280
13281 if (TREE_CODE (op0) == INTEGER_CST)
13282 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13283
13284 if (TREE_CODE (op1) == INTEGER_CST)
13285 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13286
13287 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13288 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13289 {
13290 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13291 ? tree_int_cst_min_precision (op0, UNSIGNED)
13292 : TYPE_PRECISION (inner0);
13293
13294 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13295 ? tree_int_cst_min_precision (op1, UNSIGNED)
13296 : TYPE_PRECISION (inner1);
13297
13298 return precision0 + precision1 < TYPE_PRECISION (type);
13299 }
13300 }
13301 return false;
13302
13303 case BIT_AND_EXPR:
13304 case MAX_EXPR:
13305 return RECURSE (op0) || RECURSE (op1);
13306
13307 case BIT_IOR_EXPR:
13308 case BIT_XOR_EXPR:
13309 case MIN_EXPR:
13310 case RDIV_EXPR:
13311 case TRUNC_DIV_EXPR:
13312 case CEIL_DIV_EXPR:
13313 case FLOOR_DIV_EXPR:
13314 case ROUND_DIV_EXPR:
13315 return RECURSE (op0) && RECURSE (op1);
13316
13317 case TRUNC_MOD_EXPR:
13318 return RECURSE (op0);
13319
13320 case FLOOR_MOD_EXPR:
13321 return RECURSE (op1);
13322
13323 case CEIL_MOD_EXPR:
13324 case ROUND_MOD_EXPR:
13325 default:
13326 return tree_simple_nonnegative_warnv_p (code, type);
13327 }
13328
13329 /* We don't know the sign of `t', so be conservative and return false. */
13330 return false;
13331 }
13332
13333 /* Return true if T is known to be non-negative. If the return
13334 value is based on the assumption that signed overflow is undefined,
13335 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13336 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13337
13338 bool
13339 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13340 {
13341 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13342 return true;
13343
13344 switch (TREE_CODE (t))
13345 {
13346 case INTEGER_CST:
13347 return tree_int_cst_sgn (t) >= 0;
13348
13349 case REAL_CST:
13350 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13351
13352 case FIXED_CST:
13353 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13354
13355 case COND_EXPR:
13356 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13357
13358 case SSA_NAME:
13359 /* Limit the depth of recursion to avoid quadratic behavior.
13360 This is expected to catch almost all occurrences in practice.
13361 If this code misses important cases that unbounded recursion
13362 would not, passes that need this information could be revised
13363 to provide it through dataflow propagation. */
13364 return (!name_registered_for_update_p (t)
13365 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13366 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13367 strict_overflow_p, depth));
13368
13369 default:
13370 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13371 }
13372 }
13373
13374 /* Return true if T is known to be non-negative. If the return
13375 value is based on the assumption that signed overflow is undefined,
13376 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13377 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13378
13379 bool
13380 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13381 bool *strict_overflow_p, int depth)
13382 {
13383 switch (fn)
13384 {
13385 CASE_CFN_ACOS:
13386 CASE_CFN_ACOSH:
13387 CASE_CFN_CABS:
13388 CASE_CFN_COSH:
13389 CASE_CFN_ERFC:
13390 CASE_CFN_EXP:
13391 CASE_CFN_EXP10:
13392 CASE_CFN_EXP2:
13393 CASE_CFN_FABS:
13394 CASE_CFN_FDIM:
13395 CASE_CFN_HYPOT:
13396 CASE_CFN_POW10:
13397 CASE_CFN_FFS:
13398 CASE_CFN_PARITY:
13399 CASE_CFN_POPCOUNT:
13400 CASE_CFN_CLZ:
13401 CASE_CFN_CLRSB:
13402 case CFN_BUILT_IN_BSWAP32:
13403 case CFN_BUILT_IN_BSWAP64:
13404 /* Always true. */
13405 return true;
13406
13407 CASE_CFN_SQRT:
13408 CASE_CFN_SQRT_FN:
13409 /* sqrt(-0.0) is -0.0. */
13410 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13411 return true;
13412 return RECURSE (arg0);
13413
13414 CASE_CFN_ASINH:
13415 CASE_CFN_ATAN:
13416 CASE_CFN_ATANH:
13417 CASE_CFN_CBRT:
13418 CASE_CFN_CEIL:
13419 CASE_CFN_CEIL_FN:
13420 CASE_CFN_ERF:
13421 CASE_CFN_EXPM1:
13422 CASE_CFN_FLOOR:
13423 CASE_CFN_FLOOR_FN:
13424 CASE_CFN_FMOD:
13425 CASE_CFN_FREXP:
13426 CASE_CFN_ICEIL:
13427 CASE_CFN_IFLOOR:
13428 CASE_CFN_IRINT:
13429 CASE_CFN_IROUND:
13430 CASE_CFN_LCEIL:
13431 CASE_CFN_LDEXP:
13432 CASE_CFN_LFLOOR:
13433 CASE_CFN_LLCEIL:
13434 CASE_CFN_LLFLOOR:
13435 CASE_CFN_LLRINT:
13436 CASE_CFN_LLROUND:
13437 CASE_CFN_LRINT:
13438 CASE_CFN_LROUND:
13439 CASE_CFN_MODF:
13440 CASE_CFN_NEARBYINT:
13441 CASE_CFN_NEARBYINT_FN:
13442 CASE_CFN_RINT:
13443 CASE_CFN_RINT_FN:
13444 CASE_CFN_ROUND:
13445 CASE_CFN_ROUND_FN:
13446 CASE_CFN_ROUNDEVEN:
13447 CASE_CFN_ROUNDEVEN_FN:
13448 CASE_CFN_SCALB:
13449 CASE_CFN_SCALBLN:
13450 CASE_CFN_SCALBN:
13451 CASE_CFN_SIGNBIT:
13452 CASE_CFN_SIGNIFICAND:
13453 CASE_CFN_SINH:
13454 CASE_CFN_TANH:
13455 CASE_CFN_TRUNC:
13456 CASE_CFN_TRUNC_FN:
13457 /* True if the 1st argument is nonnegative. */
13458 return RECURSE (arg0);
13459
13460 CASE_CFN_FMAX:
13461 CASE_CFN_FMAX_FN:
13462 /* True if the 1st OR 2nd arguments are nonnegative. */
13463 return RECURSE (arg0) || RECURSE (arg1);
13464
13465 CASE_CFN_FMIN:
13466 CASE_CFN_FMIN_FN:
13467 /* True if the 1st AND 2nd arguments are nonnegative. */
13468 return RECURSE (arg0) && RECURSE (arg1);
13469
13470 CASE_CFN_COPYSIGN:
13471 CASE_CFN_COPYSIGN_FN:
13472 /* True if the 2nd argument is nonnegative. */
13473 return RECURSE (arg1);
13474
13475 CASE_CFN_POWI:
13476 /* True if the 1st argument is nonnegative or the second
13477 argument is an even integer. */
13478 if (TREE_CODE (arg1) == INTEGER_CST
13479 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13480 return true;
13481 return RECURSE (arg0);
13482
13483 CASE_CFN_POW:
13484 /* True if the 1st argument is nonnegative or the second
13485 argument is an even integer valued real. */
13486 if (TREE_CODE (arg1) == REAL_CST)
13487 {
13488 REAL_VALUE_TYPE c;
13489 HOST_WIDE_INT n;
13490
13491 c = TREE_REAL_CST (arg1);
13492 n = real_to_integer (&c);
13493 if ((n & 1) == 0)
13494 {
13495 REAL_VALUE_TYPE cint;
13496 real_from_integer (&cint, VOIDmode, n, SIGNED);
13497 if (real_identical (&c, &cint))
13498 return true;
13499 }
13500 }
13501 return RECURSE (arg0);
13502
13503 default:
13504 break;
13505 }
13506 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13507 }
13508
13509 /* Return true if T is known to be non-negative. If the return
13510 value is based on the assumption that signed overflow is undefined,
13511 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13512 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13513
13514 static bool
13515 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13516 {
13517 enum tree_code code = TREE_CODE (t);
13518 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13519 return true;
13520
13521 switch (code)
13522 {
13523 case TARGET_EXPR:
13524 {
13525 tree temp = TARGET_EXPR_SLOT (t);
13526 t = TARGET_EXPR_INITIAL (t);
13527
13528 /* If the initializer is non-void, then it's a normal expression
13529 that will be assigned to the slot. */
13530 if (!VOID_TYPE_P (t))
13531 return RECURSE (t);
13532
13533 /* Otherwise, the initializer sets the slot in some way. One common
13534 way is an assignment statement at the end of the initializer. */
13535 while (1)
13536 {
13537 if (TREE_CODE (t) == BIND_EXPR)
13538 t = expr_last (BIND_EXPR_BODY (t));
13539 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13540 || TREE_CODE (t) == TRY_CATCH_EXPR)
13541 t = expr_last (TREE_OPERAND (t, 0));
13542 else if (TREE_CODE (t) == STATEMENT_LIST)
13543 t = expr_last (t);
13544 else
13545 break;
13546 }
13547 if (TREE_CODE (t) == MODIFY_EXPR
13548 && TREE_OPERAND (t, 0) == temp)
13549 return RECURSE (TREE_OPERAND (t, 1));
13550
13551 return false;
13552 }
13553
13554 case CALL_EXPR:
13555 {
13556 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13557 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13558
13559 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13560 get_call_combined_fn (t),
13561 arg0,
13562 arg1,
13563 strict_overflow_p, depth);
13564 }
13565 case COMPOUND_EXPR:
13566 case MODIFY_EXPR:
13567 return RECURSE (TREE_OPERAND (t, 1));
13568
13569 case BIND_EXPR:
13570 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13571
13572 case SAVE_EXPR:
13573 return RECURSE (TREE_OPERAND (t, 0));
13574
13575 default:
13576 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13577 }
13578 }
13579
13580 #undef RECURSE
13581 #undef tree_expr_nonnegative_warnv_p
13582
13583 /* Return true if T is known to be non-negative. If the return
13584 value is based on the assumption that signed overflow is undefined,
13585 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13586 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13587
13588 bool
13589 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13590 {
13591 enum tree_code code;
13592 if (t == error_mark_node)
13593 return false;
13594
13595 code = TREE_CODE (t);
13596 switch (TREE_CODE_CLASS (code))
13597 {
13598 case tcc_binary:
13599 case tcc_comparison:
13600 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13601 TREE_TYPE (t),
13602 TREE_OPERAND (t, 0),
13603 TREE_OPERAND (t, 1),
13604 strict_overflow_p, depth);
13605
13606 case tcc_unary:
13607 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13608 TREE_TYPE (t),
13609 TREE_OPERAND (t, 0),
13610 strict_overflow_p, depth);
13611
13612 case tcc_constant:
13613 case tcc_declaration:
13614 case tcc_reference:
13615 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13616
13617 default:
13618 break;
13619 }
13620
13621 switch (code)
13622 {
13623 case TRUTH_AND_EXPR:
13624 case TRUTH_OR_EXPR:
13625 case TRUTH_XOR_EXPR:
13626 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13627 TREE_TYPE (t),
13628 TREE_OPERAND (t, 0),
13629 TREE_OPERAND (t, 1),
13630 strict_overflow_p, depth);
13631 case TRUTH_NOT_EXPR:
13632 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13633 TREE_TYPE (t),
13634 TREE_OPERAND (t, 0),
13635 strict_overflow_p, depth);
13636
13637 case COND_EXPR:
13638 case CONSTRUCTOR:
13639 case OBJ_TYPE_REF:
13640 case ASSERT_EXPR:
13641 case ADDR_EXPR:
13642 case WITH_SIZE_EXPR:
13643 case SSA_NAME:
13644 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13645
13646 default:
13647 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13648 }
13649 }
13650
13651 /* Return true if `t' is known to be non-negative. Handle warnings
13652 about undefined signed overflow. */
13653
13654 bool
13655 tree_expr_nonnegative_p (tree t)
13656 {
13657 bool ret, strict_overflow_p;
13658
13659 strict_overflow_p = false;
13660 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13661 if (strict_overflow_p)
13662 fold_overflow_warning (("assuming signed overflow does not occur when "
13663 "determining that expression is always "
13664 "non-negative"),
13665 WARN_STRICT_OVERFLOW_MISC);
13666 return ret;
13667 }
13668
13669
13670 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13671 For floating point we further ensure that T is not denormal.
13672 Similar logic is present in nonzero_address in rtlanal.h.
13673
13674 If the return value is based on the assumption that signed overflow
13675 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13676 change *STRICT_OVERFLOW_P. */
13677
13678 bool
13679 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13680 bool *strict_overflow_p)
13681 {
13682 switch (code)
13683 {
13684 case ABS_EXPR:
13685 return tree_expr_nonzero_warnv_p (op0,
13686 strict_overflow_p);
13687
13688 case NOP_EXPR:
13689 {
13690 tree inner_type = TREE_TYPE (op0);
13691 tree outer_type = type;
13692
13693 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13694 && tree_expr_nonzero_warnv_p (op0,
13695 strict_overflow_p));
13696 }
13697 break;
13698
13699 case NON_LVALUE_EXPR:
13700 return tree_expr_nonzero_warnv_p (op0,
13701 strict_overflow_p);
13702
13703 default:
13704 break;
13705 }
13706
13707 return false;
13708 }
13709
13710 /* Return true when the value of (CODE OP0 OP1) is known to be
13711    nonzero.  Similar logic for addresses is present in
13712    nonzero_address_p in rtlanal.c.
13713
13714 If the return value is based on the assumption that signed overflow
13715 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13716 change *STRICT_OVERFLOW_P. */
13717
13718 bool
13719 tree_binary_nonzero_warnv_p (enum tree_code code,
13720 tree type,
13721 tree op0,
13722 tree op1, bool *strict_overflow_p)
13723 {
13724 bool sub_strict_overflow_p;
13725 switch (code)
13726 {
13727 case POINTER_PLUS_EXPR:
13728 case PLUS_EXPR:
13729 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13730 {
13731 /* In the presence of negative values it is hard
13732    to say anything definite.  */
13733 sub_strict_overflow_p = false;
13734 if (!tree_expr_nonnegative_warnv_p (op0,
13735 &sub_strict_overflow_p)
13736 || !tree_expr_nonnegative_warnv_p (op1,
13737 &sub_strict_overflow_p))
13738 return false;
13739 /* One of the operands must be positive and the other non-negative.  */
13740 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13741    overflows, on a two's-complement machine the sum of a positive
13742    and a non-negative number can never wrap around to zero.  */
13743 return (tree_expr_nonzero_warnv_p (op0,
13744 strict_overflow_p)
13745 || tree_expr_nonzero_warnv_p (op1,
13746 strict_overflow_p));
13747 }
13748 break;
13749
13750 case MULT_EXPR:
13751 if (TYPE_OVERFLOW_UNDEFINED (type))
13752 {
13753 if (tree_expr_nonzero_warnv_p (op0,
13754 strict_overflow_p)
13755 && tree_expr_nonzero_warnv_p (op1,
13756 strict_overflow_p))
13757 {
13758 *strict_overflow_p = true;
13759 return true;
13760 }
13761 }
13762 break;
13763
13764 case MIN_EXPR:
13765 sub_strict_overflow_p = false;
13766 if (tree_expr_nonzero_warnv_p (op0,
13767 &sub_strict_overflow_p)
13768 && tree_expr_nonzero_warnv_p (op1,
13769 &sub_strict_overflow_p))
13770 {
13771 if (sub_strict_overflow_p)
13772 *strict_overflow_p = true;
13773 }
13774 break;
13775
13776 case MAX_EXPR:
13777 sub_strict_overflow_p = false;
13778 if (tree_expr_nonzero_warnv_p (op0,
13779 &sub_strict_overflow_p))
13780 {
13781 if (sub_strict_overflow_p)
13782 *strict_overflow_p = true;
13783
13784 /* When both operands are nonzero, then MAX must be too. */
13785 if (tree_expr_nonzero_warnv_p (op1,
13786 strict_overflow_p))
13787 return true;
13788
13789 /* MAX where operand 0 is positive is positive. */
13790 return tree_expr_nonnegative_warnv_p (op0,
13791 strict_overflow_p);
13792 }
13793 /* MAX where operand 1 is positive is positive. */
13794 else if (tree_expr_nonzero_warnv_p (op1,
13795 &sub_strict_overflow_p)
13796 && tree_expr_nonnegative_warnv_p (op1,
13797 &sub_strict_overflow_p))
13798 {
13799 if (sub_strict_overflow_p)
13800 *strict_overflow_p = true;
13801 return true;
13802 }
13803 break;
13804
13805 case BIT_IOR_EXPR:
13806 return (tree_expr_nonzero_warnv_p (op1,
13807 strict_overflow_p)
13808 || tree_expr_nonzero_warnv_p (op0,
13809 strict_overflow_p));
13810
13811 default:
13812 break;
13813 }
13814
13815 return false;
13816 }
13817
13818 /* Return true when T is known to be nonzero.  This covers addresses
13819    (compare nonzero_address_p in rtlanal.c) as well as integer
13820    constants, COND_EXPRs and SSA_NAMEs with known ranges.
13821
13822 If the return value is based on the assumption that signed overflow
13823 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13824 change *STRICT_OVERFLOW_P. */
13825
13826 bool
13827 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13828 {
13829 bool sub_strict_overflow_p;
13830 switch (TREE_CODE (t))
13831 {
13832 case INTEGER_CST:
13833 return !integer_zerop (t);
13834
13835 case ADDR_EXPR:
13836 {
13837 tree base = TREE_OPERAND (t, 0);
13838
13839 if (!DECL_P (base))
13840 base = get_base_address (base);
13841
13842 if (base && TREE_CODE (base) == TARGET_EXPR)
13843 base = TARGET_EXPR_SLOT (base);
13844
13845 if (!base)
13846 return false;
13847
13848 /* For objects in the symbol table, check whether we know they are
13849    non-zero.  Don't do anything for variables and functions before the
13850    symtab is built; it is quite possible they will be declared weak later.  */
13851 int nonzero_addr = maybe_nonzero_address (base);
13852 if (nonzero_addr >= 0)
13853 return nonzero_addr;
13854
13855 /* Constants are never weak. */
13856 if (CONSTANT_CLASS_P (base))
13857 return true;
13858
13859 return false;
13860 }
13861
13862 case COND_EXPR:
13863 sub_strict_overflow_p = false;
13864 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13865 &sub_strict_overflow_p)
13866 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13867 &sub_strict_overflow_p))
13868 {
13869 if (sub_strict_overflow_p)
13870 *strict_overflow_p = true;
13871 return true;
13872 }
13873 break;
13874
13875 case SSA_NAME:
13876 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13877 break;
13878 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13879
13880 default:
13881 break;
13882 }
13883 return false;
13884 }
13885
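/* A minimal usage sketch (illustrative only, kept out of the build):
   fold the test X != 0 to true when X is known to be nonzero.  The
   helper name is hypothetical; tree_expr_nonzero_warnv_p is the query
   these unary/binary/single helpers call on their operands.  */
#if 0
static tree
example_fold_ne_zero (tree type, tree x)
{
  bool strict_overflow_p = false;
  if (tree_expr_nonzero_warnv_p (x, &strict_overflow_p))
    {
      /* A real caller would warn via fold_overflow_warning when
	 strict_overflow_p is set; this sketch simply gives up.  */
      if (strict_overflow_p)
	return NULL_TREE;
      return constant_boolean_node (true, type);
    }
  return NULL_TREE;
}
#endif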
13886 #define integer_valued_real_p(X) \
13887 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13888
13889 #define RECURSE(X) \
13890 ((integer_valued_real_p) (X, depth + 1))
13891
13892 /* Return true if the floating point result of (CODE OP0) has an
13893 integer value. We also allow +Inf, -Inf and NaN to be considered
13894 integer values. Return false for signaling NaN.
13895
13896 DEPTH is the current nesting depth of the query. */
13897
13898 bool
13899 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13900 {
13901 switch (code)
13902 {
13903 case FLOAT_EXPR:
13904 return true;
13905
13906 case ABS_EXPR:
13907 return RECURSE (op0);
13908
13909 CASE_CONVERT:
13910 {
13911 tree type = TREE_TYPE (op0);
13912 if (TREE_CODE (type) == INTEGER_TYPE)
13913 return true;
13914 if (TREE_CODE (type) == REAL_TYPE)
13915 return RECURSE (op0);
13916 break;
13917 }
13918
13919 default:
13920 break;
13921 }
13922 return false;
13923 }
13924
13925 /* Return true if the floating point result of (CODE OP0 OP1) has an
13926 integer value. We also allow +Inf, -Inf and NaN to be considered
13927 integer values. Return false for signaling NaN.
13928
13929 DEPTH is the current nesting depth of the query. */
13930
13931 bool
13932 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13933 {
13934 switch (code)
13935 {
13936 case PLUS_EXPR:
13937 case MINUS_EXPR:
13938 case MULT_EXPR:
13939 case MIN_EXPR:
13940 case MAX_EXPR:
13941 return RECURSE (op0) && RECURSE (op1);
13942
13943 default:
13944 break;
13945 }
13946 return false;
13947 }
13948
13949 /* Return true if the floating point result of calling FN with arguments
13950    ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
13951    considered integer values.  Return false for signaling NaN.  If FN
13952    takes fewer than 2 arguments, the remaining ARGn are null.
13953
13954 DEPTH is the current nesting depth of the query. */
13955
13956 bool
13957 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13958 {
13959 switch (fn)
13960 {
13961 CASE_CFN_CEIL:
13962 CASE_CFN_CEIL_FN:
13963 CASE_CFN_FLOOR:
13964 CASE_CFN_FLOOR_FN:
13965 CASE_CFN_NEARBYINT:
13966 CASE_CFN_NEARBYINT_FN:
13967 CASE_CFN_RINT:
13968 CASE_CFN_RINT_FN:
13969 CASE_CFN_ROUND:
13970 CASE_CFN_ROUND_FN:
13971 CASE_CFN_ROUNDEVEN:
13972 CASE_CFN_ROUNDEVEN_FN:
13973 CASE_CFN_TRUNC:
13974 CASE_CFN_TRUNC_FN:
13975 return true;
13976
13977 CASE_CFN_FMIN:
13978 CASE_CFN_FMIN_FN:
13979 CASE_CFN_FMAX:
13980 CASE_CFN_FMAX_FN:
13981 return RECURSE (arg0) && RECURSE (arg1);
13982
13983 default:
13984 break;
13985 }
13986 return false;
13987 }
13988
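/* For example (illustrative): CFN_BUILT_IN_CEIL always produces an
   integer value, so it is accepted unconditionally above, while
   CFN_BUILT_IN_FMIN is integer valued only when both of its arguments
   are, which is what the RECURSE checks on ARG0 and ARG1 express.  */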
13989 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13990 has an integer value. We also allow +Inf, -Inf and NaN to be
13991 considered integer values. Return false for signaling NaN.
13992
13993 DEPTH is the current nesting depth of the query. */
13994
13995 bool
13996 integer_valued_real_single_p (tree t, int depth)
13997 {
13998 switch (TREE_CODE (t))
13999 {
14000 case REAL_CST:
14001 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
14002
14003 case COND_EXPR:
14004 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14005
14006 case SSA_NAME:
14007 /* Limit the depth of recursion to avoid quadratic behavior.
14008 This is expected to catch almost all occurrences in practice.
14009 If this code misses important cases that unbounded recursion
14010 would not, passes that need this information could be revised
14011 to provide it through dataflow propagation. */
14012 return (!name_registered_for_update_p (t)
14013 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
14014 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
14015 depth));
14016
14017 default:
14018 break;
14019 }
14020 return false;
14021 }
14022
14023 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
14024 has an integer value. We also allow +Inf, -Inf and NaN to be
14025 considered integer values. Return false for signaling NaN.
14026
14027 DEPTH is the current nesting depth of the query. */
14028
14029 static bool
14030 integer_valued_real_invalid_p (tree t, int depth)
14031 {
14032 switch (TREE_CODE (t))
14033 {
14034 case COMPOUND_EXPR:
14035 case MODIFY_EXPR:
14036 case BIND_EXPR:
14037 return RECURSE (TREE_OPERAND (t, 1));
14038
14039 case SAVE_EXPR:
14040 return RECURSE (TREE_OPERAND (t, 0));
14041
14042 default:
14043 break;
14044 }
14045 return false;
14046 }
14047
14048 #undef RECURSE
14049 #undef integer_valued_real_p
14050
14051 /* Return true if the floating point expression T has an integer value.
14052 We also allow +Inf, -Inf and NaN to be considered integer values.
14053 Return false for signaling NaN.
14054
14055 DEPTH is the current nesting depth of the query. */
14056
14057 bool
14058 integer_valued_real_p (tree t, int depth)
14059 {
14060 if (t == error_mark_node)
14061 return false;
14062
14063 STRIP_ANY_LOCATION_WRAPPER (t);
14064
14065 tree_code code = TREE_CODE (t);
14066 switch (TREE_CODE_CLASS (code))
14067 {
14068 case tcc_binary:
14069 case tcc_comparison:
14070 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
14071 TREE_OPERAND (t, 1), depth);
14072
14073 case tcc_unary:
14074 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
14075
14076 case tcc_constant:
14077 case tcc_declaration:
14078 case tcc_reference:
14079 return integer_valued_real_single_p (t, depth);
14080
14081 default:
14082 break;
14083 }
14084
14085 switch (code)
14086 {
14087 case COND_EXPR:
14088 case SSA_NAME:
14089 return integer_valued_real_single_p (t, depth);
14090
14091 case CALL_EXPR:
14092 {
14093 tree arg0 = (call_expr_nargs (t) > 0
14094 ? CALL_EXPR_ARG (t, 0)
14095 : NULL_TREE);
14096 tree arg1 = (call_expr_nargs (t) > 1
14097 ? CALL_EXPR_ARG (t, 1)
14098 : NULL_TREE);
14099 return integer_valued_real_call_p (get_call_combined_fn (t),
14100 arg0, arg1, depth);
14101 }
14102
14103 default:
14104 return integer_valued_real_invalid_p (t, depth);
14105 }
14106 }
14107
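/* For example (illustrative): a REAL_CST such as 2.0 satisfies this
   predicate, as does (double) i for an integer-typed i via the
   FLOAT_EXPR case above, whereas an arbitrary REAL_TYPE PARM_DECL is
   not known to be integer valued and yields false.  */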
14108 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14109 attempt to fold the expression to a constant without modifying TYPE,
14110 OP0 or OP1.
14111
14112 If the expression could be simplified to a constant, then return
14113 the constant. If the expression would not be simplified to a
14114 constant, then return NULL_TREE. */
14115
14116 tree
14117 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14118 {
14119 tree tem = fold_binary (code, type, op0, op1);
14120 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14121 }
14122
14123 /* Given the components of a unary expression CODE, TYPE and OP0,
14124 attempt to fold the expression to a constant without modifying
14125 TYPE or OP0.
14126
14127 If the expression could be simplified to a constant, then return
14128 the constant. If the expression would not be simplified to a
14129 constant, then return NULL_TREE. */
14130
14131 tree
14132 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14133 {
14134 tree tem = fold_unary (code, type, op0);
14135 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14136 }
14137
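/* A minimal usage sketch (illustrative only, kept out of the build):
   2 + 3 in integer_type_node folds to the INTEGER_CST 5, whereas a sum
   involving a variable is not constant and the functions above return
   NULL_TREE for it.  The helper name is hypothetical.  */
#if 0
static tree
example_fold_two_plus_three (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, two, three);
}
#endif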
14138 /* If EXP represents referencing an element in a constant string
14139 (either via pointer arithmetic or array indexing), return the
14140 tree representing the value accessed, otherwise return NULL. */
14141
14142 tree
14143 fold_read_from_constant_string (tree exp)
14144 {
14145 if ((TREE_CODE (exp) == INDIRECT_REF
14146 || TREE_CODE (exp) == ARRAY_REF)
14147 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14148 {
14149 tree exp1 = TREE_OPERAND (exp, 0);
14150 tree index;
14151 tree string;
14152 location_t loc = EXPR_LOCATION (exp);
14153
14154 if (TREE_CODE (exp) == INDIRECT_REF)
14155 string = string_constant (exp1, &index, NULL, NULL);
14156 else
14157 {
14158 tree low_bound = array_ref_low_bound (exp);
14159 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
14160
14161 /* Optimize the special case of a zero lower bound.
14162
14163 We convert the low_bound to sizetype to avoid some problems
14164 with constant folding. (E.g. suppose the lower bound is 1,
14165 and its mode is QI.  Without the conversion, (ARRAY
14166 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14167 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14168 if (! integer_zerop (low_bound))
14169 index = size_diffop_loc (loc, index,
14170 fold_convert_loc (loc, sizetype, low_bound));
14171
14172 string = exp1;
14173 }
14174
14175 scalar_int_mode char_mode;
14176 if (string
14177 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14178 && TREE_CODE (string) == STRING_CST
14179 && TREE_CODE (index) == INTEGER_CST
14180 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14181 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
14182 &char_mode)
14183 && GET_MODE_SIZE (char_mode) == 1)
14184 return build_int_cst_type (TREE_TYPE (exp),
14185 (TREE_STRING_POINTER (string)
14186 [TREE_INT_CST_LOW (index)]));
14187 }
14188 return NULL;
14189 }
14190
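/* For example (illustrative): for EXP representing "abc"[1], i.e. an
   ARRAY_REF of a STRING_CST with constant index 1, the function above
   returns an INTEGER_CST holding the value of 'b' in the element type
   of EXP; a non-constant index leaves it returning NULL.  */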
14191 /* Folds a read from vector element at IDX of vector ARG. */
14192
14193 tree
14194 fold_read_from_vector (tree arg, poly_uint64 idx)
14195 {
14196 unsigned HOST_WIDE_INT i;
14197 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
14198 && known_ge (idx, 0u)
14199 && idx.is_constant (&i))
14200 {
14201 if (TREE_CODE (arg) == VECTOR_CST)
14202 return VECTOR_CST_ELT (arg, i);
14203 else if (TREE_CODE (arg) == CONSTRUCTOR)
14204 {
14205 if (i >= CONSTRUCTOR_NELTS (arg))
14206 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
14207 return CONSTRUCTOR_ELT (arg, i)->value;
14208 }
14209 }
14210 return NULL_TREE;
14211 }
14212
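/* For example (illustrative): reading element 2 of the VECTOR_CST
   { 1, 2, 3, 4 } yields its third element, 3; for a CONSTRUCTOR, an
   in-range IDX beyond the initialized elements reads as zero of the
   element type.  */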
14213 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14214 an integer constant, real, or fixed-point constant.
14215
14216 TYPE is the type of the result. */
14217
14218 static tree
14219 fold_negate_const (tree arg0, tree type)
14220 {
14221 tree t = NULL_TREE;
14222
14223 switch (TREE_CODE (arg0))
14224 {
14225 case REAL_CST:
14226 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14227 break;
14228
14229 case FIXED_CST:
14230 {
14231 FIXED_VALUE_TYPE f;
14232 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14233 &(TREE_FIXED_CST (arg0)), NULL,
14234 TYPE_SATURATING (type));
14235 t = build_fixed (type, f);
14236 /* Propagate overflow flags. */
14237 if (overflow_p | TREE_OVERFLOW (arg0))
14238 TREE_OVERFLOW (t) = 1;
14239 break;
14240 }
14241
14242 default:
14243 if (poly_int_tree_p (arg0))
14244 {
14245 wi::overflow_type overflow;
14246 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
14247 t = force_fit_type (type, res, 1,
14248 (overflow && ! TYPE_UNSIGNED (type))
14249 || TREE_OVERFLOW (arg0));
14250 break;
14251 }
14252
14253 gcc_unreachable ();
14254 }
14255
14256 return t;
14257 }
14258
14259 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14260 an integer constant or real constant.
14261
14262 TYPE is the type of the result. */
14263
14264 tree
14265 fold_abs_const (tree arg0, tree type)
14266 {
14267 tree t = NULL_TREE;
14268
14269 switch (TREE_CODE (arg0))
14270 {
14271 case INTEGER_CST:
14272 {
14273 /* If the value is unsigned or non-negative, then the absolute value
14274 is the same as the ordinary value. */
14275 wide_int val = wi::to_wide (arg0);
14276 wi::overflow_type overflow = wi::OVF_NONE;
14277 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
14278 ;
14279
14280 /* If the value is negative, then the absolute value is
14281 its negation. */
14282 else
14283 val = wi::neg (val, &overflow);
14284
14285 /* Force to the destination type, set TREE_OVERFLOW for signed
14286 TYPE only. */
14287 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
14288 }
14289 break;
14290
14291 case REAL_CST:
14292 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14293 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14294 else
14295 t = arg0;
14296 break;
14297
14298 default:
14299 gcc_unreachable ();
14300 }
14301
14302 return t;
14303 }
14304
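/* For example (illustrative): fold_abs_const on the minimum value of a
   signed type negates it; the negation wraps back to the same value,
   so the result is returned with TREE_OVERFLOW set.  */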
14305 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14306 constant. TYPE is the type of the result. */
14307
14308 static tree
14309 fold_not_const (const_tree arg0, tree type)
14310 {
14311 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14312
14313 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
14314 }
14315
14316 /* Given CODE, a relational operator, the target type, TYPE and two
14317 constant operands OP0 and OP1, return the result of the
14318 relational operation. If the result is not a compile time
14319 constant, then return NULL_TREE. */
14320
14321 static tree
14322 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14323 {
14324 int result, invert;
14325
14326 /* From here on, the only cases we handle are when the result is
14327 known to be a constant. */
14328
14329 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14330 {
14331 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14332 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14333
14334 /* Handle the cases where either operand is a NaN. */
14335 if (real_isnan (c0) || real_isnan (c1))
14336 {
14337 switch (code)
14338 {
14339 case EQ_EXPR:
14340 case ORDERED_EXPR:
14341 result = 0;
14342 break;
14343
14344 case NE_EXPR:
14345 case UNORDERED_EXPR:
14346 case UNLT_EXPR:
14347 case UNLE_EXPR:
14348 case UNGT_EXPR:
14349 case UNGE_EXPR:
14350 case UNEQ_EXPR:
14351 result = 1;
14352 break;
14353
14354 case LT_EXPR:
14355 case LE_EXPR:
14356 case GT_EXPR:
14357 case GE_EXPR:
14358 case LTGT_EXPR:
14359 if (flag_trapping_math)
14360 return NULL_TREE;
14361 result = 0;
14362 break;
14363
14364 default:
14365 gcc_unreachable ();
14366 }
14367
14368 return constant_boolean_node (result, type);
14369 }
14370
14371 return constant_boolean_node (real_compare (code, c0, c1), type);
14372 }
14373
14374 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14375 {
14376 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14377 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14378 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14379 }
14380
14381 /* Handle equality/inequality of complex constants. */
14382 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14383 {
14384 tree rcond = fold_relational_const (code, type,
14385 TREE_REALPART (op0),
14386 TREE_REALPART (op1));
14387 tree icond = fold_relational_const (code, type,
14388 TREE_IMAGPART (op0),
14389 TREE_IMAGPART (op1));
14390 if (code == EQ_EXPR)
14391 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14392 else if (code == NE_EXPR)
14393 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14394 else
14395 return NULL_TREE;
14396 }
14397
14398 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14399 {
14400 if (!VECTOR_TYPE_P (type))
14401 {
14402 /* Have vector comparison with scalar boolean result. */
14403 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14404 && known_eq (VECTOR_CST_NELTS (op0),
14405 VECTOR_CST_NELTS (op1)));
14406 unsigned HOST_WIDE_INT nunits;
14407 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
14408 return NULL_TREE;
14409 for (unsigned i = 0; i < nunits; i++)
14410 {
14411 tree elem0 = VECTOR_CST_ELT (op0, i);
14412 tree elem1 = VECTOR_CST_ELT (op1, i);
14413 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
14414 if (tmp == NULL_TREE)
14415 return NULL_TREE;
14416 if (integer_zerop (tmp))
14417 return constant_boolean_node (code == NE_EXPR, type);
14418 }
14419 return constant_boolean_node (code == EQ_EXPR, type);
14420 }
14421 tree_vector_builder elts;
14422 if (!elts.new_binary_operation (type, op0, op1, false))
14423 return NULL_TREE;
14424 unsigned int count = elts.encoded_nelts ();
14425 for (unsigned i = 0; i < count; i++)
14426 {
14427 tree elem_type = TREE_TYPE (type);
14428 tree elem0 = VECTOR_CST_ELT (op0, i);
14429 tree elem1 = VECTOR_CST_ELT (op1, i);
14430
14431 tree tem = fold_relational_const (code, elem_type,
14432 elem0, elem1);
14433
14434 if (tem == NULL_TREE)
14435 return NULL_TREE;
14436
14437 elts.quick_push (build_int_cst (elem_type,
14438 integer_zerop (tem) ? 0 : -1));
14439 }
14440
14441 return elts.build ();
14442 }
14443
14444 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14445
14446 To compute GT, swap the arguments and do LT.
14447 To compute GE, do LT and invert the result.
14448 To compute LE, swap the arguments, do LT and invert the result.
14449 To compute NE, do EQ and invert the result.
14450
14451 Therefore, the code below must handle only EQ and LT. */
14452
14453 if (code == LE_EXPR || code == GT_EXPR)
14454 {
14455 std::swap (op0, op1);
14456 code = swap_tree_comparison (code);
14457 }
14458
14459 /* Note that it is safe to invert for real values here because we
14460    have already handled the one case where it matters.  */
14461
14462 invert = 0;
14463 if (code == NE_EXPR || code == GE_EXPR)
14464 {
14465 invert = 1;
14466 code = invert_tree_comparison (code, false);
14467 }
14468
14469 /* Compute a result for LT or EQ if the arguments permit;
14470    otherwise return NULL_TREE.  */
14471 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14472 {
14473 if (code == EQ_EXPR)
14474 result = tree_int_cst_equal (op0, op1);
14475 else
14476 result = tree_int_cst_lt (op0, op1);
14477 }
14478 else
14479 return NULL_TREE;
14480
14481 if (invert)
14482 result ^= 1;
14483 return constant_boolean_node (result, type);
14484 }
14485
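/* For example (illustrative): comparing two REAL_CST operands where
   either is a NaN folds EQ_EXPR to false and UNORDERED_EXPR to true,
   while an ordered comparison such as LT_EXPR is left unfolded
   (NULL_TREE) under flag_trapping_math because it may raise an
   exception.  */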
14486 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14487 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14488 itself. */
14489
14490 tree
14491 fold_build_cleanup_point_expr (tree type, tree expr)
14492 {
14493 /* If the expression does not have side effects then we don't have to wrap
14494 it with a cleanup point expression. */
14495 if (!TREE_SIDE_EFFECTS (expr))
14496 return expr;
14497
14498 /* If the expression is a return, check whether the expression inside the
14499    return, or the right-hand side of the MODIFY_EXPR inside the return, has
14500    no side effects.  If either has none, we don't need to wrap the
14501    expression in a cleanup point expression.  Note we don't check the
14502    left-hand side of the MODIFY_EXPR because it should always be the return decl.  */
14503 if (TREE_CODE (expr) == RETURN_EXPR)
14504 {
14505 tree op = TREE_OPERAND (expr, 0);
14506 if (!op || !TREE_SIDE_EFFECTS (op))
14507 return expr;
14508 op = TREE_OPERAND (op, 1);
14509 if (!TREE_SIDE_EFFECTS (op))
14510 return expr;
14511 }
14512
14513 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14514 }
14515
14516 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14517 of an indirection through OP0, or NULL_TREE if no simplification is
14518 possible. */
14519
14520 tree
14521 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14522 {
14523 tree sub = op0;
14524 tree subtype;
14525 poly_uint64 const_op01;
14526
14527 STRIP_NOPS (sub);
14528 subtype = TREE_TYPE (sub);
14529 if (!POINTER_TYPE_P (subtype)
14530 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14531 return NULL_TREE;
14532
14533 if (TREE_CODE (sub) == ADDR_EXPR)
14534 {
14535 tree op = TREE_OPERAND (sub, 0);
14536 tree optype = TREE_TYPE (op);
14537
14538 /* *&CONST_DECL -> to the value of the const decl. */
14539 if (TREE_CODE (op) == CONST_DECL)
14540 return DECL_INITIAL (op);
14541 /* *&p => p; make sure to handle *&"str"[cst] here. */
14542 if (type == optype)
14543 {
14544 tree fop = fold_read_from_constant_string (op);
14545 if (fop)
14546 return fop;
14547 else
14548 return op;
14549 }
14550 /* *(foo *)&fooarray => fooarray[0] */
14551 else if (TREE_CODE (optype) == ARRAY_TYPE
14552 && type == TREE_TYPE (optype)
14553 && (!in_gimple_form
14554 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14555 {
14556 tree type_domain = TYPE_DOMAIN (optype);
14557 tree min_val = size_zero_node;
14558 if (type_domain && TYPE_MIN_VALUE (type_domain))
14559 min_val = TYPE_MIN_VALUE (type_domain);
14560 if (in_gimple_form
14561 && TREE_CODE (min_val) != INTEGER_CST)
14562 return NULL_TREE;
14563 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14564 NULL_TREE, NULL_TREE);
14565 }
14566 /* *(foo *)&complexfoo => __real__ complexfoo */
14567 else if (TREE_CODE (optype) == COMPLEX_TYPE
14568 && type == TREE_TYPE (optype))
14569 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14570 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14571 else if (VECTOR_TYPE_P (optype)
14572 && type == TREE_TYPE (optype))
14573 {
14574 tree part_width = TYPE_SIZE (type);
14575 tree index = bitsize_int (0);
14576 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
14577 index);
14578 }
14579 }
14580
14581 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14582 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
14583 {
14584 tree op00 = TREE_OPERAND (sub, 0);
14585 tree op01 = TREE_OPERAND (sub, 1);
14586
14587 STRIP_NOPS (op00);
14588 if (TREE_CODE (op00) == ADDR_EXPR)
14589 {
14590 tree op00type;
14591 op00 = TREE_OPERAND (op00, 0);
14592 op00type = TREE_TYPE (op00);
14593
14594 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14595 if (VECTOR_TYPE_P (op00type)
14596 && type == TREE_TYPE (op00type)
14597 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
14598 but we want to treat offsets with MSB set as negative.
14599 For the code below negative offsets are invalid and
14600 TYPE_SIZE of the element is something unsigned, so
14601 check whether op01 fits into poly_int64, which implies
14602 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
14603 then just use poly_uint64 because we want to treat the
14604 value as unsigned. */
14605 && tree_fits_poly_int64_p (op01))
14606 {
14607 tree part_width = TYPE_SIZE (type);
14608 poly_uint64 max_offset
14609 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14610 * TYPE_VECTOR_SUBPARTS (op00type));
14611 if (known_lt (const_op01, max_offset))
14612 {
14613 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
14614 return fold_build3_loc (loc,
14615 BIT_FIELD_REF, type, op00,
14616 part_width, index);
14617 }
14618 }
14619 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14620 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14621 && type == TREE_TYPE (op00type))
14622 {
14623 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
14624 const_op01))
14625 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14626 }
14627 /* ((foo *)&fooarray)[1] => fooarray[1] */
14628 else if (TREE_CODE (op00type) == ARRAY_TYPE
14629 && type == TREE_TYPE (op00type))
14630 {
14631 tree type_domain = TYPE_DOMAIN (op00type);
14632 tree min_val = size_zero_node;
14633 if (type_domain && TYPE_MIN_VALUE (type_domain))
14634 min_val = TYPE_MIN_VALUE (type_domain);
14635 poly_uint64 type_size, index;
14636 if (poly_int_tree_p (min_val)
14637 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
14638 && multiple_p (const_op01, type_size, &index))
14639 {
14640 poly_offset_int off = index + wi::to_poly_offset (min_val);
14641 op01 = wide_int_to_tree (sizetype, off);
14642 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14643 NULL_TREE, NULL_TREE);
14644 }
14645 }
14646 }
14647 }
14648
14649 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14650 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14651 && type == TREE_TYPE (TREE_TYPE (subtype))
14652 && (!in_gimple_form
14653 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14654 {
14655 tree type_domain;
14656 tree min_val = size_zero_node;
14657 sub = build_fold_indirect_ref_loc (loc, sub);
14658 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14659 if (type_domain && TYPE_MIN_VALUE (type_domain))
14660 min_val = TYPE_MIN_VALUE (type_domain);
14661 if (in_gimple_form
14662 && TREE_CODE (min_val) != INTEGER_CST)
14663 return NULL_TREE;
14664 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14665 NULL_TREE);
14666 }
14667
14668 return NULL_TREE;
14669 }
14670
14671 /* Builds an expression for an indirection through T, simplifying some
14672 cases. */
14673
14674 tree
14675 build_fold_indirect_ref_loc (location_t loc, tree t)
14676 {
14677 tree type = TREE_TYPE (TREE_TYPE (t));
14678 tree sub = fold_indirect_ref_1 (loc, type, t);
14679
14680 if (sub)
14681 return sub;
14682
14683 return build1_loc (loc, INDIRECT_REF, type, t);
14684 }
14685
14686 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14687
14688 tree
14689 fold_indirect_ref_loc (location_t loc, tree t)
14690 {
14691 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14692
14693 if (sub)
14694 return sub;
14695 else
14696 return t;
14697 }
14698
14699 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14700 whose result is ignored. The type of the returned tree need not be
14701 the same as the original expression. */
14702
14703 tree
14704 fold_ignored_result (tree t)
14705 {
14706 if (!TREE_SIDE_EFFECTS (t))
14707 return integer_zero_node;
14708
14709 for (;;)
14710 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14711 {
14712 case tcc_unary:
14713 t = TREE_OPERAND (t, 0);
14714 break;
14715
14716 case tcc_binary:
14717 case tcc_comparison:
14718 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14719 t = TREE_OPERAND (t, 0);
14720 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14721 t = TREE_OPERAND (t, 1);
14722 else
14723 return t;
14724 break;
14725
14726 case tcc_expression:
14727 switch (TREE_CODE (t))
14728 {
14729 case COMPOUND_EXPR:
14730 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14731 return t;
14732 t = TREE_OPERAND (t, 0);
14733 break;
14734
14735 case COND_EXPR:
14736 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14737 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14738 return t;
14739 t = TREE_OPERAND (t, 0);
14740 break;
14741
14742 default:
14743 return t;
14744 }
14745 break;
14746
14747 default:
14748 return t;
14749 }
14750 }
14751
14752 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14753
14754 tree
14755 round_up_loc (location_t loc, tree value, unsigned int divisor)
14756 {
14757 tree div = NULL_TREE;
14758
14759 if (divisor == 1)
14760 return value;
14761
14762 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14763    have to do anything.  Only do this when VALUE is not a constant,
14764    because for a constant this check is more expensive than simply
14765    doing the rounding.  */
14766 if (TREE_CODE (value) != INTEGER_CST)
14767 {
14768 div = build_int_cst (TREE_TYPE (value), divisor);
14769
14770 if (multiple_of_p (TREE_TYPE (value), value, div))
14771 return value;
14772 }
14773
14774 /* If divisor is a power of two, simplify this to bit manipulation. */
14775 if (pow2_or_zerop (divisor))
14776 {
14777 if (TREE_CODE (value) == INTEGER_CST)
14778 {
14779 wide_int val = wi::to_wide (value);
14780 bool overflow_p;
14781
14782 if ((val & (divisor - 1)) == 0)
14783 return value;
14784
14785 overflow_p = TREE_OVERFLOW (value);
14786 val += divisor - 1;
14787 val &= (int) -divisor;
14788 if (val == 0)
14789 overflow_p = true;
14790
14791 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14792 }
14793 else
14794 {
14795 tree t;
14796
14797 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14798 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14799 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14800 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14801 }
14802 }
14803 else
14804 {
14805 if (!div)
14806 div = build_int_cst (TREE_TYPE (value), divisor);
14807 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14808 value = size_binop_loc (loc, MULT_EXPR, value, div);
14809 }
14810
14811 return value;
14812 }
14813
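/* For example (illustrative): with DIVISOR equal to 8, a constant
   VALUE of 10 folds directly to 16, while a non-constant VALUE takes
   the power-of-two path above and becomes (VALUE + 7) & -8.  */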
14814 /* Likewise, but round down. */
14815
14816 tree
14817 round_down_loc (location_t loc, tree value, int divisor)
14818 {
14819 tree div = NULL_TREE;
14820
14821 gcc_assert (divisor > 0);
14822 if (divisor == 1)
14823 return value;
14824
14825 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14826    have to do anything.  Only do this when VALUE is not a constant,
14827    because for a constant this check is more expensive than simply
14828    doing the rounding.  */
14829 if (TREE_CODE (value) != INTEGER_CST)
14830 {
14831 div = build_int_cst (TREE_TYPE (value), divisor);
14832
14833 if (multiple_of_p (TREE_TYPE (value), value, div))
14834 return value;
14835 }
14836
14837 /* If divisor is a power of two, simplify this to bit manipulation. */
14838 if (pow2_or_zerop (divisor))
14839 {
14840 tree t;
14841
14842 t = build_int_cst (TREE_TYPE (value), -divisor);
14843 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14844 }
14845 else
14846 {
14847 if (!div)
14848 div = build_int_cst (TREE_TYPE (value), divisor);
14849 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14850 value = size_binop_loc (loc, MULT_EXPR, value, div);
14851 }
14852
14853 return value;
14854 }
14855
14856 /* Returns the pointer to the base of the object addressed by EXP and
14857 extracts the information about the offset of the access, storing it
14858    in PBITPOS and POFFSET.  */
14859
14860 static tree
14861 split_address_to_core_and_offset (tree exp,
14862 poly_int64_pod *pbitpos, tree *poffset)
14863 {
14864 tree core;
14865 machine_mode mode;
14866 int unsignedp, reversep, volatilep;
14867 poly_int64 bitsize;
14868 location_t loc = EXPR_LOCATION (exp);
14869
14870 if (TREE_CODE (exp) == ADDR_EXPR)
14871 {
14872 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14873 poffset, &mode, &unsignedp, &reversep,
14874 &volatilep);
14875 core = build_fold_addr_expr_loc (loc, core);
14876 }
14877 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14878 {
14879 core = TREE_OPERAND (exp, 0);
14880 STRIP_NOPS (core);
14881 *pbitpos = 0;
14882 *poffset = TREE_OPERAND (exp, 1);
14883 if (poly_int_tree_p (*poffset))
14884 {
14885 poly_offset_int tem
14886 = wi::sext (wi::to_poly_offset (*poffset),
14887 TYPE_PRECISION (TREE_TYPE (*poffset)));
14888 tem <<= LOG2_BITS_PER_UNIT;
14889 if (tem.to_shwi (pbitpos))
14890 *poffset = NULL_TREE;
14891 }
14892 }
14893 else
14894 {
14895 core = exp;
14896 *pbitpos = 0;
14897 *poffset = NULL_TREE;
14898 }
14899
14900 return core;
14901 }
14902
14903 /* Returns true if the addresses E1 and E2 differ by a constant, false
14904 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14905
14906 bool
14907 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
14908 {
14909 tree core1, core2;
14910 poly_int64 bitpos1, bitpos2;
14911 tree toffset1, toffset2, tdiff, type;
14912
14913 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14914 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14915
14916 poly_int64 bytepos1, bytepos2;
14917 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
14918 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
14919 || !operand_equal_p (core1, core2, 0))
14920 return false;
14921
14922 if (toffset1 && toffset2)
14923 {
14924 type = TREE_TYPE (toffset1);
14925 if (type != TREE_TYPE (toffset2))
14926 toffset2 = fold_convert (type, toffset2);
14927
14928 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14929 if (!cst_and_fits_in_hwi (tdiff))
14930 return false;
14931
14932 *diff = int_cst_value (tdiff);
14933 }
14934 else if (toffset1 || toffset2)
14935 {
14936 /* If only one of the offsets is non-constant, the difference cannot
14937 be a constant. */
14938 return false;
14939 }
14940 else
14941 *diff = 0;
14942
14943 *diff += bytepos1 - bytepos2;
14944 return true;
14945 }
14946
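/* For example (illustrative): for E1 of &a[3] and E2 of &a[1] the two
   cores are both &a, the bit positions differ by two elements, and
   *DIFF is set to 2 * sizeof (element) bytes; when the cores are not
   operand_equal_p the function returns false.  */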
14947 /* Return OFF converted to a pointer offset type suitable as offset for
14948 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14949 tree
14950 convert_to_ptrofftype_loc (location_t loc, tree off)
14951 {
14952 return fold_convert_loc (loc, sizetype, off);
14953 }
14954
14955 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14956 tree
14957 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14958 {
14959 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14960 ptr, convert_to_ptrofftype_loc (loc, off));
14961 }
14962
14963 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14964 tree
14965 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14966 {
14967 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14968 ptr, size_int (off));
14969 }
14970
14971 /* Return a pointer P to a NUL-terminated string representing the sequence
14972 of constant characters referred to by SRC (or a subsequence of such
14973 characters within it if SRC is a reference to a string plus some
14974 constant offset). If STRLEN is non-null, store the number of bytes
14975 in the string constant including the terminating NUL char. *STRLEN is
14976 typically strlen(P) + 1 in the absence of embedded NUL characters. */
14977
14978 const char *
14979 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen /* = NULL */)
14980 {
14981 tree offset_node;
14982 tree mem_size;
14983
14984 if (strlen)
14985 *strlen = 0;
14986
14987 src = string_constant (src, &offset_node, &mem_size, NULL);
14988 if (src == 0)
14989 return NULL;
14990
14991 unsigned HOST_WIDE_INT offset = 0;
14992 if (offset_node != NULL_TREE)
14993 {
14994 if (!tree_fits_uhwi_p (offset_node))
14995 return NULL;
14996 else
14997 offset = tree_to_uhwi (offset_node);
14998 }
14999
15000 if (!tree_fits_uhwi_p (mem_size))
15001 return NULL;
15002
15003 /* STRING_LENGTH is the size of the string literal, including any
15004 embedded NULs. STRING_SIZE is the size of the array the string
15005 literal is stored in. */
15006 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
15007 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size);
15008
15009 /* Ideally this would turn into a gcc_checking_assert over time. */
15010 if (string_length > string_size)
15011 string_length = string_size;
15012
15013 const char *string = TREE_STRING_POINTER (src);
15014
15018
15019 if (string_length == 0
15020 || offset >= string_size)
15021 return NULL;
15022
15023 if (strlen)
15024 {
15025 /* Compute and store the length of the substring at OFFSET.
15026 All offsets past the initial length refer to null strings. */
15027 if (offset < string_length)
15028 *strlen = string_length - offset;
15029 else
15030 *strlen = 1;
15031 }
15032 else
15033 {
15034 tree eltype = TREE_TYPE (TREE_TYPE (src));
15035 /* Support only properly NUL-terminated single byte strings. */
15036 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
15037 return NULL;
15038 if (string[string_length - 1] != '\0')
15039 return NULL;
15040 }
15041
15042 return offset < string_length ? string + offset : "";
15043 }
15044
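/* For example (illustrative): for SRC referring to the literal "hello"
   at offset 2, the function returns a pointer to "llo" and, when STRLEN
   is non-null, stores 4 there (three characters plus the terminating
   NUL); an offset at or past the size of the array yields NULL.  */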
15045 /* Given a tree T, compute which bits in T may be nonzero. */
15046
15047 wide_int
15048 tree_nonzero_bits (const_tree t)
15049 {
15050 switch (TREE_CODE (t))
15051 {
15052 case INTEGER_CST:
15053 return wi::to_wide (t);
15054 case SSA_NAME:
15055 return get_nonzero_bits (t);
15056 case NON_LVALUE_EXPR:
15057 case SAVE_EXPR:
15058 return tree_nonzero_bits (TREE_OPERAND (t, 0));
15059 case BIT_AND_EXPR:
15060 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15061 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15062 case BIT_IOR_EXPR:
15063 case BIT_XOR_EXPR:
15064 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15065 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15066 case COND_EXPR:
15067 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
15068 tree_nonzero_bits (TREE_OPERAND (t, 2)));
15069 CASE_CONVERT:
15070 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15071 TYPE_PRECISION (TREE_TYPE (t)),
15072 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
15073 case PLUS_EXPR:
15074 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
15075 {
15076 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
15077 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
15078 if (wi::bit_and (nzbits1, nzbits2) == 0)
15079 return wi::bit_or (nzbits1, nzbits2);
15080 }
15081 break;
15082 case LSHIFT_EXPR:
15083 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15084 {
15085 tree type = TREE_TYPE (t);
15086 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15087 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15088 TYPE_PRECISION (type));
15089 return wi::neg_p (arg1)
15090 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
15091 : wi::lshift (nzbits, arg1);
15092 }
15093 break;
15094 case RSHIFT_EXPR:
15095 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15096 {
15097 tree type = TREE_TYPE (t);
15098 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15099 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15100 TYPE_PRECISION (type));
15101 return wi::neg_p (arg1)
15102 ? wi::lshift (nzbits, -arg1)
15103 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
15104 }
15105 break;
15106 default:
15107 break;
15108 }
15109
15110 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
15111 }
15112
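/* For example (illustrative): for T of the form (X & 0xff) << 4 with X
   of unknown value, the BIT_AND_EXPR narrows the possibly-nonzero bits
   to 0xff and the LSHIFT_EXPR moves them to 0xff0, so every bit outside
   that mask is known to be zero.  */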
15113 #if CHECKING_P
15114
15115 namespace selftest {
15116
15117 /* Helper functions for writing tests of folding trees. */
15118
15119 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
15120
15121 static void
15122 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
15123 tree constant)
15124 {
15125 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
15126 }
15127
15128 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
15129 wrapping WRAPPED_EXPR. */
15130
15131 static void
15132 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
15133 tree wrapped_expr)
15134 {
15135 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
15136 ASSERT_NE (wrapped_expr, result);
15137 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
15138 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
15139 }
15140
15141 /* Verify that various arithmetic binary operations are folded
15142 correctly. */
15143
15144 static void
15145 test_arithmetic_folding ()
15146 {
15147 tree type = integer_type_node;
15148 tree x = create_tmp_var_raw (type, "x");
15149 tree zero = build_zero_cst (type);
15150 tree one = build_int_cst (type, 1);
15151
15152 /* Addition. */
15153 /* 1 <-- (0 + 1) */
15154 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
15155 one);
15156 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
15157 one);
15158
15159 /* (nonlvalue)x <-- (x + 0) */
15160 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
15161 x);
15162
15163 /* Subtraction. */
15164 /* 0 <-- (x - x) */
15165 assert_binop_folds_to_const (x, MINUS_EXPR, x,
15166 zero);
15167 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
15168 x);
15169
15170 /* Multiplication. */
15171 /* 0 <-- (x * 0) */
15172 assert_binop_folds_to_const (x, MULT_EXPR, zero,
15173 zero);
15174
15175 /* (nonlvalue)x <-- (x * 1) */
15176 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
15177 x);
15178 }
15179
15180 /* Verify that various binary operations on vectors are folded
15181 correctly. */
15182
15183 static void
15184 test_vector_folding ()
15185 {
15186 tree inner_type = integer_type_node;
15187 tree type = build_vector_type (inner_type, 4);
15188 tree zero = build_zero_cst (type);
15189 tree one = build_one_cst (type);
15190 tree index = build_index_vector (type, 0, 1);
15191
15192 /* Verify equality tests that return a scalar boolean result. */
15193 tree res_type = boolean_type_node;
15194 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
15195 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
15196 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
15197 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
15198 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
15199 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15200 index, one)));
15201 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
15202 index, index)));
15203 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15204 index, index)));
15205 }
15206
15207 /* Verify folding of VEC_DUPLICATE_EXPRs. */
15208
15209 static void
15210 test_vec_duplicate_folding ()
15211 {
15212 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
15213 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
15214 /* This will be 1 if VEC_MODE isn't a vector mode. */
15215 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
15216
15217 tree type = build_vector_type (ssizetype, nunits);
15218 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
15219 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
15220 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
15221 }
15222
15223 /* Run all of the selftests within this file. */
15224
15225 void
15226 fold_const_c_tests ()
15227 {
15228 test_arithmetic_folding ();
15229 test_vector_folding ();
15230 test_vec_duplicate_folding ();
15231 }
15232
15233 } // namespace selftest
15234
15235 #endif /* CHECKING_P */