1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision etc. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-vrp.h"
79 #include "tree-ssanames.h"
80 #include "selftest.h"
81 #include "stringpool.h"
82 #include "attribs.h"
83 #include "tree-vector-builder.h"
84 #include "vec-perm-indices.h"
85
86 /* Nonzero if we are folding constants inside an initializer; zero
87 otherwise. */
88 int folding_initializer = 0;
89
90 /* The following constants represent a bit-based encoding of GCC's
91 comparison operators. This encoding simplifies transformations
92 on relational comparison operators, such as AND and OR. */
93 enum comparison_code {
94 COMPCODE_FALSE = 0,
95 COMPCODE_LT = 1,
96 COMPCODE_EQ = 2,
97 COMPCODE_LE = 3,
98 COMPCODE_GT = 4,
99 COMPCODE_LTGT = 5,
100 COMPCODE_GE = 6,
101 COMPCODE_ORD = 7,
102 COMPCODE_UNORD = 8,
103 COMPCODE_UNLT = 9,
104 COMPCODE_UNEQ = 10,
105 COMPCODE_UNLE = 11,
106 COMPCODE_UNGT = 12,
107 COMPCODE_NE = 13,
108 COMPCODE_UNGE = 14,
109 COMPCODE_TRUE = 15
110 };
111
112 static bool negate_expr_p (tree);
113 static tree negate_expr (tree);
114 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
115 static enum comparison_code comparison_to_compcode (enum tree_code);
116 static enum tree_code compcode_to_comparison (enum comparison_code);
117 static bool twoval_comparison_p (tree, tree *, tree *);
118 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
119 static tree optimize_bit_field_compare (location_t, enum tree_code,
120 tree, tree, tree);
121 static bool simple_operand_p (const_tree);
122 static bool simple_operand_p_2 (tree);
123 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
124 static tree range_predecessor (tree);
125 static tree range_successor (tree);
126 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
130 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
131 static tree fold_binary_op_with_conditional_arg (location_t,
132 enum tree_code, tree,
133 tree, tree,
134 tree, tree, int);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (const_tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_convert_const (enum tree_code, tree, tree);
139 static tree fold_view_convert_expr (tree, tree);
140 static tree fold_negate_expr (location_t, tree);
141
142
143 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
144 Otherwise, return LOC. */
145
146 static location_t
147 expr_location_or (tree t, location_t loc)
148 {
149 location_t tloc = EXPR_LOCATION (t);
150 return tloc == UNKNOWN_LOCATION ? loc : tloc;
151 }
152
153 /* Similar to protected_set_expr_location, but never modify X in place;
154 if the location can and needs to be set, unshare X first. */
155
156 static inline tree
157 protected_set_expr_location_unshare (tree x, location_t loc)
158 {
159 if (CAN_HAVE_LOCATION_P (x)
160 && EXPR_LOCATION (x) != loc
161 && !(TREE_CODE (x) == SAVE_EXPR
162 || TREE_CODE (x) == TARGET_EXPR
163 || TREE_CODE (x) == BIND_EXPR))
164 {
165 x = copy_node (x);
166 SET_EXPR_LOCATION (x, loc);
167 }
168 return x;
169 }
170 \f
171 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
172 division and returns the quotient. Otherwise returns
173 NULL_TREE. */
174
175 tree
176 div_if_zero_remainder (const_tree arg1, const_tree arg2)
177 {
178 widest_int quo;
179
180 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
181 SIGNED, &quo))
182 return wide_int_to_tree (TREE_TYPE (arg1), quo);
183
184 return NULL_TREE;
185 }
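/* For illustration (values shown numerically; the actual arguments
   are INTEGER_CST trees of the same type):

     div_if_zero_remainder (12, 4)  -> 3
     div_if_zero_remainder (12, 5)  -> NULL_TREE  (remainder is 2)  */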
186 \f
187 /* This is nonzero if we should defer warnings about undefined
188 overflow. This facility exists because these warnings are a
189 special case. The code to estimate loop iterations does not want
190 to issue any warnings, since it works with expressions which do not
191 occur in user code. Various bits of cleanup code call fold(), but
192 only use the result if it has certain characteristics (e.g., is a
193 constant); that code only wants to issue a warning if the result is
194 used. */
195
196 static int fold_deferring_overflow_warnings;
197
198 /* If a warning about undefined overflow is deferred, this is the
199 warning. Note that this may cause us to turn two warnings into
200 one, but that is fine since it is sufficient to only give one
201 warning per expression. */
202
203 static const char* fold_deferred_overflow_warning;
204
205 /* If a warning about undefined overflow is deferred, this is the
206 level at which the warning should be emitted. */
207
208 static enum warn_strict_overflow_code fold_deferred_overflow_code;
209
210 /* Start deferring overflow warnings. We could use a stack here to
211 permit nested calls, but at present it is not necessary. */
212
213 void
214 fold_defer_overflow_warnings (void)
215 {
216 ++fold_deferring_overflow_warnings;
217 }
218
219 /* Stop deferring overflow warnings. If there is a pending warning,
220 and ISSUE is true, then issue the warning if appropriate. STMT is
221 the statement with which the warning should be associated (used for
222 location information); STMT may be NULL. CODE is the level of the
223 warning--a warn_strict_overflow_code value. This function will use
224 the smaller of CODE and the deferred code when deciding whether to
225 issue the warning. CODE may be zero to mean to always use the
226 deferred code. */
227
228 void
229 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
230 {
231 const char *warnmsg;
232 location_t locus;
233
234 gcc_assert (fold_deferring_overflow_warnings > 0);
235 --fold_deferring_overflow_warnings;
236 if (fold_deferring_overflow_warnings > 0)
237 {
238 if (fold_deferred_overflow_warning != NULL
239 && code != 0
240 && code < (int) fold_deferred_overflow_code)
241 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
242 return;
243 }
244
245 warnmsg = fold_deferred_overflow_warning;
246 fold_deferred_overflow_warning = NULL;
247
248 if (!issue || warnmsg == NULL)
249 return;
250
251 if (gimple_no_warning_p (stmt))
252 return;
253
254 /* Use the smallest code level when deciding to issue the
255 warning. */
256 if (code == 0 || code > (int) fold_deferred_overflow_code)
257 code = fold_deferred_overflow_code;
258
259 if (!issue_strict_overflow_warning (code))
260 return;
261
262 if (stmt == NULL)
263 locus = input_location;
264 else
265 locus = gimple_location (stmt);
266 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
267 }
268
269 /* Stop deferring overflow warnings, ignoring any deferred
270 warnings. */
271
272 void
273 fold_undefer_and_ignore_overflow_warnings (void)
274 {
275 fold_undefer_overflow_warnings (false, NULL, 0);
276 }
277
278 /* Whether we are deferring overflow warnings. */
279
280 bool
281 fold_deferring_overflow_warnings_p (void)
282 {
283 return fold_deferring_overflow_warnings > 0;
284 }
285
286 /* This is called when we fold something based on the fact that signed
287 overflow is undefined. */
288
289 void
290 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
291 {
292 if (fold_deferring_overflow_warnings > 0)
293 {
294 if (fold_deferred_overflow_warning == NULL
295 || wc < fold_deferred_overflow_code)
296 {
297 fold_deferred_overflow_warning = gmsgid;
298 fold_deferred_overflow_code = wc;
299 }
300 }
301 else if (issue_strict_overflow_warning (wc))
302 warning (OPT_Wstrict_overflow, gmsgid);
303 }
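/* A minimal usage sketch of the deferral API above (hypothetical
   caller, not part of this file): fold quietly, and only let a
   deferred -Wstrict-overflow warning escape if the fold changed
   anything.

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     fold_undefer_overflow_warnings (res != expr, NULL, 0);  */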
304 \f
305 /* Return true if the built-in mathematical function specified by FN
306 is odd, i.e. -f(x) == f(-x). */
307
308 bool
309 negate_mathfn_p (combined_fn fn)
310 {
311 switch (fn)
312 {
313 CASE_CFN_ASIN:
314 CASE_CFN_ASINH:
315 CASE_CFN_ATAN:
316 CASE_CFN_ATANH:
317 CASE_CFN_CASIN:
318 CASE_CFN_CASINH:
319 CASE_CFN_CATAN:
320 CASE_CFN_CATANH:
321 CASE_CFN_CBRT:
322 CASE_CFN_CPROJ:
323 CASE_CFN_CSIN:
324 CASE_CFN_CSINH:
325 CASE_CFN_CTAN:
326 CASE_CFN_CTANH:
327 CASE_CFN_ERF:
328 CASE_CFN_LLROUND:
329 CASE_CFN_LROUND:
330 CASE_CFN_ROUND:
331 CASE_CFN_ROUNDEVEN:
332 CASE_CFN_ROUNDEVEN_FN:
333 CASE_CFN_SIN:
334 CASE_CFN_SINH:
335 CASE_CFN_TAN:
336 CASE_CFN_TANH:
337 CASE_CFN_TRUNC:
338 return true;
339
340 CASE_CFN_LLRINT:
341 CASE_CFN_LRINT:
342 CASE_CFN_NEARBYINT:
343 CASE_CFN_RINT:
344 return !flag_rounding_math;
345
346 default:
347 break;
348 }
349 return false;
350 }
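/* For example, CFN_SIN is listed because sin (-x) == -sin (x), while
   cos is even rather than odd and so is absent. The rint family only
   counts as odd when -frounding-math is off: under a directed
   rounding mode, rint (-x) need not equal -rint (x). */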
351
352 /* Check whether we may negate an integer constant T without causing
353 overflow. */
354
355 bool
356 may_negate_without_overflow_p (const_tree t)
357 {
358 tree type;
359
360 gcc_assert (TREE_CODE (t) == INTEGER_CST);
361
362 type = TREE_TYPE (t);
363 if (TYPE_UNSIGNED (type))
364 return false;
365
366 return !wi::only_sign_bit_p (wi::to_wide (t));
367 }
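/* For a signed 32-bit type this rejects exactly INT_MIN, the one
   value whose negation overflows:

     may_negate_without_overflow_p (INT_MIN)      -> false
     may_negate_without_overflow_p (INT_MIN + 1)  -> true

   (arguments shown as values; they are INTEGER_CST trees). */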
368
369 /* Determine whether an expression T can be cheaply negated using
370 the function negate_expr without introducing undefined overflow. */
371
372 static bool
373 negate_expr_p (tree t)
374 {
375 tree type;
376
377 if (t == 0)
378 return false;
379
380 type = TREE_TYPE (t);
381
382 STRIP_SIGN_NOPS (t);
383 switch (TREE_CODE (t))
384 {
385 case INTEGER_CST:
386 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
387 return true;
388
389 /* Check that -CST will not overflow type. */
390 return may_negate_without_overflow_p (t);
391 case BIT_NOT_EXPR:
392 return (INTEGRAL_TYPE_P (type)
393 && TYPE_OVERFLOW_WRAPS (type));
394
395 case FIXED_CST:
396 return true;
397
398 case NEGATE_EXPR:
399 return !TYPE_OVERFLOW_SANITIZED (type);
400
401 case REAL_CST:
402 /* We want to canonicalize to positive real constants. Pretend
403 that only negative ones can be easily negated. */
404 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
405
406 case COMPLEX_CST:
407 return negate_expr_p (TREE_REALPART (t))
408 && negate_expr_p (TREE_IMAGPART (t));
409
410 case VECTOR_CST:
411 {
412 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
413 return true;
414
415 /* Steps don't prevent negation. */
416 unsigned int count = vector_cst_encoded_nelts (t);
417 for (unsigned int i = 0; i < count; ++i)
418 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
419 return false;
420
421 return true;
422 }
423
424 case COMPLEX_EXPR:
425 return negate_expr_p (TREE_OPERAND (t, 0))
426 && negate_expr_p (TREE_OPERAND (t, 1));
427
428 case CONJ_EXPR:
429 return negate_expr_p (TREE_OPERAND (t, 0));
430
431 case PLUS_EXPR:
432 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
433 || HONOR_SIGNED_ZEROS (element_mode (type))
434 || (ANY_INTEGRAL_TYPE_P (type)
435 && ! TYPE_OVERFLOW_WRAPS (type)))
436 return false;
437 /* -(A + B) -> (-B) - A. */
438 if (negate_expr_p (TREE_OPERAND (t, 1)))
439 return true;
440 /* -(A + B) -> (-A) - B. */
441 return negate_expr_p (TREE_OPERAND (t, 0));
442
443 case MINUS_EXPR:
444 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
445 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
446 && !HONOR_SIGNED_ZEROS (element_mode (type))
447 && (! ANY_INTEGRAL_TYPE_P (type)
448 || TYPE_OVERFLOW_WRAPS (type));
449
450 case MULT_EXPR:
451 if (TYPE_UNSIGNED (type))
452 break;
453 /* INT_MIN/n * n doesn't overflow, but after negating one operand it
454 does if n is a (negative) power of two. */
455 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
456 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
457 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
458 && (wi::popcount
459 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
460 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
461 && (wi::popcount
462 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
463 break;
464
465 /* Fall through. */
466
467 case RDIV_EXPR:
468 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
469 return negate_expr_p (TREE_OPERAND (t, 1))
470 || negate_expr_p (TREE_OPERAND (t, 0));
471 break;
472
473 case TRUNC_DIV_EXPR:
474 case ROUND_DIV_EXPR:
475 case EXACT_DIV_EXPR:
476 if (TYPE_UNSIGNED (type))
477 break;
478 /* In general we can't negate A in A / B, because if A is INT_MIN and
479 B is not 1 we change the sign of the result. */
480 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
481 && negate_expr_p (TREE_OPERAND (t, 0)))
482 return true;
483 /* In general we can't negate B in A / B, because if A is INT_MIN and
484 B is 1, we may turn this into INT_MIN / -1 which is undefined
485 and actually traps on some architectures. */
486 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
487 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
488 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
489 && ! integer_onep (TREE_OPERAND (t, 1))))
490 return negate_expr_p (TREE_OPERAND (t, 1));
491 break;
492
493 case NOP_EXPR:
494 /* Negate -((double)float) as (double)(-float). */
495 if (TREE_CODE (type) == REAL_TYPE)
496 {
497 tree tem = strip_float_extensions (t);
498 if (tem != t)
499 return negate_expr_p (tem);
500 }
501 break;
502
503 case CALL_EXPR:
504 /* Negate -f(x) as f(-x). */
505 if (negate_mathfn_p (get_call_combined_fn (t)))
506 return negate_expr_p (CALL_EXPR_ARG (t, 0));
507 break;
508
509 case RSHIFT_EXPR:
510 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
511 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
512 {
513 tree op1 = TREE_OPERAND (t, 1);
514 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
515 return true;
516 }
517 break;
518
519 default:
520 break;
521 }
522 return false;
523 }
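/* Illustrative outcomes for a signed 32-bit int x without -fwrapv:

     -(x >> 31)  -> negatable, as (unsigned) x >> 31
     -(x * 4)    -> not negatable: if x == INT_MIN / 4 then x * 4 is
                    INT_MIN without overflow, but (-x) * 4 overflows
     -(~x)       -> not negatable without wrapping overflow

   whereas for a float f, -((double) f) is negatable as (double) -f. */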
524
525 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
526 simplification is possible.
527 If negate_expr_p would return true for T, NULL_TREE will never be
528 returned. */
529
530 static tree
531 fold_negate_expr_1 (location_t loc, tree t)
532 {
533 tree type = TREE_TYPE (t);
534 tree tem;
535
536 switch (TREE_CODE (t))
537 {
538 /* Convert - (~A) to A + 1. */
539 case BIT_NOT_EXPR:
540 if (INTEGRAL_TYPE_P (type))
541 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
542 build_one_cst (type));
543 break;
544
545 case INTEGER_CST:
546 tem = fold_negate_const (t, type);
547 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
548 || (ANY_INTEGRAL_TYPE_P (type)
549 && !TYPE_OVERFLOW_TRAPS (type)
550 && TYPE_OVERFLOW_WRAPS (type))
551 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
552 return tem;
553 break;
554
555 case POLY_INT_CST:
556 case REAL_CST:
557 case FIXED_CST:
558 tem = fold_negate_const (t, type);
559 return tem;
560
561 case COMPLEX_CST:
562 {
563 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
564 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
565 if (rpart && ipart)
566 return build_complex (type, rpart, ipart);
567 }
568 break;
569
570 case VECTOR_CST:
571 {
572 tree_vector_builder elts;
573 elts.new_unary_operation (type, t, true);
574 unsigned int count = elts.encoded_nelts ();
575 for (unsigned int i = 0; i < count; ++i)
576 {
577 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
578 if (elt == NULL_TREE)
579 return NULL_TREE;
580 elts.quick_push (elt);
581 }
582
583 return elts.build ();
584 }
585
586 case COMPLEX_EXPR:
587 if (negate_expr_p (t))
588 return fold_build2_loc (loc, COMPLEX_EXPR, type,
589 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
590 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
591 break;
592
593 case CONJ_EXPR:
594 if (negate_expr_p (t))
595 return fold_build1_loc (loc, CONJ_EXPR, type,
596 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
597 break;
598
599 case NEGATE_EXPR:
600 if (!TYPE_OVERFLOW_SANITIZED (type))
601 return TREE_OPERAND (t, 0);
602 break;
603
604 case PLUS_EXPR:
605 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
606 && !HONOR_SIGNED_ZEROS (element_mode (type)))
607 {
608 /* -(A + B) -> (-B) - A. */
609 if (negate_expr_p (TREE_OPERAND (t, 1)))
610 {
611 tem = negate_expr (TREE_OPERAND (t, 1));
612 return fold_build2_loc (loc, MINUS_EXPR, type,
613 tem, TREE_OPERAND (t, 0));
614 }
615
616 /* -(A + B) -> (-A) - B. */
617 if (negate_expr_p (TREE_OPERAND (t, 0)))
618 {
619 tem = negate_expr (TREE_OPERAND (t, 0));
620 return fold_build2_loc (loc, MINUS_EXPR, type,
621 tem, TREE_OPERAND (t, 1));
622 }
623 }
624 break;
625
626 case MINUS_EXPR:
627 /* - (A - B) -> B - A */
628 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
629 && !HONOR_SIGNED_ZEROS (element_mode (type)))
630 return fold_build2_loc (loc, MINUS_EXPR, type,
631 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
632 break;
633
634 case MULT_EXPR:
635 if (TYPE_UNSIGNED (type))
636 break;
637
638 /* Fall through. */
639
640 case RDIV_EXPR:
641 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
642 {
643 tem = TREE_OPERAND (t, 1);
644 if (negate_expr_p (tem))
645 return fold_build2_loc (loc, TREE_CODE (t), type,
646 TREE_OPERAND (t, 0), negate_expr (tem));
647 tem = TREE_OPERAND (t, 0);
648 if (negate_expr_p (tem))
649 return fold_build2_loc (loc, TREE_CODE (t), type,
650 negate_expr (tem), TREE_OPERAND (t, 1));
651 }
652 break;
653
654 case TRUNC_DIV_EXPR:
655 case ROUND_DIV_EXPR:
656 case EXACT_DIV_EXPR:
657 if (TYPE_UNSIGNED (type))
658 break;
659 /* In general we can't negate A in A / B, because if A is INT_MIN and
660 B is not 1 we change the sign of the result. */
661 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
662 && negate_expr_p (TREE_OPERAND (t, 0)))
663 return fold_build2_loc (loc, TREE_CODE (t), type,
664 negate_expr (TREE_OPERAND (t, 0)),
665 TREE_OPERAND (t, 1));
666 /* In general we can't negate B in A / B, because if A is INT_MIN and
667 B is 1, we may turn this into INT_MIN / -1 which is undefined
668 and actually traps on some architectures. */
669 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
670 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
671 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
672 && ! integer_onep (TREE_OPERAND (t, 1))))
673 && negate_expr_p (TREE_OPERAND (t, 1)))
674 return fold_build2_loc (loc, TREE_CODE (t), type,
675 TREE_OPERAND (t, 0),
676 negate_expr (TREE_OPERAND (t, 1)));
677 break;
678
679 case NOP_EXPR:
680 /* Convert -((double)float) into (double)(-float). */
681 if (TREE_CODE (type) == REAL_TYPE)
682 {
683 tem = strip_float_extensions (t);
684 if (tem != t && negate_expr_p (tem))
685 return fold_convert_loc (loc, type, negate_expr (tem));
686 }
687 break;
688
689 case CALL_EXPR:
690 /* Negate -f(x) as f(-x). */
691 if (negate_mathfn_p (get_call_combined_fn (t))
692 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
693 {
694 tree fndecl, arg;
695
696 fndecl = get_callee_fndecl (t);
697 arg = negate_expr (CALL_EXPR_ARG (t, 0));
698 return build_call_expr_loc (loc, fndecl, 1, arg);
699 }
700 break;
701
702 case RSHIFT_EXPR:
703 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
704 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
705 {
706 tree op1 = TREE_OPERAND (t, 1);
707 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
708 {
709 tree ntype = TYPE_UNSIGNED (type)
710 ? signed_type_for (type)
711 : unsigned_type_for (type);
712 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
713 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
714 return fold_convert_loc (loc, type, temp);
715 }
716 }
717 break;
718
719 default:
720 break;
721 }
722
723 return NULL_TREE;
724 }
725
726 /* A wrapper for fold_negate_expr_1. */
727
728 static tree
729 fold_negate_expr (location_t loc, tree t)
730 {
731 tree type = TREE_TYPE (t);
732 STRIP_SIGN_NOPS (t);
733 tree tem = fold_negate_expr_1 (loc, t);
734 if (tem == NULL_TREE)
735 return NULL_TREE;
736 return fold_convert_loc (loc, type, tem);
737 }
738
739 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
740 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
741 return NULL_TREE. */
742
743 static tree
744 negate_expr (tree t)
745 {
746 tree type, tem;
747 location_t loc;
748
749 if (t == NULL_TREE)
750 return NULL_TREE;
751
752 loc = EXPR_LOCATION (t);
753 type = TREE_TYPE (t);
754 STRIP_SIGN_NOPS (t);
755
756 tem = fold_negate_expr (loc, t);
757 if (!tem)
758 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
759 return fold_convert_loc (loc, type, tem);
760 }
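/* Note the difference in contract: fold_negate_expr returns NULL_TREE
   when no simplification applies, whereas negate_expr then falls back
   to building an explicit NEGATE_EXPR, so for a bare variable x it
   returns the tree -x rather than NULL_TREE. */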
761 \f
762 /* Split a tree IN into constant, literal and variable parts that could be
763 combined with CODE to make IN. "constant" means an expression with
764 TREE_CONSTANT but that isn't an actual constant. CODE must be a
765 commutative arithmetic operation. Store the constant part into *CONP,
766 the literal in *LITP and return the variable part. If a part isn't
767 present, set it to null. If the tree does not decompose in this way,
768 return the entire tree as the variable part and the other parts as null.
769
770 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
771 case, we negate an operand that was subtracted, except if it is a
772 literal, for which we use *MINUS_LITP instead.
773
774 If NEGATE_P is true, we are negating all of IN, again except a literal
775 for which we use *MINUS_LITP instead. If a variable part is of pointer
776 type, it is negated after converting to TYPE. This prevents us from
777 generating an illegal MINUS pointer expression. LOC is the location of
778 the converted variable part.
779
780 If IN is itself a literal or constant, return it as appropriate.
781
782 Note that we do not guarantee that any of the three values will be the
783 same type as IN, but they will have the same signedness and mode. */
784
785 static tree
786 split_tree (tree in, tree type, enum tree_code code,
787 tree *minus_varp, tree *conp, tree *minus_conp,
788 tree *litp, tree *minus_litp, int negate_p)
789 {
790 tree var = 0;
791 *minus_varp = 0;
792 *conp = 0;
793 *minus_conp = 0;
794 *litp = 0;
795 *minus_litp = 0;
796
797 /* Strip any conversions that don't change the machine mode or signedness. */
798 STRIP_SIGN_NOPS (in);
799
800 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
801 || TREE_CODE (in) == FIXED_CST)
802 *litp = in;
803 else if (TREE_CODE (in) == code
804 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
805 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
806 /* We can associate addition and subtraction together (even
807 though the C standard doesn't say so) for integers because
808 the value is not affected. For reals, the value might be
809 affected, so we can't. */
810 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
811 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
812 || (code == MINUS_EXPR
813 && (TREE_CODE (in) == PLUS_EXPR
814 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
815 {
816 tree op0 = TREE_OPERAND (in, 0);
817 tree op1 = TREE_OPERAND (in, 1);
818 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
819 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
820
821 /* First see if either of the operands is a literal, then a constant. */
822 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
823 || TREE_CODE (op0) == FIXED_CST)
824 *litp = op0, op0 = 0;
825 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
826 || TREE_CODE (op1) == FIXED_CST)
827 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
828
829 if (op0 != 0 && TREE_CONSTANT (op0))
830 *conp = op0, op0 = 0;
831 else if (op1 != 0 && TREE_CONSTANT (op1))
832 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
833
834 /* If we haven't dealt with either operand, this is not a case we can
835 decompose. Otherwise, VAR is either of the ones remaining, if any. */
836 if (op0 != 0 && op1 != 0)
837 var = in;
838 else if (op0 != 0)
839 var = op0;
840 else
841 var = op1, neg_var_p = neg1_p;
842
843 /* Now do any needed negations. */
844 if (neg_litp_p)
845 *minus_litp = *litp, *litp = 0;
846 if (neg_conp_p && *conp)
847 *minus_conp = *conp, *conp = 0;
848 if (neg_var_p && var)
849 *minus_varp = var, var = 0;
850 }
851 else if (TREE_CONSTANT (in))
852 *conp = in;
853 else if (TREE_CODE (in) == BIT_NOT_EXPR
854 && code == PLUS_EXPR)
855 {
856 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
857 when IN is constant. */
858 *litp = build_minus_one_cst (type);
859 *minus_varp = TREE_OPERAND (in, 0);
860 }
861 else
862 var = in;
863
864 if (negate_p)
865 {
866 if (*litp)
867 *minus_litp = *litp, *litp = 0;
868 else if (*minus_litp)
869 *litp = *minus_litp, *minus_litp = 0;
870 if (*conp)
871 *minus_conp = *conp, *conp = 0;
872 else if (*minus_conp)
873 *conp = *minus_conp, *minus_conp = 0;
874 if (var)
875 *minus_varp = var, var = 0;
876 else if (*minus_varp)
877 var = *minus_varp, *minus_varp = 0;
878 }
879
880 if (*litp
881 && TREE_OVERFLOW_P (*litp))
882 *litp = drop_tree_overflow (*litp);
883 if (*minus_litp
884 && TREE_OVERFLOW_P (*minus_litp))
885 *minus_litp = drop_tree_overflow (*minus_litp);
886
887 return var;
888 }
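/* A worked example: splitting IN = x - 3 with CODE == PLUS_EXPR and
   NEGATE_P false gives

     return value   x    (the variable part)
     *MINUS_LITP    3
     *CONP, *LITP, *MINUS_CONP, *MINUS_VARP   NULL

   i.e. x - 3 is decomposed as x + (-3), with the literal recorded on
   the negated side. */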
889
890 /* Re-associate trees split by the above function. T1 and T2 are
891 either expressions to associate or null. Return the new
892 expression, if any. LOC is the location of the new expression. If
893 we build an operation, do it in TYPE and with CODE. */
894
895 static tree
896 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
897 {
898 if (t1 == 0)
899 {
900 gcc_assert (t2 == 0 || code != MINUS_EXPR);
901 return t2;
902 }
903 else if (t2 == 0)
904 return t1;
905
906 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
907 try to fold this since we will have infinite recursion. But do
908 deal with any NEGATE_EXPRs. */
909 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
910 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
911 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
912 {
913 if (code == PLUS_EXPR)
914 {
915 if (TREE_CODE (t1) == NEGATE_EXPR)
916 return build2_loc (loc, MINUS_EXPR, type,
917 fold_convert_loc (loc, type, t2),
918 fold_convert_loc (loc, type,
919 TREE_OPERAND (t1, 0)));
920 else if (TREE_CODE (t2) == NEGATE_EXPR)
921 return build2_loc (loc, MINUS_EXPR, type,
922 fold_convert_loc (loc, type, t1),
923 fold_convert_loc (loc, type,
924 TREE_OPERAND (t2, 0)));
925 else if (integer_zerop (t2))
926 return fold_convert_loc (loc, type, t1);
927 }
928 else if (code == MINUS_EXPR)
929 {
930 if (integer_zerop (t2))
931 return fold_convert_loc (loc, type, t1);
932 }
933
934 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
935 fold_convert_loc (loc, type, t2));
936 }
937
938 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
939 fold_convert_loc (loc, type, t2));
940 }
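/* For example, with CODE == PLUS_EXPR, T1 = a + b and T2 = -c (a
   NEGATE_EXPR), the result is built directly as (a + b) - c; calling
   fold on the operands that split_tree just produced could recurse
   forever. */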
941 \f
942 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
943 for use in int_const_binop, size_binop and size_diffop. */
944
945 static bool
946 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
947 {
948 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
949 return false;
950 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
951 return false;
952
953 switch (code)
954 {
955 case LSHIFT_EXPR:
956 case RSHIFT_EXPR:
957 case LROTATE_EXPR:
958 case RROTATE_EXPR:
959 return true;
960
961 default:
962 break;
963 }
964
965 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
966 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
967 && TYPE_MODE (type1) == TYPE_MODE (type2);
968 }
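/* Shifts and rotates are exempted above because the type of a shift
   count need not match the type being shifted; e.g. a sizetype value
   shifted by an int constant is fine even though the two types differ
   in precision. */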
969
970 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
971 a new constant in RES. Return FALSE if we don't know how to
972 evaluate CODE at compile-time. */
973
974 bool
975 wide_int_binop (wide_int &res,
976 enum tree_code code, const wide_int &arg1, const wide_int &arg2,
977 signop sign, wi::overflow_type *overflow)
978 {
979 wide_int tmp;
980 *overflow = wi::OVF_NONE;
981 switch (code)
982 {
983 case BIT_IOR_EXPR:
984 res = wi::bit_or (arg1, arg2);
985 break;
986
987 case BIT_XOR_EXPR:
988 res = wi::bit_xor (arg1, arg2);
989 break;
990
991 case BIT_AND_EXPR:
992 res = wi::bit_and (arg1, arg2);
993 break;
994
995 case RSHIFT_EXPR:
996 case LSHIFT_EXPR:
997 if (wi::neg_p (arg2))
998 {
999 tmp = -arg2;
1000 if (code == RSHIFT_EXPR)
1001 code = LSHIFT_EXPR;
1002 else
1003 code = RSHIFT_EXPR;
1004 }
1005 else
1006 tmp = arg2;
1007
1008 if (code == RSHIFT_EXPR)
1009 /* It's unclear from the C standard whether shifts can overflow.
1010 The following code ignores overflow; perhaps a C standard
1011 interpretation ruling is needed. */
1012 res = wi::rshift (arg1, tmp, sign);
1013 else
1014 res = wi::lshift (arg1, tmp);
1015 break;
1016
1017 case RROTATE_EXPR:
1018 case LROTATE_EXPR:
1019 if (wi::neg_p (arg2))
1020 {
1021 tmp = -arg2;
1022 if (code == RROTATE_EXPR)
1023 code = LROTATE_EXPR;
1024 else
1025 code = RROTATE_EXPR;
1026 }
1027 else
1028 tmp = arg2;
1029
1030 if (code == RROTATE_EXPR)
1031 res = wi::rrotate (arg1, tmp);
1032 else
1033 res = wi::lrotate (arg1, tmp);
1034 break;
1035
1036 case PLUS_EXPR:
1037 res = wi::add (arg1, arg2, sign, overflow);
1038 break;
1039
1040 case MINUS_EXPR:
1041 res = wi::sub (arg1, arg2, sign, overflow);
1042 break;
1043
1044 case MULT_EXPR:
1045 res = wi::mul (arg1, arg2, sign, overflow);
1046 break;
1047
1048 case MULT_HIGHPART_EXPR:
1049 res = wi::mul_high (arg1, arg2, sign);
1050 break;
1051
1052 case TRUNC_DIV_EXPR:
1053 case EXACT_DIV_EXPR:
1054 if (arg2 == 0)
1055 return false;
1056 res = wi::div_trunc (arg1, arg2, sign, overflow);
1057 break;
1058
1059 case FLOOR_DIV_EXPR:
1060 if (arg2 == 0)
1061 return false;
1062 res = wi::div_floor (arg1, arg2, sign, overflow);
1063 break;
1064
1065 case CEIL_DIV_EXPR:
1066 if (arg2 == 0)
1067 return false;
1068 res = wi::div_ceil (arg1, arg2, sign, overflow);
1069 break;
1070
1071 case ROUND_DIV_EXPR:
1072 if (arg2 == 0)
1073 return false;
1074 res = wi::div_round (arg1, arg2, sign, overflow);
1075 break;
1076
1077 case TRUNC_MOD_EXPR:
1078 if (arg2 == 0)
1079 return false;
1080 res = wi::mod_trunc (arg1, arg2, sign, overflow);
1081 break;
1082
1083 case FLOOR_MOD_EXPR:
1084 if (arg2 == 0)
1085 return false;
1086 res = wi::mod_floor (arg1, arg2, sign, overflow);
1087 break;
1088
1089 case CEIL_MOD_EXPR:
1090 if (arg2 == 0)
1091 return false;
1092 res = wi::mod_ceil (arg1, arg2, sign, overflow);
1093 break;
1094
1095 case ROUND_MOD_EXPR:
1096 if (arg2 == 0)
1097 return false;
1098 res = wi::mod_round (arg1, arg2, sign, overflow);
1099 break;
1100
1101 case MIN_EXPR:
1102 res = wi::min (arg1, arg2, sign);
1103 break;
1104
1105 case MAX_EXPR:
1106 res = wi::max (arg1, arg2, sign);
1107 break;
1108
1109 default:
1110 return false;
1111 }
1112 return true;
1113 }
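/* For example, a negative shift or rotate count is canonicalized to
   the opposite direction by the code above:

     wide_int_binop (res, LSHIFT_EXPR, x, -2, sign, &ovf)

   computes the right shift x >> 2 instead (a sketch; ARG2 is really a
   wide_int, not a plain integer). */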
1114
1115 /* Combine two poly_ints ARG1 and ARG2 under operation CODE to
1116 produce a new constant in RES. Return FALSE if we don't know how
1117 to evaluate CODE at compile-time. */
1118
1119 static bool
1120 poly_int_binop (poly_wide_int &res, enum tree_code code,
1121 const_tree arg1, const_tree arg2,
1122 signop sign, wi::overflow_type *overflow)
1123 {
1124 gcc_assert (NUM_POLY_INT_COEFFS != 1);
1125 gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1126 switch (code)
1127 {
1128 case PLUS_EXPR:
1129 res = wi::add (wi::to_poly_wide (arg1),
1130 wi::to_poly_wide (arg2), sign, overflow);
1131 break;
1132
1133 case MINUS_EXPR:
1134 res = wi::sub (wi::to_poly_wide (arg1),
1135 wi::to_poly_wide (arg2), sign, overflow);
1136 break;
1137
1138 case MULT_EXPR:
1139 if (TREE_CODE (arg2) == INTEGER_CST)
1140 res = wi::mul (wi::to_poly_wide (arg1),
1141 wi::to_wide (arg2), sign, overflow);
1142 else if (TREE_CODE (arg1) == INTEGER_CST)
1143 res = wi::mul (wi::to_poly_wide (arg2),
1144 wi::to_wide (arg1), sign, overflow);
1145 else
1146 return false;
1147 break;
1148
1149 case LSHIFT_EXPR:
1150 if (TREE_CODE (arg2) == INTEGER_CST)
1151 res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1152 else
1153 return false;
1154 break;
1155
1156 case BIT_IOR_EXPR:
1157 if (TREE_CODE (arg2) != INTEGER_CST
1158 || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1159 &res))
1160 return false;
1161 break;
1162
1163 default:
1164 return false;
1165 }
1166 return true;
1167 }
1168
1169 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1170 produce a new constant. Return NULL_TREE if we don't know how to
1171 evaluate CODE at compile-time. */
1172
1173 tree
1174 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1175 int overflowable)
1176 {
1177 poly_wide_int poly_res;
1178 tree type = TREE_TYPE (arg1);
1179 signop sign = TYPE_SIGN (type);
1180 wi::overflow_type overflow = wi::OVF_NONE;
1181
1182 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1183 {
1184 wide_int warg1 = wi::to_wide (arg1), res;
1185 wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
1186 if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
1187 return NULL_TREE;
1188 poly_res = res;
1189 }
1190 else if (!poly_int_tree_p (arg1)
1191 || !poly_int_tree_p (arg2)
1192 || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1193 return NULL_TREE;
1194 return force_fit_type (type, poly_res, overflowable,
1195 (((sign == SIGNED || overflowable == -1)
1196 && overflow)
1197 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1198 }
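/* For example, adding two signed char constants 100 + 100 with
   OVERFLOWABLE 1 (assumed to be the usual default) wraps to -56 and
   marks the result with TREE_OVERFLOW, since the type is SIGNED and
   wi::add reported overflow (values shown numerically; the operands
   are trees). */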
1199
1200 /* Return true if binary operation OP distributes over addition in operand
1201 OPNO, with the other operand being held constant. OPNO counts from 1. */
1202
1203 static bool
1204 distributes_over_addition_p (tree_code op, int opno)
1205 {
1206 switch (op)
1207 {
1208 case PLUS_EXPR:
1209 case MINUS_EXPR:
1210 case MULT_EXPR:
1211 return true;
1212
1213 case LSHIFT_EXPR:
1214 return opno == 1;
1215
1216 default:
1217 return false;
1218 }
1219 }
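/* The operand position matters for shifts: (a + b) << c equals
   (a << c) + (b << c) modulo wrapping, so LSHIFT_EXPR distributes in
   operand 1, but c << (a + b) has no such expansion, so it does not
   distribute in operand 2. */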
1220
1221 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1222 constant. We assume ARG1 and ARG2 have the same data type, or at least
1223 are the same kind of constant and the same machine mode. Return zero if
1224 combining the constants is not allowed in the current operating mode. */
1225
1226 static tree
1227 const_binop (enum tree_code code, tree arg1, tree arg2)
1228 {
1229 /* Sanity check for the recursive cases. */
1230 if (!arg1 || !arg2)
1231 return NULL_TREE;
1232
1233 STRIP_NOPS (arg1);
1234 STRIP_NOPS (arg2);
1235
1236 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1237 {
1238 if (code == POINTER_PLUS_EXPR)
1239 return int_const_binop (PLUS_EXPR,
1240 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1241
1242 return int_const_binop (code, arg1, arg2);
1243 }
1244
1245 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1246 {
1247 machine_mode mode;
1248 REAL_VALUE_TYPE d1;
1249 REAL_VALUE_TYPE d2;
1250 REAL_VALUE_TYPE value;
1251 REAL_VALUE_TYPE result;
1252 bool inexact;
1253 tree t, type;
1254
1255 /* The following codes are handled by real_arithmetic. */
1256 switch (code)
1257 {
1258 case PLUS_EXPR:
1259 case MINUS_EXPR:
1260 case MULT_EXPR:
1261 case RDIV_EXPR:
1262 case MIN_EXPR:
1263 case MAX_EXPR:
1264 break;
1265
1266 default:
1267 return NULL_TREE;
1268 }
1269
1270 d1 = TREE_REAL_CST (arg1);
1271 d2 = TREE_REAL_CST (arg2);
1272
1273 type = TREE_TYPE (arg1);
1274 mode = TYPE_MODE (type);
1275
1276 /* Don't perform operation if we honor signaling NaNs and
1277 either operand is a signaling NaN. */
1278 if (HONOR_SNANS (mode)
1279 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1280 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1281 return NULL_TREE;
1282
1283 /* Don't perform operation if it would raise a division
1284 by zero exception. */
1285 if (code == RDIV_EXPR
1286 && real_equal (&d2, &dconst0)
1287 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1288 return NULL_TREE;
1289
1290 /* If either operand is a NaN, just return it. Otherwise, set up
1291 for floating-point trap; we return an overflow. */
1292 if (REAL_VALUE_ISNAN (d1))
1293 {
1294 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1295 is off. */
1296 d1.signalling = 0;
1297 t = build_real (type, d1);
1298 return t;
1299 }
1300 else if (REAL_VALUE_ISNAN (d2))
1301 {
1302 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1303 is off. */
1304 d2.signalling = 0;
1305 t = build_real (type, d2);
1306 return t;
1307 }
1308
1309 inexact = real_arithmetic (&value, code, &d1, &d2);
1310 real_convert (&result, mode, &value);
1311
1312 /* Don't constant fold this floating point operation if
1313 the result has overflowed and flag_trapping_math. */
1314 if (flag_trapping_math
1315 && MODE_HAS_INFINITIES (mode)
1316 && REAL_VALUE_ISINF (result)
1317 && !REAL_VALUE_ISINF (d1)
1318 && !REAL_VALUE_ISINF (d2))
1319 return NULL_TREE;
1320
1321 /* Don't constant fold this floating point operation if the
1322 result may depend upon the run-time rounding mode and
1323 flag_rounding_math is set, or if GCC's software emulation
1324 is unable to accurately represent the result. */
1325 if ((flag_rounding_math
1326 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1327 && (inexact || !real_identical (&result, &value)))
1328 return NULL_TREE;
1329
1330 t = build_real (type, result);
1331
1332 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1333 return t;
1334 }
1335
1336 if (TREE_CODE (arg1) == FIXED_CST)
1337 {
1338 FIXED_VALUE_TYPE f1;
1339 FIXED_VALUE_TYPE f2;
1340 FIXED_VALUE_TYPE result;
1341 tree t, type;
1342 int sat_p;
1343 bool overflow_p;
1344
1345 /* The following codes are handled by fixed_arithmetic. */
1346 switch (code)
1347 {
1348 case PLUS_EXPR:
1349 case MINUS_EXPR:
1350 case MULT_EXPR:
1351 case TRUNC_DIV_EXPR:
1352 if (TREE_CODE (arg2) != FIXED_CST)
1353 return NULL_TREE;
1354 f2 = TREE_FIXED_CST (arg2);
1355 break;
1356
1357 case LSHIFT_EXPR:
1358 case RSHIFT_EXPR:
1359 {
1360 if (TREE_CODE (arg2) != INTEGER_CST)
1361 return NULL_TREE;
1362 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1363 f2.data.high = w2.elt (1);
1364 f2.data.low = w2.ulow ();
1365 f2.mode = SImode;
1366 }
1367 break;
1368
1369 default:
1370 return NULL_TREE;
1371 }
1372
1373 f1 = TREE_FIXED_CST (arg1);
1374 type = TREE_TYPE (arg1);
1375 sat_p = TYPE_SATURATING (type);
1376 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1377 t = build_fixed (type, result);
1378 /* Propagate overflow flags. */
1379 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1380 TREE_OVERFLOW (t) = 1;
1381 return t;
1382 }
1383
1384 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1385 {
1386 tree type = TREE_TYPE (arg1);
1387 tree r1 = TREE_REALPART (arg1);
1388 tree i1 = TREE_IMAGPART (arg1);
1389 tree r2 = TREE_REALPART (arg2);
1390 tree i2 = TREE_IMAGPART (arg2);
1391 tree real, imag;
1392
1393 switch (code)
1394 {
1395 case PLUS_EXPR:
1396 case MINUS_EXPR:
1397 real = const_binop (code, r1, r2);
1398 imag = const_binop (code, i1, i2);
1399 break;
1400
1401 case MULT_EXPR:
1402 if (COMPLEX_FLOAT_TYPE_P (type))
1403 return do_mpc_arg2 (arg1, arg2, type,
1404 /* do_nonfinite= */ folding_initializer,
1405 mpc_mul);
1406
1407 real = const_binop (MINUS_EXPR,
1408 const_binop (MULT_EXPR, r1, r2),
1409 const_binop (MULT_EXPR, i1, i2));
1410 imag = const_binop (PLUS_EXPR,
1411 const_binop (MULT_EXPR, r1, i2),
1412 const_binop (MULT_EXPR, i1, r2));
1413 break;
1414
1415 case RDIV_EXPR:
1416 if (COMPLEX_FLOAT_TYPE_P (type))
1417 return do_mpc_arg2 (arg1, arg2, type,
1418 /* do_nonfinite= */ folding_initializer,
1419 mpc_div);
1420 /* Fallthru. */
1421 case TRUNC_DIV_EXPR:
1422 case CEIL_DIV_EXPR:
1423 case FLOOR_DIV_EXPR:
1424 case ROUND_DIV_EXPR:
1425 if (flag_complex_method == 0)
1426 {
1427 /* Keep this algorithm in sync with
1428 tree-complex.c:expand_complex_div_straight().
1429
1430 Expand complex division to scalars, straightforward algorithm.
1431 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1432 t = br*br + bi*bi
1433 */
1434 tree magsquared
1435 = const_binop (PLUS_EXPR,
1436 const_binop (MULT_EXPR, r2, r2),
1437 const_binop (MULT_EXPR, i2, i2));
1438 tree t1
1439 = const_binop (PLUS_EXPR,
1440 const_binop (MULT_EXPR, r1, r2),
1441 const_binop (MULT_EXPR, i1, i2));
1442 tree t2
1443 = const_binop (MINUS_EXPR,
1444 const_binop (MULT_EXPR, i1, r2),
1445 const_binop (MULT_EXPR, r1, i2));
1446
1447 real = const_binop (code, t1, magsquared);
1448 imag = const_binop (code, t2, magsquared);
1449 }
1450 else
1451 {
1452 /* Keep this algorithm in sync with
1453 tree-complex.c:expand_complex_div_wide().
1454
1455 Expand complex division to scalars, modified algorithm to minimize
1456 overflow with wide input ranges. */
1457 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1458 fold_abs_const (r2, TREE_TYPE (type)),
1459 fold_abs_const (i2, TREE_TYPE (type)));
1460
1461 if (integer_nonzerop (compare))
1462 {
1463 /* In the TRUE branch, we compute
1464 ratio = br/bi;
1465 div = (br * ratio) + bi;
1466 tr = (ar * ratio) + ai;
1467 ti = (ai * ratio) - ar;
1468 tr = tr / div;
1469 ti = ti / div; */
1470 tree ratio = const_binop (code, r2, i2);
1471 tree div = const_binop (PLUS_EXPR, i2,
1472 const_binop (MULT_EXPR, r2, ratio));
1473 real = const_binop (MULT_EXPR, r1, ratio);
1474 real = const_binop (PLUS_EXPR, real, i1);
1475 real = const_binop (code, real, div);
1476
1477 imag = const_binop (MULT_EXPR, i1, ratio);
1478 imag = const_binop (MINUS_EXPR, imag, r1);
1479 imag = const_binop (code, imag, div);
1480 }
1481 else
1482 {
1483 /* In the FALSE branch, we compute
1484 ratio = bi/br;
1485 div = (bi * ratio) + br;
1486 tr = (ai * ratio) + ar;
1487 ti = ai - (ar * ratio);
1488 tr = tr / div;
1489 ti = ti / div; */
1490 tree ratio = const_binop (code, i2, r2);
1491 tree div = const_binop (PLUS_EXPR, r2,
1492 const_binop (MULT_EXPR, i2, ratio));
1493
1494 real = const_binop (MULT_EXPR, i1, ratio);
1495 real = const_binop (PLUS_EXPR, real, r1);
1496 real = const_binop (code, real, div);
1497
1498 imag = const_binop (MULT_EXPR, r1, ratio);
1499 imag = const_binop (MINUS_EXPR, i1, imag);
1500 imag = const_binop (code, imag, div);
1501 }
1502 }
1503 break;
1504
1505 default:
1506 return NULL_TREE;
1507 }
1508
1509 if (real && imag)
1510 return build_complex (type, real, imag);
1511 }
1512
1513 if (TREE_CODE (arg1) == VECTOR_CST
1514 && TREE_CODE (arg2) == VECTOR_CST
1515 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1516 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1517 {
1518 tree type = TREE_TYPE (arg1);
1519 bool step_ok_p;
1520 if (VECTOR_CST_STEPPED_P (arg1)
1521 && VECTOR_CST_STEPPED_P (arg2))
1522 /* We can operate directly on the encoding if:
1523
1524 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1525 implies
1526 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1527
1528 Addition and subtraction are the supported operators
1529 for which this is true. */
1530 step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1531 else if (VECTOR_CST_STEPPED_P (arg1))
1532 /* We can operate directly on stepped encodings if:
1533
1534 a3 - a2 == a2 - a1
1535 implies:
1536 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1537
1538 which is true if (x -> x op c) distributes over addition. */
1539 step_ok_p = distributes_over_addition_p (code, 1);
1540 else
1541 /* Similarly in reverse. */
1542 step_ok_p = distributes_over_addition_p (code, 2);
1543 tree_vector_builder elts;
1544 if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1545 return NULL_TREE;
1546 unsigned int count = elts.encoded_nelts ();
1547 for (unsigned int i = 0; i < count; ++i)
1548 {
1549 tree elem1 = VECTOR_CST_ELT (arg1, i);
1550 tree elem2 = VECTOR_CST_ELT (arg2, i);
1551
1552 tree elt = const_binop (code, elem1, elem2);
1553
1554 /* It is possible that const_binop cannot handle the given
1555 code and returns NULL_TREE. */
1556 if (elt == NULL_TREE)
1557 return NULL_TREE;
1558 elts.quick_push (elt);
1559 }
1560
1561 return elts.build ();
1562 }
1563
1564 /* Shifts allow a scalar offset for a vector. */
1565 if (TREE_CODE (arg1) == VECTOR_CST
1566 && TREE_CODE (arg2) == INTEGER_CST)
1567 {
1568 tree type = TREE_TYPE (arg1);
1569 bool step_ok_p = distributes_over_addition_p (code, 1);
1570 tree_vector_builder elts;
1571 if (!elts.new_unary_operation (type, arg1, step_ok_p))
1572 return NULL_TREE;
1573 unsigned int count = elts.encoded_nelts ();
1574 for (unsigned int i = 0; i < count; ++i)
1575 {
1576 tree elem1 = VECTOR_CST_ELT (arg1, i);
1577
1578 tree elt = const_binop (code, elem1, arg2);
1579
1580 /* It is possible that const_binop cannot handle the given
1581 code and returns NULL_TREE. */
1582 if (elt == NULL_TREE)
1583 return NULL_TREE;
1584 elts.quick_push (elt);
1585 }
1586
1587 return elts.build ();
1588 }
1589 return NULL_TREE;
1590 }
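/* A stepped-encoding example: for V4SI operands

     arg1 = { 1, 2, 3, 4 }      arg2 = { 10, 20, 30, 40 }

   PLUS_EXPR can work on just the encoded elements, because the sum of
   two linear series is again linear, whereas MULT_EXPR cannot use the
   stepped shortcut ({ 10, 40, 90, 160 } is not linear) and must
   expand every element, failing for variable-length vectors. */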
1591
1592 /* Overload that adds a TYPE parameter to be able to dispatch
1593 to fold_relational_const. */
1594
1595 tree
1596 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1597 {
1598 if (TREE_CODE_CLASS (code) == tcc_comparison)
1599 return fold_relational_const (code, type, arg1, arg2);
1600
1601 /* ??? Until we make the const_binop worker take the type of the
1602 result as argument, put those cases that need it here. */
1603 switch (code)
1604 {
1605 case VEC_SERIES_EXPR:
1606 if (CONSTANT_CLASS_P (arg1)
1607 && CONSTANT_CLASS_P (arg2))
1608 return build_vec_series (type, arg1, arg2);
1609 return NULL_TREE;
1610
1611 case COMPLEX_EXPR:
1612 if ((TREE_CODE (arg1) == REAL_CST
1613 && TREE_CODE (arg2) == REAL_CST)
1614 || (TREE_CODE (arg1) == INTEGER_CST
1615 && TREE_CODE (arg2) == INTEGER_CST))
1616 return build_complex (type, arg1, arg2);
1617 return NULL_TREE;
1618
1619 case POINTER_DIFF_EXPR:
1620 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1621 {
1622 poly_offset_int res = (wi::to_poly_offset (arg1)
1623 - wi::to_poly_offset (arg2));
1624 return force_fit_type (type, res, 1,
1625 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1626 }
1627 return NULL_TREE;
1628
1629 case VEC_PACK_TRUNC_EXPR:
1630 case VEC_PACK_FIX_TRUNC_EXPR:
1631 case VEC_PACK_FLOAT_EXPR:
1632 {
1633 unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1634
1635 if (TREE_CODE (arg1) != VECTOR_CST
1636 || TREE_CODE (arg2) != VECTOR_CST)
1637 return NULL_TREE;
1638
1639 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1640 return NULL_TREE;
1641
1642 out_nelts = in_nelts * 2;
1643 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1644 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1645
1646 tree_vector_builder elts (type, out_nelts, 1);
1647 for (i = 0; i < out_nelts; i++)
1648 {
1649 tree elt = (i < in_nelts
1650 ? VECTOR_CST_ELT (arg1, i)
1651 : VECTOR_CST_ELT (arg2, i - in_nelts));
1652 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1653 ? NOP_EXPR
1654 : code == VEC_PACK_FLOAT_EXPR
1655 ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1656 TREE_TYPE (type), elt);
1657 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1658 return NULL_TREE;
1659 elts.quick_push (elt);
1660 }
1661
1662 return elts.build ();
1663 }
1664
1665 case VEC_WIDEN_MULT_LO_EXPR:
1666 case VEC_WIDEN_MULT_HI_EXPR:
1667 case VEC_WIDEN_MULT_EVEN_EXPR:
1668 case VEC_WIDEN_MULT_ODD_EXPR:
1669 {
1670 unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1671
1672 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1673 return NULL_TREE;
1674
1675 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1676 return NULL_TREE;
1677 out_nelts = in_nelts / 2;
1678 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1679 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1680
1681 if (code == VEC_WIDEN_MULT_LO_EXPR)
1682 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1683 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1684 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1685 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1686 scale = 1, ofs = 0;
1687 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1688 scale = 1, ofs = 1;
1689
1690 tree_vector_builder elts (type, out_nelts, 1);
1691 for (out = 0; out < out_nelts; out++)
1692 {
1693 unsigned int in = (out << scale) + ofs;
1694 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1695 VECTOR_CST_ELT (arg1, in));
1696 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1697 VECTOR_CST_ELT (arg2, in));
1698
1699 if (t1 == NULL_TREE || t2 == NULL_TREE)
1700 return NULL_TREE;
1701 tree elt = const_binop (MULT_EXPR, t1, t2);
1702 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1703 return NULL_TREE;
1704 elts.quick_push (elt);
1705 }
1706
1707 return elts.build ();
1708 }
1709
1710 default:;
1711 }
1712
1713 if (TREE_CODE_CLASS (code) != tcc_binary)
1714 return NULL_TREE;
1715
1716 /* Make sure type and arg0 have the same saturating flag. */
1717 gcc_checking_assert (TYPE_SATURATING (type)
1718 == TYPE_SATURATING (TREE_TYPE (arg1)));
1719
1720 return const_binop (code, arg1, arg2);
1721 }
1722
1723 /* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
1724 Return zero if computing the constant is not possible. */
1725
1726 tree
1727 const_unop (enum tree_code code, tree type, tree arg0)
1728 {
1729 /* Don't perform the operation, other than NEGATE and ABS, if
1730 flag_signaling_nans is on and the operand is a signaling NaN. */
1731 if (TREE_CODE (arg0) == REAL_CST
1732 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1733 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1734 && code != NEGATE_EXPR
1735 && code != ABS_EXPR
1736 && code != ABSU_EXPR)
1737 return NULL_TREE;
1738
1739 switch (code)
1740 {
1741 CASE_CONVERT:
1742 case FLOAT_EXPR:
1743 case FIX_TRUNC_EXPR:
1744 case FIXED_CONVERT_EXPR:
1745 return fold_convert_const (code, type, arg0);
1746
1747 case ADDR_SPACE_CONVERT_EXPR:
1748 /* If the source address is 0, and the source address space
1749 cannot have a valid object at 0, fold to dest type null. */
1750 if (integer_zerop (arg0)
1751 && !(targetm.addr_space.zero_address_valid
1752 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1753 return fold_convert_const (code, type, arg0);
1754 break;
1755
1756 case VIEW_CONVERT_EXPR:
1757 return fold_view_convert_expr (type, arg0);
1758
1759 case NEGATE_EXPR:
1760 {
1761 /* Can't call fold_negate_const directly here as that doesn't
1762 handle all cases and we might not be able to negate some
1763 constants. */
1764 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1765 if (tem && CONSTANT_CLASS_P (tem))
1766 return tem;
1767 break;
1768 }
1769
1770 case ABS_EXPR:
1771 case ABSU_EXPR:
1772 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1773 return fold_abs_const (arg0, type);
1774 break;
1775
1776 case CONJ_EXPR:
1777 if (TREE_CODE (arg0) == COMPLEX_CST)
1778 {
1779 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1780 TREE_TYPE (type));
1781 return build_complex (type, TREE_REALPART (arg0), ipart);
1782 }
1783 break;
1784
1785 case BIT_NOT_EXPR:
1786 if (TREE_CODE (arg0) == INTEGER_CST)
1787 return fold_not_const (arg0, type);
1788 else if (POLY_INT_CST_P (arg0))
1789 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1790 /* Perform BIT_NOT_EXPR on each element individually. */
1791 else if (TREE_CODE (arg0) == VECTOR_CST)
1792 {
1793 tree elem;
1794
1795 /* This can cope with stepped encodings because ~x == -1 - x. */
1796 tree_vector_builder elements;
1797 elements.new_unary_operation (type, arg0, true);
1798 unsigned int i, count = elements.encoded_nelts ();
1799 for (i = 0; i < count; ++i)
1800 {
1801 elem = VECTOR_CST_ELT (arg0, i);
1802 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1803 if (elem == NULL_TREE)
1804 break;
1805 elements.quick_push (elem);
1806 }
1807 if (i == count)
1808 return elements.build ();
1809 }
1810 break;
1811
1812 case TRUTH_NOT_EXPR:
1813 if (TREE_CODE (arg0) == INTEGER_CST)
1814 return constant_boolean_node (integer_zerop (arg0), type);
1815 break;
1816
1817 case REALPART_EXPR:
1818 if (TREE_CODE (arg0) == COMPLEX_CST)
1819 return fold_convert (type, TREE_REALPART (arg0));
1820 break;
1821
1822 case IMAGPART_EXPR:
1823 if (TREE_CODE (arg0) == COMPLEX_CST)
1824 return fold_convert (type, TREE_IMAGPART (arg0));
1825 break;
1826
1827 case VEC_UNPACK_LO_EXPR:
1828 case VEC_UNPACK_HI_EXPR:
1829 case VEC_UNPACK_FLOAT_LO_EXPR:
1830 case VEC_UNPACK_FLOAT_HI_EXPR:
1831 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1832 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1833 {
1834 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1835 enum tree_code subcode;
1836
1837 if (TREE_CODE (arg0) != VECTOR_CST)
1838 return NULL_TREE;
1839
1840 if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1841 return NULL_TREE;
1842 out_nelts = in_nelts / 2;
1843 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1844
1845 unsigned int offset = 0;
1846 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1847 || code == VEC_UNPACK_FLOAT_LO_EXPR
1848 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1849 offset = out_nelts;
1850
1851 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1852 subcode = NOP_EXPR;
1853 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1854 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1855 subcode = FLOAT_EXPR;
1856 else
1857 subcode = FIX_TRUNC_EXPR;
1858
1859 tree_vector_builder elts (type, out_nelts, 1);
1860 for (i = 0; i < out_nelts; i++)
1861 {
1862 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1863 VECTOR_CST_ELT (arg0, i + offset));
1864 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1865 return NULL_TREE;
1866 elts.quick_push (elt);
1867 }
1868
1869 return elts.build ();
1870 }
1871
1872 case VEC_DUPLICATE_EXPR:
1873 if (CONSTANT_CLASS_P (arg0))
1874 return build_vector_from_val (type, arg0);
1875 return NULL_TREE;
1876
1877 default:
1878 break;
1879 }
1880
1881 return NULL_TREE;
1882 }
1883
1884 /* Create a sizetype INT_CST node with NUMBER sign-extended. KIND
1885 indicates which particular sizetype to create. */
1886
1887 tree
1888 size_int_kind (poly_int64 number, enum size_type_kind kind)
1889 {
1890 return build_int_cst (sizetype_tab[(int) kind], number);
1891 }
1892 \f
1893 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1894 is a tree code. The type of the result is taken from the operands.
1895 Both must be equivalent integer types, as per int_binop_types_match_p.
1896 If the operands are constant, so is the result. */
1897
1898 tree
1899 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1900 {
1901 tree type = TREE_TYPE (arg0);
1902
1903 if (arg0 == error_mark_node || arg1 == error_mark_node)
1904 return error_mark_node;
1905
1906 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1907 TREE_TYPE (arg1)));
1908
1909 /* Handle the special case of two poly_int constants faster. */
1910 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1911 {
1912 /* And some specific cases even faster than that. */
1913 if (code == PLUS_EXPR)
1914 {
1915 if (integer_zerop (arg0)
1916 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1917 return arg1;
1918 if (integer_zerop (arg1)
1919 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1920 return arg0;
1921 }
1922 else if (code == MINUS_EXPR)
1923 {
1924 if (integer_zerop (arg1)
1925 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1926 return arg0;
1927 }
1928 else if (code == MULT_EXPR)
1929 {
1930 if (integer_onep (arg0)
1931 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1932 return arg1;
1933 }
1934
1935 /* Handle general case of two integer constants. For sizetype
1936 constant calculations we always want to know about overflow,
1937 even in the unsigned case. */
1938 tree res = int_const_binop (code, arg0, arg1, -1);
1939 if (res != NULL_TREE)
1940 return res;
1941 }
1942
1943 return fold_build2_loc (loc, code, type, arg0, arg1);
1944 }
1945
1946 /* Given two values, either both of sizetype or both of bitsizetype,
1947 compute the difference between the two values. Return the value
1948 in signed type corresponding to the type of the operands. */
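/* For example, for sizetype operands a == 2 and b == 5, the result is
the ssizetype constant -3 rather than the huge unsigned value that a
sizetype subtraction would wrap to. */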
1949
1950 tree
1951 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1952 {
1953 tree type = TREE_TYPE (arg0);
1954 tree ctype;
1955
1956 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1957 TREE_TYPE (arg1)));
1958
1959 /* If the type is already signed, just do the simple thing. */
1960 if (!TYPE_UNSIGNED (type))
1961 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1962
1963 if (type == sizetype)
1964 ctype = ssizetype;
1965 else if (type == bitsizetype)
1966 ctype = sbitsizetype;
1967 else
1968 ctype = signed_type_for (type);
1969
1970 /* If either operand is not a constant, do the conversions to the signed
1971 type and subtract. The hardware will do the right thing with any
1972 overflow in the subtraction. */
1973 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1974 return size_binop_loc (loc, MINUS_EXPR,
1975 fold_convert_loc (loc, ctype, arg0),
1976 fold_convert_loc (loc, ctype, arg1));
1977
1978 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1979 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1980 overflow) and negate (which can't either). Special-case a result
1981 of zero while we're here. */
1982 if (tree_int_cst_equal (arg0, arg1))
1983 return build_int_cst (ctype, 0);
1984 else if (tree_int_cst_lt (arg1, arg0))
1985 return fold_convert_loc (loc, ctype,
1986 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1987 else
1988 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1989 fold_convert_loc (loc, ctype,
1990 size_binop_loc (loc,
1991 MINUS_EXPR,
1992 arg1, arg0)));
1993 }
1994 \f
1995 /* A subroutine of fold_convert_const handling conversions of an
1996 INTEGER_CST to another integer type. */
1997
1998 static tree
1999 fold_convert_const_int_from_int (tree type, const_tree arg1)
2000 {
2001 /* Given an integer constant, make new constant with new type,
2002 appropriately sign-extended or truncated. Use widest_int
2003 so that any extension is done according to ARG1's type. */
2004 return force_fit_type (type, wi::to_widest (arg1),
2005 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2006 TREE_OVERFLOW (arg1));
2007 }
2008
2009 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2010 to an integer type. */
2011
2012 static tree
2013 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2014 {
2015 bool overflow = false;
2016 tree t;
2017
2018 /* The following code implements the floating point to integer
2019 conversion rules required by the Java Language Specification,
2020 that IEEE NaNs are mapped to zero and values that overflow
2021 the target precision saturate, i.e. values greater than
2022 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2023 are mapped to INT_MIN. These semantics are allowed by the
2024 C and C++ standards that simply state that the behavior of
2025 FP-to-integer conversion is unspecified upon overflow. */
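/* For instance, with a 32-bit signed TYPE:
(int) 1.0e30 yields INT_MAX, (int) -1.0e30 yields INT_MIN, and
(int) NaN yields 0, each with TREE_OVERFLOW set on the result. */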
2026
2027 wide_int val;
2028 REAL_VALUE_TYPE r;
2029 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2030
2031 switch (code)
2032 {
2033 case FIX_TRUNC_EXPR:
2034 real_trunc (&r, VOIDmode, &x);
2035 break;
2036
2037 default:
2038 gcc_unreachable ();
2039 }
2040
2041 /* If R is NaN, return zero and show we have an overflow. */
2042 if (REAL_VALUE_ISNAN (r))
2043 {
2044 overflow = true;
2045 val = wi::zero (TYPE_PRECISION (type));
2046 }
2047
2048 /* See if R is less than the lower bound or greater than the
2049 upper bound. */
2050
2051 if (! overflow)
2052 {
2053 tree lt = TYPE_MIN_VALUE (type);
2054 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2055 if (real_less (&r, &l))
2056 {
2057 overflow = true;
2058 val = wi::to_wide (lt);
2059 }
2060 }
2061
2062 if (! overflow)
2063 {
2064 tree ut = TYPE_MAX_VALUE (type);
2065 if (ut)
2066 {
2067 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2068 if (real_less (&u, &r))
2069 {
2070 overflow = true;
2071 val = wi::to_wide (ut);
2072 }
2073 }
2074 }
2075
2076 if (! overflow)
2077 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2078
2079 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2080 return t;
2081 }
2082
2083 /* A subroutine of fold_convert_const handling conversions of a
2084 FIXED_CST to an integer type. */
2085
2086 static tree
2087 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2088 {
2089 tree t;
2090 double_int temp, temp_trunc;
2091 scalar_mode mode;
2092
2093 /* Right shift FIXED_CST to temp by fbit. */
2094 temp = TREE_FIXED_CST (arg1).data;
2095 mode = TREE_FIXED_CST (arg1).mode;
2096 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2097 {
2098 temp = temp.rshift (GET_MODE_FBIT (mode),
2099 HOST_BITS_PER_DOUBLE_INT,
2100 SIGNED_FIXED_POINT_MODE_P (mode));
2101
2102 /* Left shift temp to temp_trunc by fbit. */
2103 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2104 HOST_BITS_PER_DOUBLE_INT,
2105 SIGNED_FIXED_POINT_MODE_P (mode));
2106 }
2107 else
2108 {
2109 temp = double_int_zero;
2110 temp_trunc = double_int_zero;
2111 }
2112
2113 /* If FIXED_CST is negative, we need to round the value toward 0:
2114 if the truncation discarded any nonzero fractional bits, add 1 to temp. */
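/* For example, a signed fixed-point -1.5 shifts to -2 (the arithmetic
shift truncates toward negative infinity); the discarded fractional
bits are nonzero, so adding 1 gives -1, the value nearest zero. */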
2115 if (SIGNED_FIXED_POINT_MODE_P (mode)
2116 && temp_trunc.is_negative ()
2117 && TREE_FIXED_CST (arg1).data != temp_trunc)
2118 temp += double_int_one;
2119
2120 /* Given a fixed-point constant, make new constant with new type,
2121 appropriately sign-extended or truncated. */
2122 t = force_fit_type (type, temp, -1,
2123 (temp.is_negative ()
2124 && (TYPE_UNSIGNED (type)
2125 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2126 | TREE_OVERFLOW (arg1));
2127
2128 return t;
2129 }
2130
2131 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2132 to another floating point type. */
2133
2134 static tree
2135 fold_convert_const_real_from_real (tree type, const_tree arg1)
2136 {
2137 REAL_VALUE_TYPE value;
2138 tree t;
2139
2140 /* Don't perform the operation if flag_signaling_nans is on
2141 and the operand is a signaling NaN. */
2142 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2143 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2144 return NULL_TREE;
2145
2146 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2147 t = build_real (type, value);
2148
2149 /* If converting an infinity or NAN to a representation that doesn't
2150 have one, set the overflow bit so that we can produce some kind of
2151 error message at the appropriate point if necessary. It's not the
2152 most user-friendly message, but it's better than nothing. */
2153 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2154 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2155 TREE_OVERFLOW (t) = 1;
2156 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2157 && !MODE_HAS_NANS (TYPE_MODE (type)))
2158 TREE_OVERFLOW (t) = 1;
2159 /* Regular overflow, conversion produced an infinity in a mode that
2160 can't represent them. */
2161 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2162 && REAL_VALUE_ISINF (value)
2163 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2164 TREE_OVERFLOW (t) = 1;
2165 else
2166 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2167 return t;
2168 }
2169
2170 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2171 to a floating point type. */
2172
2173 static tree
2174 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2175 {
2176 REAL_VALUE_TYPE value;
2177 tree t;
2178
2179 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2180 &TREE_FIXED_CST (arg1));
2181 t = build_real (type, value);
2182
2183 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2184 return t;
2185 }
2186
2187 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2188 to another fixed-point type. */
2189
2190 static tree
2191 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2192 {
2193 FIXED_VALUE_TYPE value;
2194 tree t;
2195 bool overflow_p;
2196
2197 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2198 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2199 t = build_fixed (type, value);
2200
2201 /* Propagate overflow flags. */
2202 if (overflow_p | TREE_OVERFLOW (arg1))
2203 TREE_OVERFLOW (t) = 1;
2204 return t;
2205 }
2206
2207 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2208 to a fixed-point type. */
2209
2210 static tree
2211 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2212 {
2213 FIXED_VALUE_TYPE value;
2214 tree t;
2215 bool overflow_p;
2216 double_int di;
2217
2218 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2219
2220 di.low = TREE_INT_CST_ELT (arg1, 0);
2221 if (TREE_INT_CST_NUNITS (arg1) == 1)
2222 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2223 else
2224 di.high = TREE_INT_CST_ELT (arg1, 1);
2225
2226 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2227 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2228 TYPE_SATURATING (type));
2229 t = build_fixed (type, value);
2230
2231 /* Propagate overflow flags. */
2232 if (overflow_p | TREE_OVERFLOW (arg1))
2233 TREE_OVERFLOW (t) = 1;
2234 return t;
2235 }
2236
2237 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2238 to a fixed-point type. */
2239
2240 static tree
2241 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2242 {
2243 FIXED_VALUE_TYPE value;
2244 tree t;
2245 bool overflow_p;
2246
2247 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2248 &TREE_REAL_CST (arg1),
2249 TYPE_SATURATING (type));
2250 t = build_fixed (type, value);
2251
2252 /* Propagate overflow flags. */
2253 if (overflow_p | TREE_OVERFLOW (arg1))
2254 TREE_OVERFLOW (t) = 1;
2255 return t;
2256 }
2257
2258 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2259 type TYPE. If no simplification can be done return NULL_TREE. */
2260
2261 static tree
2262 fold_convert_const (enum tree_code code, tree type, tree arg1)
2263 {
2264 tree arg_type = TREE_TYPE (arg1);
2265 if (arg_type == type)
2266 return arg1;
2267
2268 /* We can't widen types, since the runtime value could overflow the
2269 original type before being extended to the new type. */
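/* E.g. truncating the poly_int 2 + 2X from 64 bits to 32 bits is safe,
but extending the 8-bit value 254 + 4X to 32 bits is not: at X == 1
the 8-bit result wraps to 2, whereas extending first would give 258. */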
2270 if (POLY_INT_CST_P (arg1)
2271 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2272 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2273 return build_poly_int_cst (type,
2274 poly_wide_int::from (poly_int_cst_value (arg1),
2275 TYPE_PRECISION (type),
2276 TYPE_SIGN (arg_type)));
2277
2278 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2279 || TREE_CODE (type) == OFFSET_TYPE)
2280 {
2281 if (TREE_CODE (arg1) == INTEGER_CST)
2282 return fold_convert_const_int_from_int (type, arg1);
2283 else if (TREE_CODE (arg1) == REAL_CST)
2284 return fold_convert_const_int_from_real (code, type, arg1);
2285 else if (TREE_CODE (arg1) == FIXED_CST)
2286 return fold_convert_const_int_from_fixed (type, arg1);
2287 }
2288 else if (TREE_CODE (type) == REAL_TYPE)
2289 {
2290 if (TREE_CODE (arg1) == INTEGER_CST)
2291 return build_real_from_int_cst (type, arg1);
2292 else if (TREE_CODE (arg1) == REAL_CST)
2293 return fold_convert_const_real_from_real (type, arg1);
2294 else if (TREE_CODE (arg1) == FIXED_CST)
2295 return fold_convert_const_real_from_fixed (type, arg1);
2296 }
2297 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2298 {
2299 if (TREE_CODE (arg1) == FIXED_CST)
2300 return fold_convert_const_fixed_from_fixed (type, arg1);
2301 else if (TREE_CODE (arg1) == INTEGER_CST)
2302 return fold_convert_const_fixed_from_int (type, arg1);
2303 else if (TREE_CODE (arg1) == REAL_CST)
2304 return fold_convert_const_fixed_from_real (type, arg1);
2305 }
2306 else if (TREE_CODE (type) == VECTOR_TYPE)
2307 {
2308 if (TREE_CODE (arg1) == VECTOR_CST
2309 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2310 {
2311 tree elttype = TREE_TYPE (type);
2312 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2313 /* We can't handle steps directly when extending, since the
2314 values need to wrap at the original precision first. */
2315 bool step_ok_p
2316 = (INTEGRAL_TYPE_P (elttype)
2317 && INTEGRAL_TYPE_P (arg1_elttype)
2318 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2319 tree_vector_builder v;
2320 if (!v.new_unary_operation (type, arg1, step_ok_p))
2321 return NULL_TREE;
2322 unsigned int len = v.encoded_nelts ();
2323 for (unsigned int i = 0; i < len; ++i)
2324 {
2325 tree elt = VECTOR_CST_ELT (arg1, i);
2326 tree cvt = fold_convert_const (code, elttype, elt);
2327 if (cvt == NULL_TREE)
2328 return NULL_TREE;
2329 v.quick_push (cvt);
2330 }
2331 return v.build ();
2332 }
2333 }
2334 return NULL_TREE;
2335 }
2336
2337 /* Construct a vector of zero elements of vector type TYPE. */
2338
2339 static tree
2340 build_zero_vector (tree type)
2341 {
2342 tree t;
2343
2344 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2345 return build_vector_from_val (type, t);
2346 }
2347
2348 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2349
2350 bool
2351 fold_convertible_p (const_tree type, const_tree arg)
2352 {
2353 tree orig = TREE_TYPE (arg);
2354
2355 if (type == orig)
2356 return true;
2357
2358 if (TREE_CODE (arg) == ERROR_MARK
2359 || TREE_CODE (type) == ERROR_MARK
2360 || TREE_CODE (orig) == ERROR_MARK)
2361 return false;
2362
2363 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2364 return true;
2365
2366 switch (TREE_CODE (type))
2367 {
2368 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2369 case POINTER_TYPE: case REFERENCE_TYPE:
2370 case OFFSET_TYPE:
2371 return (INTEGRAL_TYPE_P (orig)
2372 || (POINTER_TYPE_P (orig)
2373 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2374 || TREE_CODE (orig) == OFFSET_TYPE);
2375
2376 case REAL_TYPE:
2377 case FIXED_POINT_TYPE:
2378 case VOID_TYPE:
2379 return TREE_CODE (type) == TREE_CODE (orig);
2380
2381 case VECTOR_TYPE:
2382 return (VECTOR_TYPE_P (orig)
2383 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2384 TYPE_VECTOR_SUBPARTS (orig))
2385 && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));
2386
2387 default:
2388 return false;
2389 }
2390 }
2391
2392 /* Convert expression ARG to type TYPE. Used by the middle-end for
2393 simple conversions in preference to calling the front-end's convert. */
2394
2395 tree
2396 fold_convert_loc (location_t loc, tree type, tree arg)
2397 {
2398 tree orig = TREE_TYPE (arg);
2399 tree tem;
2400
2401 if (type == orig)
2402 return arg;
2403
2404 if (TREE_CODE (arg) == ERROR_MARK
2405 || TREE_CODE (type) == ERROR_MARK
2406 || TREE_CODE (orig) == ERROR_MARK)
2407 return error_mark_node;
2408
2409 switch (TREE_CODE (type))
2410 {
2411 case POINTER_TYPE:
2412 case REFERENCE_TYPE:
2413 /* Handle conversions between pointers to different address spaces. */
2414 if (POINTER_TYPE_P (orig)
2415 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2416 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2417 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2418 /* fall through */
2419
2420 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2421 case OFFSET_TYPE:
2422 if (TREE_CODE (arg) == INTEGER_CST)
2423 {
2424 tem = fold_convert_const (NOP_EXPR, type, arg);
2425 if (tem != NULL_TREE)
2426 return tem;
2427 }
2428 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2429 || TREE_CODE (orig) == OFFSET_TYPE)
2430 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2431 if (TREE_CODE (orig) == COMPLEX_TYPE)
2432 return fold_convert_loc (loc, type,
2433 fold_build1_loc (loc, REALPART_EXPR,
2434 TREE_TYPE (orig), arg));
2435 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2436 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2437 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2438
2439 case REAL_TYPE:
2440 if (TREE_CODE (arg) == INTEGER_CST)
2441 {
2442 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2443 if (tem != NULL_TREE)
2444 return tem;
2445 }
2446 else if (TREE_CODE (arg) == REAL_CST)
2447 {
2448 tem = fold_convert_const (NOP_EXPR, type, arg);
2449 if (tem != NULL_TREE)
2450 return tem;
2451 }
2452 else if (TREE_CODE (arg) == FIXED_CST)
2453 {
2454 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2455 if (tem != NULL_TREE)
2456 return tem;
2457 }
2458
2459 switch (TREE_CODE (orig))
2460 {
2461 case INTEGER_TYPE:
2462 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2463 case POINTER_TYPE: case REFERENCE_TYPE:
2464 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2465
2466 case REAL_TYPE:
2467 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2468
2469 case FIXED_POINT_TYPE:
2470 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2471
2472 case COMPLEX_TYPE:
2473 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2474 return fold_convert_loc (loc, type, tem);
2475
2476 default:
2477 gcc_unreachable ();
2478 }
2479
2480 case FIXED_POINT_TYPE:
2481 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2482 || TREE_CODE (arg) == REAL_CST)
2483 {
2484 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2485 if (tem != NULL_TREE)
2486 goto fold_convert_exit;
2487 }
2488
2489 switch (TREE_CODE (orig))
2490 {
2491 case FIXED_POINT_TYPE:
2492 case INTEGER_TYPE:
2493 case ENUMERAL_TYPE:
2494 case BOOLEAN_TYPE:
2495 case REAL_TYPE:
2496 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2497
2498 case COMPLEX_TYPE:
2499 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2500 return fold_convert_loc (loc, type, tem);
2501
2502 default:
2503 gcc_unreachable ();
2504 }
2505
2506 case COMPLEX_TYPE:
2507 switch (TREE_CODE (orig))
2508 {
2509 case INTEGER_TYPE:
2510 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2511 case POINTER_TYPE: case REFERENCE_TYPE:
2512 case REAL_TYPE:
2513 case FIXED_POINT_TYPE:
2514 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2515 fold_convert_loc (loc, TREE_TYPE (type), arg),
2516 fold_convert_loc (loc, TREE_TYPE (type),
2517 integer_zero_node));
2518 case COMPLEX_TYPE:
2519 {
2520 tree rpart, ipart;
2521
2522 if (TREE_CODE (arg) == COMPLEX_EXPR)
2523 {
2524 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2525 TREE_OPERAND (arg, 0));
2526 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2527 TREE_OPERAND (arg, 1));
2528 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2529 }
2530
2531 arg = save_expr (arg);
2532 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2533 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2534 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2535 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2536 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2537 }
2538
2539 default:
2540 gcc_unreachable ();
2541 }
2542
2543 case VECTOR_TYPE:
2544 if (integer_zerop (arg))
2545 return build_zero_vector (type);
2546 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2547 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2548 || TREE_CODE (orig) == VECTOR_TYPE);
2549 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2550
2551 case VOID_TYPE:
2552 tem = fold_ignored_result (arg);
2553 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2554
2555 default:
2556 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2557 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2558 gcc_unreachable ();
2559 }
2560 fold_convert_exit:
2561 protected_set_expr_location_unshare (tem, loc);
2562 return tem;
2563 }
2564 \f
2565 /* Return false if expr can be assumed not to be an lvalue, true
2566 otherwise. */
2567
2568 static bool
2569 maybe_lvalue_p (const_tree x)
2570 {
2571 /* We only need to wrap lvalue tree codes. */
2572 switch (TREE_CODE (x))
2573 {
2574 case VAR_DECL:
2575 case PARM_DECL:
2576 case RESULT_DECL:
2577 case LABEL_DECL:
2578 case FUNCTION_DECL:
2579 case SSA_NAME:
2580
2581 case COMPONENT_REF:
2582 case MEM_REF:
2583 case INDIRECT_REF:
2584 case ARRAY_REF:
2585 case ARRAY_RANGE_REF:
2586 case BIT_FIELD_REF:
2587 case OBJ_TYPE_REF:
2588
2589 case REALPART_EXPR:
2590 case IMAGPART_EXPR:
2591 case PREINCREMENT_EXPR:
2592 case PREDECREMENT_EXPR:
2593 case SAVE_EXPR:
2594 case TRY_CATCH_EXPR:
2595 case WITH_CLEANUP_EXPR:
2596 case COMPOUND_EXPR:
2597 case MODIFY_EXPR:
2598 case TARGET_EXPR:
2599 case COND_EXPR:
2600 case BIND_EXPR:
2601 break;
2602
2603 default:
2604 /* Assume the worst for front-end tree codes. */
2605 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2606 break;
2607 return false;
2608 }
2609
2610 return true;
2611 }
2612
2613 /* Return an expr equal to X but certainly not valid as an lvalue. */
2614
2615 tree
2616 non_lvalue_loc (location_t loc, tree x)
2617 {
2618 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2619 us. */
2620 if (in_gimple_form)
2621 return x;
2622
2623 if (! maybe_lvalue_p (x))
2624 return x;
2625 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2626 }
2627
2628 /* When pedantic, return an expr equal to X but certainly not valid as a
2629 pedantic lvalue. Otherwise, return X. */
2630
2631 static tree
2632 pedantic_non_lvalue_loc (location_t loc, tree x)
2633 {
2634 return protected_set_expr_location_unshare (x, loc);
2635 }
2636 \f
2637 /* Given a tree comparison code, return the code that is the logical inverse.
2638 It is generally not safe to do this for floating-point comparisons, except
2639 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2640 ERROR_MARK in this case. */
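/* For example, when NaNs are honored, the inverse of LT_EXPR is
UNGE_EXPR ("unordered or greater or equal"), since ! (a < b) must also
hold when either operand is a NaN; plain GE_EXPR would be wrong then. */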
2641
2642 enum tree_code
2643 invert_tree_comparison (enum tree_code code, bool honor_nans)
2644 {
2645 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2646 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2647 return ERROR_MARK;
2648
2649 switch (code)
2650 {
2651 case EQ_EXPR:
2652 return NE_EXPR;
2653 case NE_EXPR:
2654 return EQ_EXPR;
2655 case GT_EXPR:
2656 return honor_nans ? UNLE_EXPR : LE_EXPR;
2657 case GE_EXPR:
2658 return honor_nans ? UNLT_EXPR : LT_EXPR;
2659 case LT_EXPR:
2660 return honor_nans ? UNGE_EXPR : GE_EXPR;
2661 case LE_EXPR:
2662 return honor_nans ? UNGT_EXPR : GT_EXPR;
2663 case LTGT_EXPR:
2664 return UNEQ_EXPR;
2665 case UNEQ_EXPR:
2666 return LTGT_EXPR;
2667 case UNGT_EXPR:
2668 return LE_EXPR;
2669 case UNGE_EXPR:
2670 return LT_EXPR;
2671 case UNLT_EXPR:
2672 return GE_EXPR;
2673 case UNLE_EXPR:
2674 return GT_EXPR;
2675 case ORDERED_EXPR:
2676 return UNORDERED_EXPR;
2677 case UNORDERED_EXPR:
2678 return ORDERED_EXPR;
2679 default:
2680 gcc_unreachable ();
2681 }
2682 }
2683
2684 /* Similar, but return the comparison that results if the operands are
2685 swapped. This is safe for floating-point. */
2686
2687 enum tree_code
2688 swap_tree_comparison (enum tree_code code)
2689 {
2690 switch (code)
2691 {
2692 case EQ_EXPR:
2693 case NE_EXPR:
2694 case ORDERED_EXPR:
2695 case UNORDERED_EXPR:
2696 case LTGT_EXPR:
2697 case UNEQ_EXPR:
2698 return code;
2699 case GT_EXPR:
2700 return LT_EXPR;
2701 case GE_EXPR:
2702 return LE_EXPR;
2703 case LT_EXPR:
2704 return GT_EXPR;
2705 case LE_EXPR:
2706 return GE_EXPR;
2707 case UNGT_EXPR:
2708 return UNLT_EXPR;
2709 case UNGE_EXPR:
2710 return UNLE_EXPR;
2711 case UNLT_EXPR:
2712 return UNGT_EXPR;
2713 case UNLE_EXPR:
2714 return UNGE_EXPR;
2715 default:
2716 gcc_unreachable ();
2717 }
2718 }
2719
2720
2721 /* Convert a comparison tree code from an enum tree_code representation
2722 into a compcode bit-based encoding. This function is the inverse of
2723 compcode_to_comparison. */
2724
2725 static enum comparison_code
2726 comparison_to_compcode (enum tree_code code)
2727 {
2728 switch (code)
2729 {
2730 case LT_EXPR:
2731 return COMPCODE_LT;
2732 case EQ_EXPR:
2733 return COMPCODE_EQ;
2734 case LE_EXPR:
2735 return COMPCODE_LE;
2736 case GT_EXPR:
2737 return COMPCODE_GT;
2738 case NE_EXPR:
2739 return COMPCODE_NE;
2740 case GE_EXPR:
2741 return COMPCODE_GE;
2742 case ORDERED_EXPR:
2743 return COMPCODE_ORD;
2744 case UNORDERED_EXPR:
2745 return COMPCODE_UNORD;
2746 case UNLT_EXPR:
2747 return COMPCODE_UNLT;
2748 case UNEQ_EXPR:
2749 return COMPCODE_UNEQ;
2750 case UNLE_EXPR:
2751 return COMPCODE_UNLE;
2752 case UNGT_EXPR:
2753 return COMPCODE_UNGT;
2754 case LTGT_EXPR:
2755 return COMPCODE_LTGT;
2756 case UNGE_EXPR:
2757 return COMPCODE_UNGE;
2758 default:
2759 gcc_unreachable ();
2760 }
2761 }
2762
2763 /* Convert a compcode bit-based encoding of a comparison operator back
2764 to GCC's enum tree_code representation. This function is the
2765 inverse of comparison_to_compcode. */
2766
2767 static enum tree_code
2768 compcode_to_comparison (enum comparison_code code)
2769 {
2770 switch (code)
2771 {
2772 case COMPCODE_LT:
2773 return LT_EXPR;
2774 case COMPCODE_EQ:
2775 return EQ_EXPR;
2776 case COMPCODE_LE:
2777 return LE_EXPR;
2778 case COMPCODE_GT:
2779 return GT_EXPR;
2780 case COMPCODE_NE:
2781 return NE_EXPR;
2782 case COMPCODE_GE:
2783 return GE_EXPR;
2784 case COMPCODE_ORD:
2785 return ORDERED_EXPR;
2786 case COMPCODE_UNORD:
2787 return UNORDERED_EXPR;
2788 case COMPCODE_UNLT:
2789 return UNLT_EXPR;
2790 case COMPCODE_UNEQ:
2791 return UNEQ_EXPR;
2792 case COMPCODE_UNLE:
2793 return UNLE_EXPR;
2794 case COMPCODE_UNGT:
2795 return UNGT_EXPR;
2796 case COMPCODE_LTGT:
2797 return LTGT_EXPR;
2798 case COMPCODE_UNGE:
2799 return UNGE_EXPR;
2800 default:
2801 gcc_unreachable ();
2802 }
2803 }
2804
2805 /* Return true if COND1 tests the opposite condition of COND2. */
2806
2807 bool
2808 inverse_conditions_p (const_tree cond1, const_tree cond2)
2809 {
2810 return (COMPARISON_CLASS_P (cond1)
2811 && COMPARISON_CLASS_P (cond2)
2812 && (invert_tree_comparison
2813 (TREE_CODE (cond1),
2814 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2815 && operand_equal_p (TREE_OPERAND (cond1, 0),
2816 TREE_OPERAND (cond2, 0), 0)
2817 && operand_equal_p (TREE_OPERAND (cond1, 1),
2818 TREE_OPERAND (cond2, 1), 0));
2819 }
2820
2821 /* Return a tree for the comparison which is the combination of
2822 doing the AND or OR (depending on CODE) of the two operations LCODE
2823 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2824 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2825 if this makes the transformation invalid. */
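/* For example, ORing the compcodes of a < b and a == b yields the
compcode of a <= b, so (a < b || a == b) can fold to the single
comparison a <= b when that is safe for the mode. */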
2826
2827 tree
2828 combine_comparisons (location_t loc,
2829 enum tree_code code, enum tree_code lcode,
2830 enum tree_code rcode, tree truth_type,
2831 tree ll_arg, tree lr_arg)
2832 {
2833 bool honor_nans = HONOR_NANS (ll_arg);
2834 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2835 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2836 int compcode;
2837
2838 switch (code)
2839 {
2840 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2841 compcode = lcompcode & rcompcode;
2842 break;
2843
2844 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2845 compcode = lcompcode | rcompcode;
2846 break;
2847
2848 default:
2849 return NULL_TREE;
2850 }
2851
2852 if (!honor_nans)
2853 {
2854 /* Eliminate unordered comparisons, as well as LTGT and ORD
2855 which are not used unless the mode has NaNs. */
2856 compcode &= ~COMPCODE_UNORD;
2857 if (compcode == COMPCODE_LTGT)
2858 compcode = COMPCODE_NE;
2859 else if (compcode == COMPCODE_ORD)
2860 compcode = COMPCODE_TRUE;
2861 }
2862 else if (flag_trapping_math)
2863 {
2864 /* Check that the original operation and the optimized ones will trap
2865 under the same condition. */
2866 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2867 && (lcompcode != COMPCODE_EQ)
2868 && (lcompcode != COMPCODE_ORD);
2869 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2870 && (rcompcode != COMPCODE_EQ)
2871 && (rcompcode != COMPCODE_ORD);
2872 bool trap = (compcode & COMPCODE_UNORD) == 0
2873 && (compcode != COMPCODE_EQ)
2874 && (compcode != COMPCODE_ORD);
2875
2876 /* In a short-circuited boolean expression the LHS might be
2877 such that the RHS, if evaluated, will never trap. For
2878 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2879 if neither x nor y is NaN. (This is a mixed blessing: for
2880 example, the expression above will never trap, hence
2881 optimizing it to x < y would be invalid). */
2882 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2883 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2884 rtrap = false;
2885
2886 /* If the comparison was short-circuited, and only the RHS
2887 trapped, we may now generate a spurious trap. */
2888 if (rtrap && !ltrap
2889 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2890 return NULL_TREE;
2891
2892 /* If we changed the conditions that cause a trap, we lose. */
2893 if ((ltrap || rtrap) != trap)
2894 return NULL_TREE;
2895 }
2896
2897 if (compcode == COMPCODE_TRUE)
2898 return constant_boolean_node (true, truth_type);
2899 else if (compcode == COMPCODE_FALSE)
2900 return constant_boolean_node (false, truth_type);
2901 else
2902 {
2903 enum tree_code tcode;
2904
2905 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2906 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2907 }
2908 }
2909 \f
2910 /* Return nonzero if two operands (typically of the same tree node)
2911 are necessarily equal. FLAGS modifies behavior as follows:
2912
2913 If OEP_ONLY_CONST is set, only return nonzero for constants.
2914 This function tests whether the operands are indistinguishable;
2915 it does not test whether they are equal using C's == operation.
2916 The distinction is important for IEEE floating point, because
2917 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2918 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2919
2920 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2921 even though it may hold multiple values during a function.
2922 This is because a GCC tree node guarantees that nothing else is
2923 executed between the evaluation of its "operands" (which may often
2924 be evaluated in arbitrary order). Hence if the operands themselves
2925 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2926 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2927 unset means assuming isochronic (or instantaneous) tree equivalence.
2928 Unless comparing arbitrary expression trees, such as from different
2929 statements, this flag can usually be left unset.
2930
2931 If OEP_PURE_SAME is set, then pure functions with identical arguments
2932 are considered the same. It is used when the caller has other ways
2933 to ensure that global memory is unchanged in between.
2934
2935 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2936 not values of expressions.
2937
2938 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2939 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2940
2941 If OEP_BITWISE is set, then require the values to be bitwise identical
2942 rather than simply numerically equal. Do not take advantage of things
2943 like math-related flags or undefined behavior; only return true for
2944 values that are provably bitwise identical in all circumstances.
2945
2946 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2947 any operand with side effects. This is unnecessarily conservative when
2948 we know that arg0 and arg1 are in disjoint code paths (such as the two
2949 arms of the ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when
2950 comparing addresses with the TREE_CONSTANT flag set, so we know that &var == &var
2951 even if var is volatile. */
2952
2953 bool
2954 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2955 unsigned int flags)
2956 {
2957 bool r;
2958 if (verify_hash_value (arg0, arg1, flags, &r))
2959 return r;
2960
2961 STRIP_ANY_LOCATION_WRAPPER (arg0);
2962 STRIP_ANY_LOCATION_WRAPPER (arg1);
2963
2964 /* If either is ERROR_MARK, they aren't equal. */
2965 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2966 || TREE_TYPE (arg0) == error_mark_node
2967 || TREE_TYPE (arg1) == error_mark_node)
2968 return false;
2969
2970 /* Similarly, if either does not have a type (like a template id),
2971 they aren't equal. */
2972 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2973 return false;
2974
2975 /* Bitwise identity makes no sense if the values have different layouts. */
2976 if ((flags & OEP_BITWISE)
2977 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2978 return false;
2979
2980 /* We cannot consider pointers to different address space equal. */
2981 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2982 && POINTER_TYPE_P (TREE_TYPE (arg1))
2983 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2984 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2985 return false;
2986
2987 /* Check equality of integer constants before bailing out due to
2988 precision differences. */
2989 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2990 {
2991 /* Address of INTEGER_CST is not defined; check that we did not forget
2992 to drop the OEP_ADDRESS_OF flags. */
2993 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2994 return tree_int_cst_equal (arg0, arg1);
2995 }
2996
2997 if (!(flags & OEP_ADDRESS_OF))
2998 {
2999 /* If both types don't have the same signedness, then we can't consider
3000 them equal. We must check this before the STRIP_NOPS calls
3001 because they may change the signedness of the arguments. As pointers
3002 strictly don't have a signedness, require either two pointers or
3003 two non-pointers as well. */
3004 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3005 || POINTER_TYPE_P (TREE_TYPE (arg0))
3006 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3007 return false;
3008
3009 /* If both types don't have the same precision, then it is not safe
3010 to strip NOPs. */
3011 if (element_precision (TREE_TYPE (arg0))
3012 != element_precision (TREE_TYPE (arg1)))
3013 return false;
3014
3015 STRIP_NOPS (arg0);
3016 STRIP_NOPS (arg1);
3017 }
3018 #if 0
3019 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3020 sanity check once the issue is solved. */
3021 else
3022 /* Addresses of conversions and SSA_NAMEs (and many other things)
3023 are not defined. Check that we did not forget to drop the
3024 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3025 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3026 && TREE_CODE (arg0) != SSA_NAME);
3027 #endif
3028
3029 /* In case both args are comparisons but with different comparison
3030 code, try to swap the comparison operands of one arg to produce
3031 a match and compare that variant. */
3032 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3033 && COMPARISON_CLASS_P (arg0)
3034 && COMPARISON_CLASS_P (arg1))
3035 {
3036 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3037
3038 if (TREE_CODE (arg0) == swap_code)
3039 return operand_equal_p (TREE_OPERAND (arg0, 0),
3040 TREE_OPERAND (arg1, 1), flags)
3041 && operand_equal_p (TREE_OPERAND (arg0, 1),
3042 TREE_OPERAND (arg1, 0), flags);
3043 }
3044
3045 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3046 {
3047 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3048 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3049 ;
3050 else if (flags & OEP_ADDRESS_OF)
3051 {
3052 /* If we are interested in comparing addresses, ignore
3053 MEM_REF wrappings of the base that can appear just for
3054 TBAA reasons. */
3055 if (TREE_CODE (arg0) == MEM_REF
3056 && DECL_P (arg1)
3057 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3058 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3059 && integer_zerop (TREE_OPERAND (arg0, 1)))
3060 return true;
3061 else if (TREE_CODE (arg1) == MEM_REF
3062 && DECL_P (arg0)
3063 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3064 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3065 && integer_zerop (TREE_OPERAND (arg1, 1)))
3066 return true;
3067 return false;
3068 }
3069 else
3070 return false;
3071 }
3072
3073 /* When not checking addresses, this is needed for conversions and for
3074 COMPONENT_REF. Might as well play it safe and always test this. */
3075 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3076 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3077 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3078 && !(flags & OEP_ADDRESS_OF)))
3079 return false;
3080
3081 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3082 We don't care about side effects in that case because the SAVE_EXPR
3083 takes care of that for us. In all other cases, two expressions are
3084 equal if they have no side effects. If we have two identical
3085 expressions with side effects that should be treated the same due
3086 to the only side effects being identical SAVE_EXPR's, that will
3087 be detected in the recursive calls below.
3088 If we are taking an invariant address of two identical objects
3089 they are necessarily equal as well. */
3090 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3091 && (TREE_CODE (arg0) == SAVE_EXPR
3092 || (flags & OEP_MATCH_SIDE_EFFECTS)
3093 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3094 return true;
3095
3096 /* Next handle constant cases, those for which we can return true even
3097 if OEP_ONLY_CONST is set. */
3098 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3099 switch (TREE_CODE (arg0))
3100 {
3101 case INTEGER_CST:
3102 return tree_int_cst_equal (arg0, arg1);
3103
3104 case FIXED_CST:
3105 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3106 TREE_FIXED_CST (arg1));
3107
3108 case REAL_CST:
3109 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3110 return true;
3111
3112 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3113 {
3114 /* If we do not distinguish between signed and unsigned zero,
3115 consider them equal. */
3116 if (real_zerop (arg0) && real_zerop (arg1))
3117 return true;
3118 }
3119 return false;
3120
3121 case VECTOR_CST:
3122 {
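/* The encodings produced by tree_vector_builder are canonical, so it
is enough to compare the encoded elements: e.g. the stepped vector
{ 0, 1, 2, 3 } is always one pattern with encoded elements 0, 1, 2. */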
3123 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3124 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3125 return false;
3126
3127 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3128 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3129 return false;
3130
3131 unsigned int count = vector_cst_encoded_nelts (arg0);
3132 for (unsigned int i = 0; i < count; ++i)
3133 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3134 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3135 return false;
3136 return true;
3137 }
3138
3139 case COMPLEX_CST:
3140 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3141 flags)
3142 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3143 flags));
3144
3145 case STRING_CST:
3146 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3147 && ! memcmp (TREE_STRING_POINTER (arg0),
3148 TREE_STRING_POINTER (arg1),
3149 TREE_STRING_LENGTH (arg0)));
3150
3151 case ADDR_EXPR:
3152 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3153 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3154 flags | OEP_ADDRESS_OF
3155 | OEP_MATCH_SIDE_EFFECTS);
3156 case CONSTRUCTOR:
3157 /* In GIMPLE empty constructors are allowed in initializers of
3158 aggregates. */
3159 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3160 default:
3161 break;
3162 }
3163
3164 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3165 two instances of undefined behavior will give identical results. */
3166 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3167 return false;
3168
3169 /* Define macros to test an operand from arg0 and arg1 for equality and a
3170 variant that allows null and views null as being different from any
3171 non-null value. In the latter case, if either is null, then both
3172 must be; otherwise, do the normal comparison. */
3173 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3174 TREE_OPERAND (arg1, N), flags)
3175
3176 #define OP_SAME_WITH_NULL(N) \
3177 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3178 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3179
3180 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3181 {
3182 case tcc_unary:
3183 /* Two conversions are equal only if signedness and modes match. */
3184 switch (TREE_CODE (arg0))
3185 {
3186 CASE_CONVERT:
3187 case FIX_TRUNC_EXPR:
3188 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3189 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3190 return false;
3191 break;
3192 default:
3193 break;
3194 }
3195
3196 return OP_SAME (0);
3197
3198
3199 case tcc_comparison:
3200 case tcc_binary:
3201 if (OP_SAME (0) && OP_SAME (1))
3202 return true;
3203
3204 /* For commutative ops, allow the other order. */
3205 return (commutative_tree_code (TREE_CODE (arg0))
3206 && operand_equal_p (TREE_OPERAND (arg0, 0),
3207 TREE_OPERAND (arg1, 1), flags)
3208 && operand_equal_p (TREE_OPERAND (arg0, 1),
3209 TREE_OPERAND (arg1, 0), flags));
3210
3211 case tcc_reference:
3212 /* If either of the pointer (or reference) expressions we are
3213 dereferencing contain a side effect, these cannot be equal,
3214 but their addresses can be. */
3215 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3216 && (TREE_SIDE_EFFECTS (arg0)
3217 || TREE_SIDE_EFFECTS (arg1)))
3218 return false;
3219
3220 switch (TREE_CODE (arg0))
3221 {
3222 case INDIRECT_REF:
3223 if (!(flags & OEP_ADDRESS_OF))
3224 {
3225 if (TYPE_ALIGN (TREE_TYPE (arg0))
3226 != TYPE_ALIGN (TREE_TYPE (arg1)))
3227 return false;
3228 /* Verify that the access types are compatible. */
3229 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3230 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3231 return false;
3232 }
3233 flags &= ~OEP_ADDRESS_OF;
3234 return OP_SAME (0);
3235
3236 case IMAGPART_EXPR:
3237 /* Require the same offset. */
3238 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3239 TYPE_SIZE (TREE_TYPE (arg1)),
3240 flags & ~OEP_ADDRESS_OF))
3241 return false;
3242
3243 /* Fallthru. */
3244 case REALPART_EXPR:
3245 case VIEW_CONVERT_EXPR:
3246 return OP_SAME (0);
3247
3248 case TARGET_MEM_REF:
3249 case MEM_REF:
3250 if (!(flags & OEP_ADDRESS_OF))
3251 {
3252 /* Require equal access sizes */
3253 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3254 && (!TYPE_SIZE (TREE_TYPE (arg0))
3255 || !TYPE_SIZE (TREE_TYPE (arg1))
3256 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3257 TYPE_SIZE (TREE_TYPE (arg1)),
3258 flags)))
3259 return false;
3260 /* Verify that access happens in similar types. */
3261 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3262 return false;
3263 /* Verify that accesses are TBAA compatible. */
3264 if (!alias_ptr_types_compatible_p
3265 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3266 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3267 || (MR_DEPENDENCE_CLIQUE (arg0)
3268 != MR_DEPENDENCE_CLIQUE (arg1))
3269 || (MR_DEPENDENCE_BASE (arg0)
3270 != MR_DEPENDENCE_BASE (arg1)))
3271 return false;
3272 /* Verify that alignment is compatible. */
3273 if (TYPE_ALIGN (TREE_TYPE (arg0))
3274 != TYPE_ALIGN (TREE_TYPE (arg1)))
3275 return false;
3276 }
3277 flags &= ~OEP_ADDRESS_OF;
3278 return (OP_SAME (0) && OP_SAME (1)
3279 /* TARGET_MEM_REFs require equal extra operands. */
3280 && (TREE_CODE (arg0) != TARGET_MEM_REF
3281 || (OP_SAME_WITH_NULL (2)
3282 && OP_SAME_WITH_NULL (3)
3283 && OP_SAME_WITH_NULL (4))));
3284
3285 case ARRAY_REF:
3286 case ARRAY_RANGE_REF:
3287 if (!OP_SAME (0))
3288 return false;
3289 flags &= ~OEP_ADDRESS_OF;
3290 /* Compare the array index by value first if it is constant, as the
3291 indexes may have different types but the same value here. */
3292 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3293 TREE_OPERAND (arg1, 1))
3294 || OP_SAME (1))
3295 && OP_SAME_WITH_NULL (2)
3296 && OP_SAME_WITH_NULL (3)
3297 /* Compare low bound and element size as with OEP_ADDRESS_OF
3298 we have to account for the offset of the ref. */
3299 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3300 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3301 || (operand_equal_p (array_ref_low_bound
3302 (CONST_CAST_TREE (arg0)),
3303 array_ref_low_bound
3304 (CONST_CAST_TREE (arg1)), flags)
3305 && operand_equal_p (array_ref_element_size
3306 (CONST_CAST_TREE (arg0)),
3307 array_ref_element_size
3308 (CONST_CAST_TREE (arg1)),
3309 flags))));
3310
3311 case COMPONENT_REF:
3312 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3313 may be NULL when we're called to compare MEM_EXPRs. */
3314 if (!OP_SAME_WITH_NULL (0)
3315 || !OP_SAME (1))
3316 return false;
3317 flags &= ~OEP_ADDRESS_OF;
3318 return OP_SAME_WITH_NULL (2);
3319
3320 case BIT_FIELD_REF:
3321 if (!OP_SAME (0))
3322 return false;
3323 flags &= ~OEP_ADDRESS_OF;
3324 return OP_SAME (1) && OP_SAME (2);
3325
3326 /* Virtual table call. */
3327 case OBJ_TYPE_REF:
3328 {
3329 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3330 OBJ_TYPE_REF_EXPR (arg1), flags))
3331 return false;
3332 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3333 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3334 return false;
3335 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3336 OBJ_TYPE_REF_OBJECT (arg1), flags))
3337 return false;
3338 if (!types_same_for_odr (obj_type_ref_class (arg0),
3339 obj_type_ref_class (arg1)))
3340 return false;
3341 return true;
3342 }
3343
3344 default:
3345 return false;
3346 }
3347
3348 case tcc_expression:
3349 switch (TREE_CODE (arg0))
3350 {
3351 case ADDR_EXPR:
3352 /* Be sure we pass the right ADDRESS_OF flag. */
3353 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3354 return operand_equal_p (TREE_OPERAND (arg0, 0),
3355 TREE_OPERAND (arg1, 0),
3356 flags | OEP_ADDRESS_OF);
3357
3358 case TRUTH_NOT_EXPR:
3359 return OP_SAME (0);
3360
3361 case TRUTH_ANDIF_EXPR:
3362 case TRUTH_ORIF_EXPR:
3363 return OP_SAME (0) && OP_SAME (1);
3364
3365 case WIDEN_MULT_PLUS_EXPR:
3366 case WIDEN_MULT_MINUS_EXPR:
3367 if (!OP_SAME (2))
3368 return false;
3369 /* The multiplication operands are commutative. */
3370 /* FALLTHRU */
3371
3372 case TRUTH_AND_EXPR:
3373 case TRUTH_OR_EXPR:
3374 case TRUTH_XOR_EXPR:
3375 if (OP_SAME (0) && OP_SAME (1))
3376 return true;
3377
3378 /* Otherwise take into account this is a commutative operation. */
3379 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3380 TREE_OPERAND (arg1, 1), flags)
3381 && operand_equal_p (TREE_OPERAND (arg0, 1),
3382 TREE_OPERAND (arg1, 0), flags));
3383
3384 case COND_EXPR:
3385 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3386 return false;
3387 flags &= ~OEP_ADDRESS_OF;
3388 return OP_SAME (0);
3389
3390 case BIT_INSERT_EXPR:
3391 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3392 of op1. Need to check to make sure they are the same. */
3393 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3394 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3395 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3396 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3397 return false;
3398 /* FALLTHRU */
3399
3400 case VEC_COND_EXPR:
3401 case DOT_PROD_EXPR:
3402 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3403
3404 case MODIFY_EXPR:
3405 case INIT_EXPR:
3406 case COMPOUND_EXPR:
3407 case PREDECREMENT_EXPR:
3408 case PREINCREMENT_EXPR:
3409 case POSTDECREMENT_EXPR:
3410 case POSTINCREMENT_EXPR:
3411 if (flags & OEP_LEXICOGRAPHIC)
3412 return OP_SAME (0) && OP_SAME (1);
3413 return false;
3414
3415 case CLEANUP_POINT_EXPR:
3416 case EXPR_STMT:
3417 case SAVE_EXPR:
3418 if (flags & OEP_LEXICOGRAPHIC)
3419 return OP_SAME (0);
3420 return false;
3421
3422 default:
3423 return false;
3424 }
3425
3426 case tcc_vl_exp:
3427 switch (TREE_CODE (arg0))
3428 {
3429 case CALL_EXPR:
3430 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3431 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3432 /* If one CALL_EXPR is an internal function call and the other
3433 is a normal function call, then they are not equal. */
3434 return false;
3435 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3436 {
3437 /* If the CALL_EXPRs call different internal functions, then they
3438 are not equal. */
3439 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3440 return false;
3441 }
3442 else
3443 {
3444 /* If the CALL_EXPRs call different functions, then they are not
3445 equal. */
3446 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3447 flags))
3448 return false;
3449 }
3450
3451 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3452 {
3453 unsigned int cef = call_expr_flags (arg0);
3454 if (flags & OEP_PURE_SAME)
3455 cef &= ECF_CONST | ECF_PURE;
3456 else
3457 cef &= ECF_CONST;
3458 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3459 return false;
3460 }
3461
3462 /* Now see if all the arguments are the same. */
3463 {
3464 const_call_expr_arg_iterator iter0, iter1;
3465 const_tree a0, a1;
3466 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3467 a1 = first_const_call_expr_arg (arg1, &iter1);
3468 a0 && a1;
3469 a0 = next_const_call_expr_arg (&iter0),
3470 a1 = next_const_call_expr_arg (&iter1))
3471 if (! operand_equal_p (a0, a1, flags))
3472 return false;
3473
3474 /* If we get here and both argument lists are exhausted
3475 then the CALL_EXPRs are equal. */
3476 return ! (a0 || a1);
3477 }
3478 default:
3479 return false;
3480 }
3481
3482 case tcc_declaration:
3483 /* Consider __builtin_sqrt equal to sqrt. */
3484 return (TREE_CODE (arg0) == FUNCTION_DECL
3485 && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3486 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3487 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3488 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3489
3490 case tcc_exceptional:
3491 if (TREE_CODE (arg0) == CONSTRUCTOR)
3492 {
3493 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3494 return false;
3495
3496 /* In GIMPLE constructors are used only to build vectors from
3497 elements. Individual elements in the constructor must be
3498 indexed in increasing order and form an initial sequence.
3499
3500 We make no effort to compare constructors in GENERIC.
3501 (see sem_variable::equals in ipa-icf which can do so for
3502 constants). */
3503 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3504 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3505 return false;
3506
3507 /* Be sure that vectors constructed have the same representation.
3508 So far only the element precisions and modes were tested to match.
3509 Vectors may be BLKmode, so also check that the numbers of
3510 parts match. */
3511 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3512 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3513 return false;
3514
3515 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3516 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3517 unsigned int len = vec_safe_length (v0);
3518
3519 if (len != vec_safe_length (v1))
3520 return false;
3521
3522 for (unsigned int i = 0; i < len; i++)
3523 {
3524 constructor_elt *c0 = &(*v0)[i];
3525 constructor_elt *c1 = &(*v1)[i];
3526
3527 if (!operand_equal_p (c0->value, c1->value, flags)
3528 /* In GIMPLE the indexes can be either NULL or matching i.
3529 Double check this so we won't get false
3530 positives for GENERIC. */
3531 || (c0->index
3532 && (TREE_CODE (c0->index) != INTEGER_CST
3533 || compare_tree_int (c0->index, i)))
3534 || (c1->index
3535 && (TREE_CODE (c1->index) != INTEGER_CST
3536 || compare_tree_int (c1->index, i))))
3537 return false;
3538 }
3539 return true;
3540 }
3541 else if (TREE_CODE (arg0) == STATEMENT_LIST
3542 && (flags & OEP_LEXICOGRAPHIC))
3543 {
3544 /* Compare the STATEMENT_LISTs. */
3545 tree_stmt_iterator tsi1, tsi2;
3546 tree body1 = CONST_CAST_TREE (arg0);
3547 tree body2 = CONST_CAST_TREE (arg1);
3548 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3549 tsi_next (&tsi1), tsi_next (&tsi2))
3550 {
3551 /* The lists don't have the same number of statements. */
3552 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3553 return false;
3554 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3555 return true;
3556 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3557 flags & (OEP_LEXICOGRAPHIC
3558 | OEP_NO_HASH_CHECK)))
3559 return false;
3560 }
3561 }
3562 return false;
3563
3564 case tcc_statement:
3565 switch (TREE_CODE (arg0))
3566 {
3567 case RETURN_EXPR:
3568 if (flags & OEP_LEXICOGRAPHIC)
3569 return OP_SAME_WITH_NULL (0);
3570 return false;
3571 case DEBUG_BEGIN_STMT:
3572 if (flags & OEP_LEXICOGRAPHIC)
3573 return true;
3574 return false;
3575 default:
3576 return false;
3577 }
3578
3579 default:
3580 return false;
3581 }
3582
3583 #undef OP_SAME
3584 #undef OP_SAME_WITH_NULL
3585 }
3586
3587 /* Generate a hash value for an expression. This can be used iteratively
3588 by passing a previous result as the HSTATE argument. */
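/* A minimal usage sketch (assuming an operand_compare instance is in
scope, with OP0 and OP1 standing for the caller's trees):

     inchash::hash hstate;
     hash_operand (op0, hstate, 0);
     hash_operand (op1, hstate, 0);
     hashval_t h = hstate.end ();
*/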
3589
3590 void
3591 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3592 unsigned int flags)
3593 {
3594 int i;
3595 enum tree_code code;
3596 enum tree_code_class tclass;
3597
3598 if (t == NULL_TREE || t == error_mark_node)
3599 {
3600 hstate.merge_hash (0);
3601 return;
3602 }
3603
3604 STRIP_ANY_LOCATION_WRAPPER (t);
3605
3606 if (!(flags & OEP_ADDRESS_OF))
3607 STRIP_NOPS (t);
3608
3609 code = TREE_CODE (t);
3610
3611 switch (code)
3612 {
3613 /* Alas, constants aren't shared, so we can't rely on pointer
3614 identity. */
3615 case VOID_CST:
3616 hstate.merge_hash (0);
3617 return;
3618 case INTEGER_CST:
3619 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3620 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3621 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3622 return;
3623 case REAL_CST:
3624 {
3625 unsigned int val2;
3626 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3627 val2 = rvc_zero;
3628 else
3629 val2 = real_hash (TREE_REAL_CST_PTR (t));
3630 hstate.merge_hash (val2);
3631 return;
3632 }
3633 case FIXED_CST:
3634 {
3635 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3636 hstate.merge_hash (val2);
3637 return;
3638 }
3639 case STRING_CST:
3640 hstate.add ((const void *) TREE_STRING_POINTER (t),
3641 TREE_STRING_LENGTH (t));
3642 return;
3643 case COMPLEX_CST:
3644 hash_operand (TREE_REALPART (t), hstate, flags);
3645 hash_operand (TREE_IMAGPART (t), hstate, flags);
3646 return;
3647 case VECTOR_CST:
3648 {
3649 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3650 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3651 unsigned int count = vector_cst_encoded_nelts (t);
3652 for (unsigned int i = 0; i < count; ++i)
3653 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3654 return;
3655 }
3656 case SSA_NAME:
3657 /* We can just compare by pointer. */
3658 hstate.add_hwi (SSA_NAME_VERSION (t));
3659 return;
3660 case PLACEHOLDER_EXPR:
3661 /* The node itself doesn't matter. */
3662 return;
3663 case BLOCK:
3664 case OMP_CLAUSE:
3665 /* Ignore. */
3666 return;
3667 case TREE_LIST:
3668 /* A list of expressions, for a CALL_EXPR or as the elements of a
3669 VECTOR_CST. */
3670 for (; t; t = TREE_CHAIN (t))
3671 hash_operand (TREE_VALUE (t), hstate, flags);
3672 return;
3673 case CONSTRUCTOR:
3674 {
3675 unsigned HOST_WIDE_INT idx;
3676 tree field, value;
3677 flags &= ~OEP_ADDRESS_OF;
3678 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3679 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3680 {
3681 /* In GIMPLE the indexes can be either NULL or matching i. */
3682 if (field == NULL_TREE)
3683 field = bitsize_int (idx);
3684 hash_operand (field, hstate, flags);
3685 hash_operand (value, hstate, flags);
3686 }
3687 return;
3688 }
3689 case STATEMENT_LIST:
3690 {
3691 tree_stmt_iterator i;
3692 for (i = tsi_start (CONST_CAST_TREE (t));
3693 !tsi_end_p (i); tsi_next (&i))
3694 hash_operand (tsi_stmt (i), hstate, flags);
3695 return;
3696 }
3697 case TREE_VEC:
3698 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3699 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3700 return;
3701 case IDENTIFIER_NODE:
3702 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3703 return;
3704 case FUNCTION_DECL:
3705 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3706 Otherwise nodes that compare equal according to operand_equal_p might
3707 get different hash codes. However, don't do this for machine specific
3708 or front end builtins, since the function code is overloaded in those
3709 cases. */
3710 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3711 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3712 {
3713 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3714 code = TREE_CODE (t);
3715 }
3716 /* FALL THROUGH */
3717 default:
3718 if (POLY_INT_CST_P (t))
3719 {
3720 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3721 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3722 return;
3723 }
3724 tclass = TREE_CODE_CLASS (code);
3725
3726 if (tclass == tcc_declaration)
3727 {
3728 /* DECLs have a unique ID. */
3729 hstate.add_hwi (DECL_UID (t));
3730 }
3731 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3732 {
3733 /* For comparisons that can be swapped, use the lower
3734 tree code. */
3735 enum tree_code ccode = swap_tree_comparison (code);
3736 if (code < ccode)
3737 ccode = code;
3738 hstate.add_object (ccode);
3739 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3740 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3741 }
3742 else if (CONVERT_EXPR_CODE_P (code))
3743 {
3744 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3745 operand_equal_p. */
3746 enum tree_code ccode = NOP_EXPR;
3747 hstate.add_object (ccode);
3748
3749 /* Don't hash the type, that can lead to having nodes which
3750 compare equal according to operand_equal_p, but which
3751 have different hash codes. Make sure to include signedness
3752 in the hash computation. */
3753 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3754 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3755 }
3756 /* For OEP_ADDRESS_OF, hash MEM_REF [&decl, 0] the same as decl. */
3757 else if (code == MEM_REF
3758 && (flags & OEP_ADDRESS_OF) != 0
3759 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3760 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3761 && integer_zerop (TREE_OPERAND (t, 1)))
3762 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3763 hstate, flags);
3764 /* Don't ICE on FE specific trees, or their arguments etc.
3765 during operand_equal_p hash verification. */
3766 else if (!IS_EXPR_CODE_CLASS (tclass))
3767 gcc_assert (flags & OEP_HASH_CHECK);
3768 else
3769 {
3770 unsigned int sflags = flags;
3771
3772 hstate.add_object (code);
3773
3774 switch (code)
3775 {
3776 case ADDR_EXPR:
3777 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3778 flags |= OEP_ADDRESS_OF;
3779 sflags = flags;
3780 break;
3781
3782 case INDIRECT_REF:
3783 case MEM_REF:
3784 case TARGET_MEM_REF:
3785 flags &= ~OEP_ADDRESS_OF;
3786 sflags = flags;
3787 break;
3788
3789 case ARRAY_REF:
3790 case ARRAY_RANGE_REF:
3791 case COMPONENT_REF:
3792 case BIT_FIELD_REF:
3793 sflags &= ~OEP_ADDRESS_OF;
3794 break;
3795
3796 case COND_EXPR:
3797 flags &= ~OEP_ADDRESS_OF;
3798 break;
3799
3800 case WIDEN_MULT_PLUS_EXPR:
3801 case WIDEN_MULT_MINUS_EXPR:
3802 {
3803 /* The multiplication operands are commutative. */
3804 inchash::hash one, two;
3805 hash_operand (TREE_OPERAND (t, 0), one, flags);
3806 hash_operand (TREE_OPERAND (t, 1), two, flags);
3807 hstate.add_commutative (one, two);
3808 hash_operand (TREE_OPERAND (t, 2), two, flags);
3809 return;
3810 }
3811
3812 case CALL_EXPR:
3813 if (CALL_EXPR_FN (t) == NULL_TREE)
3814 hstate.add_int (CALL_EXPR_IFN (t));
3815 break;
3816
3817 case TARGET_EXPR:
3818 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3819 Usually different TARGET_EXPRs should just use
3820 different temporaries in their slots. */
3821 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3822 return;
3823
3824 /* Virtual table call. */
3825 case OBJ_TYPE_REF:
3826 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3827 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3828 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3829 return;
3830 default:
3831 break;
3832 }
3833
3834 /* Don't hash the type, that can lead to having nodes which
3835 compare equal according to operand_equal_p, but which
3836 have different hash codes. */
3837 if (code == NON_LVALUE_EXPR)
3838 {
3839 /* Make sure to include signedness in the hash computation. */
3840 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3841 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3842 }
3843
3844 else if (commutative_tree_code (code))
3845 {
3846 /* It's a commutative expression. We want to hash it the same
3847 however it appears. We do this by first hashing both operands
3848 and then rehashing based on the order of their independent
3849 hashes. */
3850 inchash::hash one, two;
3851 hash_operand (TREE_OPERAND (t, 0), one, flags);
3852 hash_operand (TREE_OPERAND (t, 1), two, flags);
3853 hstate.add_commutative (one, two);
3854 }
3855 else
3856 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3857 hash_operand (TREE_OPERAND (t, i), hstate,
3858 i == 0 ? flags : sflags);
3859 }
3860 return;
3861 }
3862 }
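
/* Illustrative stand-alone sketch (not part of the original source) of
   the commutative hashing scheme used above: hash each operand
   independently, then combine the two sub-hashes with an
   order-insensitive mix, so that e.g. "a + b" and "b + a" receive the
   same hash.  The mixing function below is hypothetical.  */
#if 0
static unsigned int
commutative_hash_sketch (unsigned int h0, unsigned int h1)
{
  /* Order the two independent operand hashes before mixing so that
     the result does not depend on the operand order.  */
  unsigned int lo = h0 < h1 ? h0 : h1;
  unsigned int hi = h0 < h1 ? h1 : h0;
  return lo * 31u ^ hi;
}
#endif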
3863
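/* If hash verification is active (checking builds, and FLAGS lacks
   OEP_NO_HASH_CHECK), compare ARG0 and ARG1, assert that equal operands
   hash identically, store the comparison result in *RET and return
   true.  Return false, leaving *RET untouched, when no verification
   was done.  */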
3864 bool
3865 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3866 unsigned int flags, bool *ret)
3867 {
3868 /* When checking, verify at the outermost operand_equal_p call that
3869 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
3870 hash value. */
3871 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3872 {
3873 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3874 {
3875 if (arg0 != arg1)
3876 {
3877 inchash::hash hstate0 (0), hstate1 (0);
3878 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3879 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3880 hashval_t h0 = hstate0.end ();
3881 hashval_t h1 = hstate1.end ();
3882 gcc_assert (h0 == h1);
3883 }
3884 *ret = true;
3885 }
3886 else
3887 *ret = false;
3888
3889 return true;
3890 }
3891
3892 return false;
3893 }
3894
3895
3896 static operand_compare default_compare_instance;
3897
3898 /* Convenience wrapper around the operand_compare class, since we usually
3899 do not need to play with the valueizer. */
3900
3901 bool
3902 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3903 {
3904 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3905 }
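
/* Illustrative use: a typical caller only asks whether two trees
   compute the same value, e.g.

     if (operand_equal_p (arg0, arg1, 0))
       ... fold "x - x" to zero ...

   with nonzero FLAGS tightening or loosening the notion of equality.  */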
3906
3907 namespace inchash
3908 {
3909
3910 /* Generate a hash value for an expression. This can be used iteratively
3911 by passing a previous result as the HSTATE argument.
3912
3913 This function is intended to produce the same hash for expressions which
3914 would compare equal using operand_equal_p. */
3915 void
3916 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
3917 {
3918 default_compare_instance.hash_operand (t, hstate, flags);
3919 }
3920
3921 }
3922 \f
3923 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3924 with a different signedness or a narrower precision. */
3925
3926 static bool
3927 operand_equal_for_comparison_p (tree arg0, tree arg1)
3928 {
3929 if (operand_equal_p (arg0, arg1, 0))
3930 return true;
3931
3932 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3933 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3934 return false;
3935
3936 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3937 and see if the inner values are the same. This removes any
3938 signedness comparison, which doesn't matter here. */
3939 tree op0 = arg0;
3940 tree op1 = arg1;
3941 STRIP_NOPS (op0);
3942 STRIP_NOPS (op1);
3943 if (operand_equal_p (op0, op1, 0))
3944 return true;
3945
3946 /* Discard a single widening conversion from ARG1 and see if the inner
3947 value is the same as ARG0. */
3948 if (CONVERT_EXPR_P (arg1)
3949 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3950 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3951 < TYPE_PRECISION (TREE_TYPE (arg1))
3952 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3953 return true;
3954
3955 return false;
3956 }
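
/* For instance, given "int i", the trees I and (long) I are considered
   equal for comparison purposes: the widening conversion on the second
   argument is stripped before the inner operands are compared.  */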
3957 \f
3958 /* See if ARG is an expression that is either a comparison or is performing
3959 arithmetic on comparisons. The comparisons must only be comparing
3960 two different values, which will be stored in *CVAL1 and *CVAL2; if
3961 they are nonzero it means that some operands have already been found.
3962 No variables may be used anywhere else in the expression except in the
3963 comparisons.
3964
3965 If this is true, return true. Otherwise, return false. */
3966
3967 static bool
3968 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3969 {
3970 enum tree_code code = TREE_CODE (arg);
3971 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3972
3973 /* We can handle some of the tcc_expression cases here. */
3974 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3975 tclass = tcc_unary;
3976 else if (tclass == tcc_expression
3977 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3978 || code == COMPOUND_EXPR))
3979 tclass = tcc_binary;
3980
3981 switch (tclass)
3982 {
3983 case tcc_unary:
3984 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3985
3986 case tcc_binary:
3987 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3988 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
3989
3990 case tcc_constant:
3991 return true;
3992
3993 case tcc_expression:
3994 if (code == COND_EXPR)
3995 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3996 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
3997 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
3998 return false;
3999
4000 case tcc_comparison:
4001 /* First see if we can handle the first operand, then the second. For
4002 the second operand, we know *CVAL1 can't be zero. It must be that
4003 one side of the comparison is each of the values; test for the
4004 case where this isn't true by failing if the two operands
4005 are the same. */
4006
4007 if (operand_equal_p (TREE_OPERAND (arg, 0),
4008 TREE_OPERAND (arg, 1), 0))
4009 return false;
4010
4011 if (*cval1 == 0)
4012 *cval1 = TREE_OPERAND (arg, 0);
4013 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4014 ;
4015 else if (*cval2 == 0)
4016 *cval2 = TREE_OPERAND (arg, 0);
4017 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4018 ;
4019 else
4020 return false;
4021
4022 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4023 ;
4024 else if (*cval2 == 0)
4025 *cval2 = TREE_OPERAND (arg, 1);
4026 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4027 ;
4028 else
4029 return false;
4030
4031 return true;
4032
4033 default:
4034 return false;
4035 }
4036 }
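
/* For example, "x < y || y == x" satisfies this predicate, setting
   *CVAL1 = x and *CVAL2 = y, while "x < y || y == z" does not, since
   three distinct values take part in the comparisons.  */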
4037 \f
4038 /* ARG is a tree that is known to contain just arithmetic operations and
4039 comparisons. Evaluate the operations in the tree substituting NEW0 for
4040 any occurrence of OLD0 as an operand of a comparison and likewise for
4041 NEW1 and OLD1. */
4042
4043 static tree
4044 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4045 tree old1, tree new1)
4046 {
4047 tree type = TREE_TYPE (arg);
4048 enum tree_code code = TREE_CODE (arg);
4049 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4050
4051 /* We can handle some of the tcc_expression cases here. */
4052 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4053 tclass = tcc_unary;
4054 else if (tclass == tcc_expression
4055 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4056 tclass = tcc_binary;
4057
4058 switch (tclass)
4059 {
4060 case tcc_unary:
4061 return fold_build1_loc (loc, code, type,
4062 eval_subst (loc, TREE_OPERAND (arg, 0),
4063 old0, new0, old1, new1));
4064
4065 case tcc_binary:
4066 return fold_build2_loc (loc, code, type,
4067 eval_subst (loc, TREE_OPERAND (arg, 0),
4068 old0, new0, old1, new1),
4069 eval_subst (loc, TREE_OPERAND (arg, 1),
4070 old0, new0, old1, new1));
4071
4072 case tcc_expression:
4073 switch (code)
4074 {
4075 case SAVE_EXPR:
4076 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4077 old1, new1);
4078
4079 case COMPOUND_EXPR:
4080 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4081 old1, new1);
4082
4083 case COND_EXPR:
4084 return fold_build3_loc (loc, code, type,
4085 eval_subst (loc, TREE_OPERAND (arg, 0),
4086 old0, new0, old1, new1),
4087 eval_subst (loc, TREE_OPERAND (arg, 1),
4088 old0, new0, old1, new1),
4089 eval_subst (loc, TREE_OPERAND (arg, 2),
4090 old0, new0, old1, new1));
4091 default:
4092 break;
4093 }
4094 /* Fall through - ??? */
4095
4096 case tcc_comparison:
4097 {
4098 tree arg0 = TREE_OPERAND (arg, 0);
4099 tree arg1 = TREE_OPERAND (arg, 1);
4100
4101 /* We need to check both for exact (pointer) equality and for tree
4102 equality. Only the former can be true if the operand has a side
4103 effect, in which case we know the operand occurred exactly once. */
4104
4105 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4106 arg0 = new0;
4107 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4108 arg0 = new1;
4109
4110 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4111 arg1 = new0;
4112 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4113 arg1 = new1;
4114
4115 return fold_build2_loc (loc, code, type, arg0, arg1);
4116 }
4117
4118 default:
4119 return arg;
4120 }
4121 }
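
/* For example, with OLD0 = x, NEW0 = a, OLD1 = y, NEW1 = b, the tree
   "x < y ? x == y : x > y" is rebuilt as "a < b ? a == b : a > b",
   every comparison operand being substituted (and refolded) on the
   way back up.  */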
4122 \f
4123 /* Return a tree for the case when the result of an expression is RESULT
4124 converted to TYPE and OMITTED was previously an operand of the expression
4125 but is now not needed (e.g., we folded OMITTED * 0).
4126
4127 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4128 the conversion of RESULT to TYPE. */
4129
4130 tree
4131 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4132 {
4133 tree t = fold_convert_loc (loc, type, result);
4134
4135 /* If the resulting operand is an empty statement, just return the omitted
4136 statement casted to void. */
4137 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4138 return build1_loc (loc, NOP_EXPR, void_type_node,
4139 fold_ignored_result (omitted));
4140
4141 if (TREE_SIDE_EFFECTS (omitted))
4142 return build2_loc (loc, COMPOUND_EXPR, type,
4143 fold_ignored_result (omitted), t);
4144
4145 return non_lvalue_loc (loc, t);
4146 }
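
/* E.g. when "f () * 0" is folded, the constant 0 cannot simply replace
   the product if f has side effects; this returns the COMPOUND_EXPR
   "(f (), 0)", which still evaluates the omitted operand.  */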
4147
4148 /* Return a tree for the case when the result of an expression is RESULT
4149 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4150 of the expression but are now not needed.
4151
4152 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4153 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4154 evaluated before OMITTED2. Otherwise, if neither has side effects,
4155 just do the conversion of RESULT to TYPE. */
4156
4157 tree
4158 omit_two_operands_loc (location_t loc, tree type, tree result,
4159 tree omitted1, tree omitted2)
4160 {
4161 tree t = fold_convert_loc (loc, type, result);
4162
4163 if (TREE_SIDE_EFFECTS (omitted2))
4164 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4165 if (TREE_SIDE_EFFECTS (omitted1))
4166 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4167
4168 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4169 }
4170
4171 \f
4172 /* Return a simplified tree node for the truth-negation of ARG. This
4173 never alters ARG itself. We assume that ARG is an operation that
4174 returns a truth value (0 or 1).
4175
4176 FIXME: one would think we would fold the result, but it causes
4177 problems with the dominator optimizer. */
4178
4179 static tree
4180 fold_truth_not_expr (location_t loc, tree arg)
4181 {
4182 tree type = TREE_TYPE (arg);
4183 enum tree_code code = TREE_CODE (arg);
4184 location_t loc1, loc2;
4185
4186 /* If this is a comparison, we can simply invert it, except for
4187 floating-point non-equality comparisons, in which case we just
4188 enclose a TRUTH_NOT_EXPR around what we have. */
4189
4190 if (TREE_CODE_CLASS (code) == tcc_comparison)
4191 {
4192 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4193 if (FLOAT_TYPE_P (op_type)
4194 && flag_trapping_math
4195 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4196 && code != NE_EXPR && code != EQ_EXPR)
4197 return NULL_TREE;
4198
4199 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4200 if (code == ERROR_MARK)
4201 return NULL_TREE;
4202
4203 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4204 TREE_OPERAND (arg, 1));
4205 if (TREE_NO_WARNING (arg))
4206 TREE_NO_WARNING (ret) = 1;
4207 return ret;
4208 }
4209
4210 switch (code)
4211 {
4212 case INTEGER_CST:
4213 return constant_boolean_node (integer_zerop (arg), type);
4214
4215 case TRUTH_AND_EXPR:
4216 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4217 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4218 return build2_loc (loc, TRUTH_OR_EXPR, type,
4219 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4220 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4221
4222 case TRUTH_OR_EXPR:
4223 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4224 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4225 return build2_loc (loc, TRUTH_AND_EXPR, type,
4226 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4227 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4228
4229 case TRUTH_XOR_EXPR:
4230 /* Here we can invert either operand. We invert the first operand
4231 unless the second operand is a TRUTH_NOT_EXPR in which case our
4232 result is the XOR of the first operand with the inside of the
4233 negation of the second operand. */
4234
4235 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4236 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4237 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4238 else
4239 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4240 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4241 TREE_OPERAND (arg, 1));
4242
4243 case TRUTH_ANDIF_EXPR:
4244 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4245 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4246 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4247 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4248 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4249
4250 case TRUTH_ORIF_EXPR:
4251 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4252 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4253 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4254 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4255 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4256
4257 case TRUTH_NOT_EXPR:
4258 return TREE_OPERAND (arg, 0);
4259
4260 case COND_EXPR:
4261 {
4262 tree arg1 = TREE_OPERAND (arg, 1);
4263 tree arg2 = TREE_OPERAND (arg, 2);
4264
4265 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4266 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4267
4268 /* A COND_EXPR may have a throw as one operand, which
4269 then has void type. Just leave void operands
4270 as they are. */
4271 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4272 VOID_TYPE_P (TREE_TYPE (arg1))
4273 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4274 VOID_TYPE_P (TREE_TYPE (arg2))
4275 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4276 }
4277
4278 case COMPOUND_EXPR:
4279 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4280 return build2_loc (loc, COMPOUND_EXPR, type,
4281 TREE_OPERAND (arg, 0),
4282 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4283
4284 case NON_LVALUE_EXPR:
4285 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4286 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4287
4288 CASE_CONVERT:
4289 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4290 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4291
4292 /* fall through */
4293
4294 case FLOAT_EXPR:
4295 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4296 return build1_loc (loc, TREE_CODE (arg), type,
4297 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4298
4299 case BIT_AND_EXPR:
4300 if (!integer_onep (TREE_OPERAND (arg, 1)))
4301 return NULL_TREE;
4302 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4303
4304 case SAVE_EXPR:
4305 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4306
4307 case CLEANUP_POINT_EXPR:
4308 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4309 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4310 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4311
4312 default:
4313 return NULL_TREE;
4314 }
4315 }
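
/* The logical cases above are De Morgan's laws: !(a && b) becomes
   !a || !b, and !(a || b) becomes !a && !b.  A comparison is instead
   negated by inverting its code, e.g. !(a < b) to a >= b, except for
   floating-point operands under -ftrapping-math, where the inversion
   is not valid in the presence of NaNs.  */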
4316
4317 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4318 assume that ARG is an operation that returns a truth value (0 or 1
4319 for scalars, 0 or -1 for vectors). Return the folded expression if
4320 folding is successful. Otherwise, return NULL_TREE. */
4321
4322 static tree
4323 fold_invert_truthvalue (location_t loc, tree arg)
4324 {
4325 tree type = TREE_TYPE (arg);
4326 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4327 ? BIT_NOT_EXPR
4328 : TRUTH_NOT_EXPR,
4329 type, arg);
4330 }
4331
4332 /* Return a simplified tree node for the truth-negation of ARG. This
4333 never alters ARG itself. We assume that ARG is an operation that
4334 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4335
4336 tree
4337 invert_truthvalue_loc (location_t loc, tree arg)
4338 {
4339 if (TREE_CODE (arg) == ERROR_MARK)
4340 return arg;
4341
4342 tree type = TREE_TYPE (arg);
4343 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4344 ? BIT_NOT_EXPR
4345 : TRUTH_NOT_EXPR,
4346 type, arg);
4347 }
4348 \f
4349 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4350 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4351 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4352 is the original memory reference used to preserve the alias set of
4353 the access. */
4354
4355 static tree
4356 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4357 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4358 int unsignedp, int reversep)
4359 {
4360 tree result, bftype;
4361
4362 /* Try not to lose the access path. */
4363 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4364 {
4365 tree ninner = TREE_OPERAND (orig_inner, 0);
4366 machine_mode nmode;
4367 poly_int64 nbitsize, nbitpos;
4368 tree noffset;
4369 int nunsignedp, nreversep, nvolatilep = 0;
4370 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4371 &noffset, &nmode, &nunsignedp,
4372 &nreversep, &nvolatilep);
4373 if (base == inner
4374 && noffset == NULL_TREE
4375 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4376 && !reversep
4377 && !nreversep
4378 && !nvolatilep)
4379 {
4380 inner = ninner;
4381 bitpos -= nbitpos;
4382 }
4383 }
4384
4385 alias_set_type iset = get_alias_set (orig_inner);
4386 if (iset == 0 && get_alias_set (inner) != iset)
4387 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4388 build_fold_addr_expr (inner),
4389 build_int_cst (ptr_type_node, 0));
4390
4391 if (known_eq (bitpos, 0) && !reversep)
4392 {
4393 tree size = TYPE_SIZE (TREE_TYPE (inner));
4394 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4395 || POINTER_TYPE_P (TREE_TYPE (inner)))
4396 && tree_fits_shwi_p (size)
4397 && tree_to_shwi (size) == bitsize)
4398 return fold_convert_loc (loc, type, inner);
4399 }
4400
4401 bftype = type;
4402 if (TYPE_PRECISION (bftype) != bitsize
4403 || TYPE_UNSIGNED (bftype) == !unsignedp)
4404 bftype = build_nonstandard_integer_type (bitsize, 0);
4405
4406 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4407 bitsize_int (bitsize), bitsize_int (bitpos));
4408 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4409
4410 if (bftype != type)
4411 result = fold_convert_loc (loc, type, result);
4412
4413 return result;
4414 }
4415
4416 /* Optimize a bit-field compare.
4417
4418 There are two cases: First is a compare against a constant and the
4419 second is a comparison of two items where the fields are at the same
4420 bit position relative to the start of a chunk (byte, halfword, word)
4421 large enough to contain it. In these cases we can avoid the shift
4422 implicit in bitfield extractions.
4423
4424 For constants, we emit a compare of the shifted constant with the
4425 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4426 compared. For two fields at the same position, we do the ANDs with the
4427 similar mask and compare the result of the ANDs.
4428
4429 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4430 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4431 are the left and right operands of the comparison, respectively.
4432
4433 If the optimization described above can be done, we return the resulting
4434 tree. Otherwise we return zero. */
4435
4436 static tree
4437 optimize_bit_field_compare (location_t loc, enum tree_code code,
4438 tree compare_type, tree lhs, tree rhs)
4439 {
4440 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4441 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4442 tree type = TREE_TYPE (lhs);
4443 tree unsigned_type;
4444 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4445 machine_mode lmode, rmode;
4446 scalar_int_mode nmode;
4447 int lunsignedp, runsignedp;
4448 int lreversep, rreversep;
4449 int lvolatilep = 0, rvolatilep = 0;
4450 tree linner, rinner = NULL_TREE;
4451 tree mask;
4452 tree offset;
4453
4454 /* Get all the information about the extractions being done. If the bit size
4455 is the same as the size of the underlying object, we aren't doing an
4456 extraction at all and so can do nothing. We also don't want to
4457 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4458 then will no longer be able to replace it. */
4459 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4460 &lunsignedp, &lreversep, &lvolatilep);
4461 if (linner == lhs
4462 || !known_size_p (plbitsize)
4463 || !plbitsize.is_constant (&lbitsize)
4464 || !plbitpos.is_constant (&lbitpos)
4465 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4466 || offset != 0
4467 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4468 || lvolatilep)
4469 return 0;
4470
4471 if (const_p)
4472 rreversep = lreversep;
4473 else
4474 {
4475 /* If this is not a constant, we can only do something if bit positions,
4476 sizes, signedness and storage order are the same. */
4477 rinner
4478 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4479 &runsignedp, &rreversep, &rvolatilep);
4480
4481 if (rinner == rhs
4482 || maybe_ne (lbitpos, rbitpos)
4483 || maybe_ne (lbitsize, rbitsize)
4484 || lunsignedp != runsignedp
4485 || lreversep != rreversep
4486 || offset != 0
4487 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4488 || rvolatilep)
4489 return 0;
4490 }
4491
4492 /* Honor the C++ memory model and mimic what RTL expansion does. */
4493 poly_uint64 bitstart = 0;
4494 poly_uint64 bitend = 0;
4495 if (TREE_CODE (lhs) == COMPONENT_REF)
4496 {
4497 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4498 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4499 return 0;
4500 }
4501
4502 /* See if we can find a mode to refer to this field. We should be able to,
4503 but fail if we can't. */
4504 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4505 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4506 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4507 TYPE_ALIGN (TREE_TYPE (rinner))),
4508 BITS_PER_WORD, false, &nmode))
4509 return 0;
4510
4511 /* Set signed and unsigned types of the precision of this mode for the
4512 shifts below. */
4513 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4514
4515 /* Compute the bit position and size for the new reference and our offset
4516 within it. If the new reference is the same size as the original, we
4517 won't optimize anything, so return zero. */
4518 nbitsize = GET_MODE_BITSIZE (nmode);
4519 nbitpos = lbitpos & ~ (nbitsize - 1);
4520 lbitpos -= nbitpos;
4521 if (nbitsize == lbitsize)
4522 return 0;
4523
4524 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4525 lbitpos = nbitsize - lbitsize - lbitpos;
4526
4527 /* Make the mask to be used against the extracted field. */
4528 mask = build_int_cst_type (unsigned_type, -1);
4529 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4530 mask = const_binop (RSHIFT_EXPR, mask,
4531 size_int (nbitsize - lbitsize - lbitpos));
4532
4533 if (! const_p)
4534 {
4535 if (nbitpos < 0)
4536 return 0;
4537
4538 /* If not comparing with constant, just rework the comparison
4539 and return. */
4540 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4541 nbitsize, nbitpos, 1, lreversep);
4542 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4543 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4544 nbitsize, nbitpos, 1, rreversep);
4545 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4546 return fold_build2_loc (loc, code, compare_type, t1, t2);
4547 }
4548
4549 /* Otherwise, we are handling the constant case. See if the constant is too
4550 big for the field. Warn and return a tree for 0 (false) if so. We do
4551 this not only for its own sake, but to avoid having to test for this
4552 error case below. If we didn't, we might generate wrong code.
4553
4554 For unsigned fields, the constant shifted right by the field length should
4555 be all zero. For signed fields, the high-order bits should agree with
4556 the sign bit. */
4557
4558 if (lunsignedp)
4559 {
4560 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4561 {
4562 warning (0, "comparison is always %d due to width of bit-field",
4563 code == NE_EXPR);
4564 return constant_boolean_node (code == NE_EXPR, compare_type);
4565 }
4566 }
4567 else
4568 {
4569 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4570 if (tem != 0 && tem != -1)
4571 {
4572 warning (0, "comparison is always %d due to width of bit-field",
4573 code == NE_EXPR);
4574 return constant_boolean_node (code == NE_EXPR, compare_type);
4575 }
4576 }
4577
4578 if (nbitpos < 0)
4579 return 0;
4580
4581 /* Single-bit compares should always be against zero. */
4582 if (lbitsize == 1 && ! integer_zerop (rhs))
4583 {
4584 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4585 rhs = build_int_cst (type, 0);
4586 }
4587
4588 /* Make a new bitfield reference, shift the constant over the
4589 appropriate number of bits and mask it with the computed mask
4590 (in case this was a signed field). If we changed it, make a new one. */
4591 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4592 nbitsize, nbitpos, 1, lreversep);
4593
4594 rhs = const_binop (BIT_AND_EXPR,
4595 const_binop (LSHIFT_EXPR,
4596 fold_convert_loc (loc, unsigned_type, rhs),
4597 size_int (lbitpos)),
4598 mask);
4599
4600 lhs = build2_loc (loc, code, compare_type,
4601 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4602 return lhs;
4603 }
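
/* An illustrative sketch of the constant case, assuming a
   little-endian bit layout:

     struct S { unsigned int a : 3, b : 3; } s;
     ... s.b == 5 ...

   becomes, in effect,

     (WORD & 0x38) == (5 << 3)

   where WORD is a mode-sized load covering the field: the shifted
   constant is compared against the masked word, with no shift of the
   field itself at run time.  */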
4604 \f
4605 /* Subroutine for fold_truth_andor_1: decode a field reference.
4606
4607 If EXP is a comparison reference, we return the innermost reference.
4608
4609 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4610 set to the starting bit number.
4611
4612 If the innermost field can be completely contained in a mode-sized
4613 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4614
4615 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4616 otherwise it is not changed.
4617
4618 *PUNSIGNEDP is set to the signedness of the field.
4619
4620 *PREVERSEP is set to the storage order of the field.
4621
4622 *PMASK is set to the mask used. This is either contained in a
4623 BIT_AND_EXPR or derived from the width of the field.
4624
4625 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4626
4627 Return 0 if this is not a component reference or is one that we can't
4628 do anything with. */
4629
4630 static tree
4631 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4632 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4633 int *punsignedp, int *preversep, int *pvolatilep,
4634 tree *pmask, tree *pand_mask)
4635 {
4636 tree exp = *exp_;
4637 tree outer_type = 0;
4638 tree and_mask = 0;
4639 tree mask, inner, offset;
4640 tree unsigned_type;
4641 unsigned int precision;
4642
4643 /* All the optimizations using this function assume integer fields.
4644 There are problems with FP fields since the type_for_size call
4645 below can fail for, e.g., XFmode. */
4646 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4647 return NULL_TREE;
4648
4649 /* We are interested in the bare arrangement of bits, so strip everything
4650 that doesn't affect the machine mode. However, record the type of the
4651 outermost expression if it may matter below. */
4652 if (CONVERT_EXPR_P (exp)
4653 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4654 outer_type = TREE_TYPE (exp);
4655 STRIP_NOPS (exp);
4656
4657 if (TREE_CODE (exp) == BIT_AND_EXPR)
4658 {
4659 and_mask = TREE_OPERAND (exp, 1);
4660 exp = TREE_OPERAND (exp, 0);
4661 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4662 if (TREE_CODE (and_mask) != INTEGER_CST)
4663 return NULL_TREE;
4664 }
4665
4666 poly_int64 poly_bitsize, poly_bitpos;
4667 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4668 pmode, punsignedp, preversep, pvolatilep);
4669 if ((inner == exp && and_mask == 0)
4670 || !poly_bitsize.is_constant (pbitsize)
4671 || !poly_bitpos.is_constant (pbitpos)
4672 || *pbitsize < 0
4673 || offset != 0
4674 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4675 /* Reject out-of-bound accesses (PR79731). */
4676 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4677 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4678 *pbitpos + *pbitsize) < 0))
4679 return NULL_TREE;
4680
4681 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4682 if (unsigned_type == NULL_TREE)
4683 return NULL_TREE;
4684
4685 *exp_ = exp;
4686
4687 /* If the number of bits in the reference is the same as the bitsize of
4688 the outer type, then the outer type gives the signedness. Otherwise
4689 (in case of a small bitfield) the signedness is unchanged. */
4690 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4691 *punsignedp = TYPE_UNSIGNED (outer_type);
4692
4693 /* Compute the mask to access the bitfield. */
4694 precision = TYPE_PRECISION (unsigned_type);
4695
4696 mask = build_int_cst_type (unsigned_type, -1);
4697
4698 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4699 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4700
4701 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4702 if (and_mask != 0)
4703 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4704 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4705
4706 *pmask = mask;
4707 *pand_mask = and_mask;
4708 return inner;
4709 }
4710
4711 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4712 bit positions and MASK is SIGNED. */
4713
4714 static bool
4715 all_ones_mask_p (const_tree mask, unsigned int size)
4716 {
4717 tree type = TREE_TYPE (mask);
4718 unsigned int precision = TYPE_PRECISION (type);
4719
4720 /* If this function returns true when the type of the mask is
4721 UNSIGNED, then there will be errors. In particular see
4722 gcc.c-torture/execute/990326-1.c. There does not appear to be
4723 any documentation paper trail as to why this is so. But the
4724 pre-wide-int code worked with that restriction and it has been preserved
4725 here. */
4726 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4727 return false;
4728
4729 return wi::mask (size, false, precision) == wi::to_wide (mask);
4730 }
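
/* E.g. in a signed 8-bit type the constant 0x7f is an all-ones mask
   for SIZE == 7, whereas the same value in an unsigned type is
   rejected under the restriction described above.  */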
4731
4732 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4733 represents the sign bit of EXP's type. If EXP represents a sign
4734 or zero extension, also test VAL against the unextended type.
4735 The return value is the (sub)expression whose sign bit is VAL,
4736 or NULL_TREE otherwise. */
4737
4738 tree
4739 sign_bit_p (tree exp, const_tree val)
4740 {
4741 int width;
4742 tree t;
4743
4744 /* Tree EXP must have an integral type. */
4745 t = TREE_TYPE (exp);
4746 if (! INTEGRAL_TYPE_P (t))
4747 return NULL_TREE;
4748
4749 /* Tree VAL must be an integer constant. */
4750 if (TREE_CODE (val) != INTEGER_CST
4751 || TREE_OVERFLOW (val))
4752 return NULL_TREE;
4753
4754 width = TYPE_PRECISION (t);
4755 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4756 return exp;
4757
4758 /* Handle extension from a narrower type. */
4759 if (TREE_CODE (exp) == NOP_EXPR
4760 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4761 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4762
4763 return NULL_TREE;
4764 }
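
/* E.g. for a signed char EXP, VAL == -128 (bit pattern 0x80) is
   exactly the sign bit, so EXP itself is returned; for a widening
   conversion such as (int) c, the narrower operand C is tested
   against VAL recursively.  */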
4765
4766 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4767 to be evaluated unconditionally. */
4768
4769 static bool
4770 simple_operand_p (const_tree exp)
4771 {
4772 /* Strip any conversions that don't change the machine mode. */
4773 STRIP_NOPS (exp);
4774
4775 return (CONSTANT_CLASS_P (exp)
4776 || TREE_CODE (exp) == SSA_NAME
4777 || (DECL_P (exp)
4778 && ! TREE_ADDRESSABLE (exp)
4779 && ! TREE_THIS_VOLATILE (exp)
4780 && ! DECL_NONLOCAL (exp)
4781 /* Don't regard global variables as simple. They may be
4782 allocated in ways unknown to the compiler (shared memory,
4783 #pragma weak, etc). */
4784 && ! TREE_PUBLIC (exp)
4785 && ! DECL_EXTERNAL (exp)
4786 /* Weakrefs are not safe to be read, since they can be NULL.
4787 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4788 have DECL_WEAK flag set. */
4789 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4790 /* Loading a static variable is unduly expensive, but global
4791 registers aren't expensive. */
4792 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4793 }
4794
4795 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4796 to be evaluated unconditionally.
4797 In addition to simple_operand_p, we assume that comparisons, conversions,
4798 and logic-not operations are simple, if their operands are simple, too. */
4799
4800 static bool
4801 simple_operand_p_2 (tree exp)
4802 {
4803 enum tree_code code;
4804
4805 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4806 return false;
4807
4808 while (CONVERT_EXPR_P (exp))
4809 exp = TREE_OPERAND (exp, 0);
4810
4811 code = TREE_CODE (exp);
4812
4813 if (TREE_CODE_CLASS (code) == tcc_comparison)
4814 return (simple_operand_p (TREE_OPERAND (exp, 0))
4815 && simple_operand_p (TREE_OPERAND (exp, 1)));
4816
4817 if (code == TRUTH_NOT_EXPR)
4818 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4819
4820 return simple_operand_p (exp);
4821 }
4822
4823 \f
4824 /* The following functions are subroutines to fold_range_test and allow it to
4825 try to change a logical combination of comparisons into a range test.
4826
4827 For example, both
4828 X == 2 || X == 3 || X == 4 || X == 5
4829 and
4830 X >= 2 && X <= 5
4831 are converted to
4832 (unsigned) (X - 2) <= 3
4833
4834 We describe each set of comparisons as being either inside or outside
4835 a range, using a variable named like IN_P, and then describe the
4836 range with a lower and upper bound. If one of the bounds is omitted,
4837 it represents either the highest or lowest value of the type.
4838
4839 In the comments below, we represent a range by two numbers in brackets
4840 preceded by a "+" to designate being inside that range, or a "-" to
4841 designate being outside that range, so the condition can be inverted by
4842 flipping the prefix. An omitted bound is represented by a "-". For
4843 example, "- [-, 10]" means being outside the range starting at the lowest
4844 possible value and ending at 10, in other words, being greater than 10.
4845 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4846 always false.
4847
4848 We set up things so that the missing bounds are handled in a consistent
4849 manner so that neither a missing bound nor "true" or "false" needs to be
4850 handled using a special case. */
4851
4852 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4853 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4854 and UPPER1_P are nonzero if the respective argument is an upper bound
4855 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4856 must be specified for a comparison. ARG1 will be converted to ARG0's
4857 type if both are specified. */
4858
4859 static tree
4860 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4861 tree arg1, int upper1_p)
4862 {
4863 tree tem;
4864 int result;
4865 int sgn0, sgn1;
4866
4867 /* If neither arg represents infinity, do the normal operation.
4868 Else, if not a comparison, return infinity. Else handle the special
4869 comparison rules. Note that most of the cases below won't occur, but
4870 are handled for consistency. */
4871
4872 if (arg0 != 0 && arg1 != 0)
4873 {
4874 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4875 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4876 STRIP_NOPS (tem);
4877 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4878 }
4879
4880 if (TREE_CODE_CLASS (code) != tcc_comparison)
4881 return 0;
4882
4883 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4884 for neither. In real maths, we cannot assume open ended ranges are
4885 the same. But, this is computer arithmetic, where numbers are finite.
4886 We can therefore make the transformation of any unbounded range with
4887 the value Z, Z being greater than any representable number. This permits
4888 us to treat unbounded ranges as equal. */
4889 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4890 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4891 switch (code)
4892 {
4893 case EQ_EXPR:
4894 result = sgn0 == sgn1;
4895 break;
4896 case NE_EXPR:
4897 result = sgn0 != sgn1;
4898 break;
4899 case LT_EXPR:
4900 result = sgn0 < sgn1;
4901 break;
4902 case LE_EXPR:
4903 result = sgn0 <= sgn1;
4904 break;
4905 case GT_EXPR:
4906 result = sgn0 > sgn1;
4907 break;
4908 case GE_EXPR:
4909 result = sgn0 >= sgn1;
4910 break;
4911 default:
4912 gcc_unreachable ();
4913 }
4914
4915 return constant_boolean_node (result, type);
4916 }
4917 \f
4918 /* Helper routine for make_range. Perform one step for it, return
4919 new expression if the loop should continue or NULL_TREE if it should
4920 stop. */
4921
4922 tree
4923 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4924 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4925 bool *strict_overflow_p)
4926 {
4927 tree arg0_type = TREE_TYPE (arg0);
4928 tree n_low, n_high, low = *p_low, high = *p_high;
4929 int in_p = *p_in_p, n_in_p;
4930
4931 switch (code)
4932 {
4933 case TRUTH_NOT_EXPR:
4934 /* We can only do something if the range is testing for zero. */
4935 if (low == NULL_TREE || high == NULL_TREE
4936 || ! integer_zerop (low) || ! integer_zerop (high))
4937 return NULL_TREE;
4938 *p_in_p = ! in_p;
4939 return arg0;
4940
4941 case EQ_EXPR: case NE_EXPR:
4942 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4943 /* We can only do something if the range is testing for zero
4944 and if the second operand is an integer constant. Note that
4945 saying something is "in" the range we make is done by
4946 complementing IN_P since it will set in the initial case of
4947 being not equal to zero; "out" is leaving it alone. */
4948 if (low == NULL_TREE || high == NULL_TREE
4949 || ! integer_zerop (low) || ! integer_zerop (high)
4950 || TREE_CODE (arg1) != INTEGER_CST)
4951 return NULL_TREE;
4952
4953 switch (code)
4954 {
4955 case NE_EXPR: /* - [c, c] */
4956 low = high = arg1;
4957 break;
4958 case EQ_EXPR: /* + [c, c] */
4959 in_p = ! in_p, low = high = arg1;
4960 break;
4961 case GT_EXPR: /* - [-, c] */
4962 low = 0, high = arg1;
4963 break;
4964 case GE_EXPR: /* + [c, -] */
4965 in_p = ! in_p, low = arg1, high = 0;
4966 break;
4967 case LT_EXPR: /* - [c, -] */
4968 low = arg1, high = 0;
4969 break;
4970 case LE_EXPR: /* + [-, c] */
4971 in_p = ! in_p, low = 0, high = arg1;
4972 break;
4973 default:
4974 gcc_unreachable ();
4975 }
4976
4977 /* If this is an unsigned comparison, we also know that EXP is
4978 greater than or equal to zero. We base the range tests we make
4979 on that fact, and record it here so that we can parse existing
4980 range tests. We test arg0_type since often the return type
4981 of, e.g. EQ_EXPR, is boolean. */
4982 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4983 {
4984 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4985 in_p, low, high, 1,
4986 build_int_cst (arg0_type, 0),
4987 NULL_TREE))
4988 return NULL_TREE;
4989
4990 in_p = n_in_p, low = n_low, high = n_high;
4991
4992 /* If the high bound is missing, but we have a nonzero low
4993 bound, reverse the range so it goes from zero to the low bound
4994 minus 1. */
4995 if (high == 0 && low && ! integer_zerop (low))
4996 {
4997 in_p = ! in_p;
4998 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4999 build_int_cst (TREE_TYPE (low), 1), 0);
5000 low = build_int_cst (arg0_type, 0);
5001 }
5002 }
5003
5004 *p_low = low;
5005 *p_high = high;
5006 *p_in_p = in_p;
5007 return arg0;
5008
5009 case NEGATE_EXPR:
5010 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5011 low and high are non-NULL, then normalize will DTRT. */
5012 if (!TYPE_UNSIGNED (arg0_type)
5013 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5014 {
5015 if (low == NULL_TREE)
5016 low = TYPE_MIN_VALUE (arg0_type);
5017 if (high == NULL_TREE)
5018 high = TYPE_MAX_VALUE (arg0_type);
5019 }
5020
5021 /* (-x) IN [a,b] -> x in [-b, -a] */
5022 n_low = range_binop (MINUS_EXPR, exp_type,
5023 build_int_cst (exp_type, 0),
5024 0, high, 1);
5025 n_high = range_binop (MINUS_EXPR, exp_type,
5026 build_int_cst (exp_type, 0),
5027 0, low, 0);
5028 if (n_high != 0 && TREE_OVERFLOW (n_high))
5029 return NULL_TREE;
5030 goto normalize;
5031
5032 case BIT_NOT_EXPR:
5033 /* ~ X -> -X - 1 */
5034 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5035 build_int_cst (exp_type, 1));
5036
5037 case PLUS_EXPR:
5038 case MINUS_EXPR:
5039 if (TREE_CODE (arg1) != INTEGER_CST)
5040 return NULL_TREE;
5041
5042 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5043 move a constant to the other side. */
5044 if (!TYPE_UNSIGNED (arg0_type)
5045 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5046 return NULL_TREE;
5047
5048 /* If EXP is signed, any overflow in the computation is undefined,
5049 so we don't worry about it so long as our computations on
5050 the bounds don't overflow. For unsigned, overflow is defined
5051 and this is exactly the right thing. */
5052 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5053 arg0_type, low, 0, arg1, 0);
5054 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5055 arg0_type, high, 1, arg1, 0);
5056 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5057 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5058 return NULL_TREE;
5059
5060 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5061 *strict_overflow_p = true;
5062
5063 normalize:
5064 /* Check for an unsigned range which has wrapped around the maximum
5065 value thus making n_high < n_low, and normalize it. */
5066 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5067 {
5068 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5069 build_int_cst (TREE_TYPE (n_high), 1), 0);
5070 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5071 build_int_cst (TREE_TYPE (n_low), 1), 0);
5072
5073 /* If the range is of the form +/- [ x+1, x ], we won't
5074 be able to normalize it. But then, it represents the
5075 whole range or the empty set, so make it
5076 +/- [ -, - ]. */
5077 if (tree_int_cst_equal (n_low, low)
5078 && tree_int_cst_equal (n_high, high))
5079 low = high = 0;
5080 else
5081 in_p = ! in_p;
5082 }
5083 else
5084 low = n_low, high = n_high;
5085
5086 *p_low = low;
5087 *p_high = high;
5088 *p_in_p = in_p;
5089 return arg0;
5090
5091 CASE_CONVERT:
5092 case NON_LVALUE_EXPR:
5093 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5094 return NULL_TREE;
5095
5096 if (! INTEGRAL_TYPE_P (arg0_type)
5097 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5098 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5099 return NULL_TREE;
5100
5101 n_low = low, n_high = high;
5102
5103 if (n_low != 0)
5104 n_low = fold_convert_loc (loc, arg0_type, n_low);
5105
5106 if (n_high != 0)
5107 n_high = fold_convert_loc (loc, arg0_type, n_high);
5108
5109 /* If we're converting arg0 from an unsigned type to exp,
5110 a signed type, we will be doing the comparison as unsigned.
5111 The tests above have already verified that LOW and HIGH
5112 are both positive.
5113
5114 So we have to ensure that we will handle large unsigned
5115 values the same way that the current signed bounds treat
5116 negative values. */
5117
5118 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5119 {
5120 tree high_positive;
5121 tree equiv_type;
5122 /* For fixed-point modes, we need to pass the saturating flag
5123 as the 2nd parameter. */
5124 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5125 equiv_type
5126 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5127 TYPE_SATURATING (arg0_type));
5128 else
5129 equiv_type
5130 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5131
5132 /* A range without an upper bound is, naturally, unbounded.
5133 Since convert would have cropped a very large value, use
5134 the max value for the destination type. */
5135 high_positive
5136 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5137 : TYPE_MAX_VALUE (arg0_type);
5138
5139 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5140 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5141 fold_convert_loc (loc, arg0_type,
5142 high_positive),
5143 build_int_cst (arg0_type, 1));
5144
5145 /* If the low bound is specified, "and" the range with the
5146 range for which the original unsigned value will be
5147 positive. */
5148 if (low != 0)
5149 {
5150 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5151 1, fold_convert_loc (loc, arg0_type,
5152 integer_zero_node),
5153 high_positive))
5154 return NULL_TREE;
5155
5156 in_p = (n_in_p == in_p);
5157 }
5158 else
5159 {
5160 /* Otherwise, "or" the range with the range of the input
5161 that will be interpreted as negative. */
5162 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5163 1, fold_convert_loc (loc, arg0_type,
5164 integer_zero_node),
5165 high_positive))
5166 return NULL_TREE;
5167
5168 in_p = (in_p != n_in_p);
5169 }
5170 }
5171
5172 *p_low = n_low;
5173 *p_high = n_high;
5174 *p_in_p = in_p;
5175 return arg0;
5176
5177 default:
5178 return NULL_TREE;
5179 }
5180 }
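
/* A worked example for 32-bit unsigned x and "x + 3 < 10": the
   LT_EXPR step records - [10, -], which the unsigned handling above
   rewrites to + [0, 9] for "x + 3"; the PLUS_EXPR step then subtracts
   3 from both bounds and normalizes the wrapped range + [0xfffffffd, 6]
   to - [7, 0xfffffffc], i.e. x <= 6 or x >= 0xfffffffd.  */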
5181
5182 /* Given EXP, a logical expression, set the range it is testing into
5183 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5184 actually being tested. *PLOW and *PHIGH will be made of the same
5185 type as the returned expression. If EXP is not a comparison, we
5186 will most likely not be returning a useful value and range. Set
5187 *STRICT_OVERFLOW_P to true if the return value is only valid
5188 because signed overflow is undefined; otherwise, do not change
5189 *STRICT_OVERFLOW_P. */
5190
5191 tree
5192 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5193 bool *strict_overflow_p)
5194 {
5195 enum tree_code code;
5196 tree arg0, arg1 = NULL_TREE;
5197 tree exp_type, nexp;
5198 int in_p;
5199 tree low, high;
5200 location_t loc = EXPR_LOCATION (exp);
5201
5202 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5203 and see if we can refine the range. Some of the cases below may not
5204 happen, but it doesn't seem worth worrying about this. We "continue"
5205 the outer loop when we've changed something; otherwise we "break"
5206 the switch, which will "break" the while. */
5207
5208 in_p = 0;
5209 low = high = build_int_cst (TREE_TYPE (exp), 0);
5210
5211 while (1)
5212 {
5213 code = TREE_CODE (exp);
5214 exp_type = TREE_TYPE (exp);
5215 arg0 = NULL_TREE;
5216
5217 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5218 {
5219 if (TREE_OPERAND_LENGTH (exp) > 0)
5220 arg0 = TREE_OPERAND (exp, 0);
5221 if (TREE_CODE_CLASS (code) == tcc_binary
5222 || TREE_CODE_CLASS (code) == tcc_comparison
5223 || (TREE_CODE_CLASS (code) == tcc_expression
5224 && TREE_OPERAND_LENGTH (exp) > 1))
5225 arg1 = TREE_OPERAND (exp, 1);
5226 }
5227 if (arg0 == NULL_TREE)
5228 break;
5229
5230 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5231 &high, &in_p, strict_overflow_p);
5232 if (nexp == NULL_TREE)
5233 break;
5234 exp = nexp;
5235 }
5236
5237 /* If EXP is a constant, we can evaluate whether this is true or false. */
5238 if (TREE_CODE (exp) == INTEGER_CST)
5239 {
5240 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5241 exp, 0, low, 0))
5242 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5243 exp, 1, high, 1)));
5244 low = high = 0;
5245 exp = 0;
5246 }
5247
5248 *pin_p = in_p, *plow = low, *phigh = high;
5249 return exp;
5250 }
5251
5252 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5253 a bitwise check i.e. when
5254 LOW == 0xXX...X00...0
5255 HIGH == 0xXX...X11...1
5256 Return corresponding mask in MASK and stem in VALUE. */
5257
5258 static bool
5259 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5260 tree *value)
5261 {
5262 if (TREE_CODE (low) != INTEGER_CST
5263 || TREE_CODE (high) != INTEGER_CST)
5264 return false;
5265
5266 unsigned prec = TYPE_PRECISION (type);
5267 wide_int lo = wi::to_wide (low, prec);
5268 wide_int hi = wi::to_wide (high, prec);
5269
5270 wide_int end_mask = lo ^ hi;
5271 if ((end_mask & (end_mask + 1)) != 0
5272 || (lo & end_mask) != 0)
5273 return false;
5274
5275 wide_int stem_mask = ~end_mask;
5276 wide_int stem = lo & stem_mask;
5277 if (stem != (hi & stem_mask))
5278 return false;
5279
5280 *mask = wide_int_to_tree (type, stem_mask);
5281 *value = wide_int_to_tree (type, stem);
5282
5283 return true;
5284 }
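
/* Illustrative stand-alone model (not part of the original source) of
   the predicate above, specialized to 32-bit unsigned values.  For
   LOW == 0x20 and HIGH == 0x3f it yields *MASK == ~0x1f and
   *VALUE == 0x20, so "0x20 <= x && x <= 0x3f" can be tested as
   "(x & ~0x1f) == 0x20".  */
#if 0
static int
maskable_range_sketch (unsigned int lo, unsigned int hi,
                       unsigned int *mask, unsigned int *value)
{
  unsigned int end_mask = lo ^ hi;
  /* END_MASK must be a contiguous run of low-order ones, none of
     which are set in LO.  */
  if ((end_mask & (end_mask + 1)) != 0 || (lo & end_mask) != 0)
    return 0;
  unsigned int stem_mask = ~end_mask;
  /* LOW and HIGH must agree on the remaining stem bits.  */
  if ((lo & stem_mask) != (hi & stem_mask))
    return 0;
  *mask = stem_mask;
  *value = lo & stem_mask;
  return 1;
}
#endif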
5285 \f
5286 /* Helper routine for build_range_check and match.pd. Return the type to
5287 perform the check or NULL if it shouldn't be optimized. */
5288
5289 tree
5290 range_check_type (tree etype)
5291 {
5292 /* First make sure that arithmetic in this type is valid, then make sure
5293 that it wraps around. */
5294 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5295 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5296
5297 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5298 {
5299 tree utype, minv, maxv;
5300
5301 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5302 for the type in question, as we rely on this here. */
5303 utype = unsigned_type_for (etype);
5304 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5305 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5306 build_int_cst (TREE_TYPE (maxv), 1), 1);
5307 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5308
5309 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5310 minv, 1, maxv, 1)))
5311 etype = utype;
5312 else
5313 return NULL_TREE;
5314 }
5315 else if (POINTER_TYPE_P (etype))
5316 etype = unsigned_type_for (etype);
5317 return etype;
5318 }
5319
5320 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5321 type, TYPE, return an expression to test if EXP is in (or out of, depending
5322 on IN_P) the range. Return 0 if the test couldn't be created. */
5323
5324 tree
5325 build_range_check (location_t loc, tree type, tree exp, int in_p,
5326 tree low, tree high)
5327 {
5328 tree etype = TREE_TYPE (exp), mask, value;
5329
5330 /* Disable this optimization for function pointer expressions
5331 on targets that require function pointer canonicalization. */
5332 if (targetm.have_canonicalize_funcptr_for_compare ()
5333 && POINTER_TYPE_P (etype)
5334 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5335 return NULL_TREE;
5336
5337 if (! in_p)
5338 {
5339 value = build_range_check (loc, type, exp, 1, low, high);
5340 if (value != 0)
5341 return invert_truthvalue_loc (loc, value);
5342
5343 return 0;
5344 }
5345
5346 if (low == 0 && high == 0)
5347 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5348
5349 if (low == 0)
5350 return fold_build2_loc (loc, LE_EXPR, type, exp,
5351 fold_convert_loc (loc, etype, high));
5352
5353 if (high == 0)
5354 return fold_build2_loc (loc, GE_EXPR, type, exp,
5355 fold_convert_loc (loc, etype, low));
5356
5357 if (operand_equal_p (low, high, 0))
5358 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5359 fold_convert_loc (loc, etype, low));
5360
5361 if (TREE_CODE (exp) == BIT_AND_EXPR
5362 && maskable_range_p (low, high, etype, &mask, &value))
5363 return fold_build2_loc (loc, EQ_EXPR, type,
5364 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5365 exp, mask),
5366 value);
5367
5368 if (integer_zerop (low))
5369 {
5370 if (! TYPE_UNSIGNED (etype))
5371 {
5372 etype = unsigned_type_for (etype);
5373 high = fold_convert_loc (loc, etype, high);
5374 exp = fold_convert_loc (loc, etype, exp);
5375 }
5376 return build_range_check (loc, type, exp, 1, 0, high);
5377 }
5378
5379 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5380 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5381 {
5382 int prec = TYPE_PRECISION (etype);
5383
5384 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5385 {
5386 if (TYPE_UNSIGNED (etype))
5387 {
5388 tree signed_etype = signed_type_for (etype);
5389 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5390 etype
5391 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5392 else
5393 etype = signed_etype;
5394 exp = fold_convert_loc (loc, etype, exp);
5395 }
5396 return fold_build2_loc (loc, GT_EXPR, type, exp,
5397 build_int_cst (etype, 0));
5398 }
5399 }
5400
5401 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5402 This requires wrap-around arithmetic for the type of the expression. */
5403 etype = range_check_type (etype);
5404 if (etype == NULL_TREE)
5405 return NULL_TREE;
5406
5407 high = fold_convert_loc (loc, etype, high);
5408 low = fold_convert_loc (loc, etype, low);
5409 exp = fold_convert_loc (loc, etype, exp);
5410
5411 value = const_binop (MINUS_EXPR, high, low);
5412
5413 if (value != 0 && !TREE_OVERFLOW (value))
5414 return build_range_check (loc, type,
5415 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5416 1, build_int_cst (etype, 0), value);
5417
5418 return 0;
5419 }
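/* Worked example (hypothetical unsigned int C): for the test
   C >= 10 && C <= 25, none of the special cases above apply, so the
   final transformation produces the single comparison
   (C - 10) <= 15.  */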
5420 \f
5421 /* Return the predecessor of VAL in its type, handling the infinite case. */
5422
5423 static tree
5424 range_predecessor (tree val)
5425 {
5426 tree type = TREE_TYPE (val);
5427
5428 if (INTEGRAL_TYPE_P (type)
5429 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5430 return 0;
5431 else
5432 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5433 build_int_cst (TREE_TYPE (val), 1), 0);
5434 }
5435
5436 /* Return the successor of VAL in its type, handling the infinite case. */
5437
5438 static tree
5439 range_successor (tree val)
5440 {
5441 tree type = TREE_TYPE (val);
5442
5443 if (INTEGRAL_TYPE_P (type)
5444 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5445 return 0;
5446 else
5447 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5448 build_int_cst (TREE_TYPE (val), 1), 0);
5449 }
5450
5451 /* Given two ranges, see if we can merge them into one. Return 1 if we
5452 can, 0 if we can't. Set the output range into the specified parameters. */
5453
5454 bool
5455 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5456 tree high0, int in1_p, tree low1, tree high1)
5457 {
5458 int no_overlap;
5459 int subset;
5460 int temp;
5461 tree tem;
5462 int in_p;
5463 tree low, high;
5464 int lowequal = ((low0 == 0 && low1 == 0)
5465 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5466 low0, 0, low1, 0)));
5467 int highequal = ((high0 == 0 && high1 == 0)
5468 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5469 high0, 1, high1, 1)));
5470
5471 /* Make range 0 be the range that starts first, or ends last if they
5472 start at the same value. Swap them if that isn't the case. */
5473 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5474 low0, 0, low1, 0))
5475 || (lowequal
5476 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5477 high1, 1, high0, 1))))
5478 {
5479 temp = in0_p, in0_p = in1_p, in1_p = temp;
5480 tem = low0, low0 = low1, low1 = tem;
5481 tem = high0, high0 = high1, high1 = tem;
5482 }
5483
5484 /* If the second range is != high1 where high1 is the maximum value
5485 of its type, first try merging with the < high1 range. */
5486 if (low1
5487 && high1
5488 && TREE_CODE (low1) == INTEGER_CST
5489 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5490 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5491 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5492 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5493 && operand_equal_p (low1, high1, 0))
5494 {
5495 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5496 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5497 !in1_p, NULL_TREE, range_predecessor (low1)))
5498 return true;
5499 /* Similarly, for the second range != low1 where low1 is the minimum
5500 value of its type, first try merging with the > low1 range. */
5501 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5502 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5503 !in1_p, range_successor (low1), NULL_TREE))
5504 return true;
5505 }
5506
5507 /* Now flag two cases, whether the ranges are disjoint or whether the
5508 second range is totally subsumed in the first. Note that the tests
5509 below are simplified by the ones above. */
5510 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5511 high0, 1, low1, 0));
5512 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5513 high1, 1, high0, 1));
5514
5515 /* We now have four cases, depending on whether we are including or
5516 excluding the two ranges. */
5517 if (in0_p && in1_p)
5518 {
5519 /* If they don't overlap, the result is false. If the second range
5520 is a subset, it is the result. Otherwise, the range is from the start
5521 of the second to the end of the first. */
5522 if (no_overlap)
5523 in_p = 0, low = high = 0;
5524 else if (subset)
5525 in_p = 1, low = low1, high = high1;
5526 else
5527 in_p = 1, low = low1, high = high0;
5528 }
5529
5530 else if (in0_p && ! in1_p)
5531 {
5532 /* If they don't overlap, the result is the first range. If they are
5533 equal, the result is false. If the second range is a subset of the
5534 first, and the ranges begin at the same place, we go from just after
5535 the end of the second range to the end of the first. If the second
5536 range is not a subset of the first, or if it is a subset and both
5537 ranges end at the same place, the range starts at the start of the
5538 first range and ends just before the second range.
5539 Otherwise, we can't describe this as a single range. */
5540 if (no_overlap)
5541 in_p = 1, low = low0, high = high0;
5542 else if (lowequal && highequal)
5543 in_p = 0, low = high = 0;
5544 else if (subset && lowequal)
5545 {
5546 low = range_successor (high1);
5547 high = high0;
5548 in_p = 1;
5549 if (low == 0)
5550 {
5551 /* We are in the weird situation where high0 > high1 but
5552 high1 has no successor. Punt. */
5553 return 0;
5554 }
5555 }
5556 else if (! subset || highequal)
5557 {
5558 low = low0;
5559 high = range_predecessor (low1);
5560 in_p = 1;
5561 if (high == 0)
5562 {
5563 /* low0 < low1 but low1 has no predecessor. Punt. */
5564 return 0;
5565 }
5566 }
5567 else
5568 return 0;
5569 }
5570
5571 else if (! in0_p && in1_p)
5572 {
5573 /* If they don't overlap, the result is the second range. If the second
5574 is a subset of the first, the result is false. Otherwise,
5575 the range starts just after the first range and ends at the
5576 end of the second. */
5577 if (no_overlap)
5578 in_p = 1, low = low1, high = high1;
5579 else if (subset || highequal)
5580 in_p = 0, low = high = 0;
5581 else
5582 {
5583 low = range_successor (high0);
5584 high = high1;
5585 in_p = 1;
5586 if (low == 0)
5587 {
5588 /* high1 > high0 but high0 has no successor. Punt. */
5589 return 0;
5590 }
5591 }
5592 }
5593
5594 else
5595 {
5596 /* The case where we are excluding both ranges. Here the complex case
5597 is if they don't overlap. In that case, the only time we have a
5598 range is if they are adjacent. If the second is a subset of the
5599 first, the result is the first. Otherwise, the range to exclude
5600 starts at the beginning of the first range and ends at the end of the
5601 second. */
5602 if (no_overlap)
5603 {
5604 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5605 range_successor (high0),
5606 1, low1, 0)))
5607 in_p = 0, low = low0, high = high1;
5608 else
5609 {
5610 /* Canonicalize - [min, x] into - [-, x]. */
5611 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5612 switch (TREE_CODE (TREE_TYPE (low0)))
5613 {
5614 case ENUMERAL_TYPE:
5615 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5616 GET_MODE_BITSIZE
5617 (TYPE_MODE (TREE_TYPE (low0)))))
5618 break;
5619 /* FALLTHROUGH */
5620 case INTEGER_TYPE:
5621 if (tree_int_cst_equal (low0,
5622 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5623 low0 = 0;
5624 break;
5625 case POINTER_TYPE:
5626 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5627 && integer_zerop (low0))
5628 low0 = 0;
5629 break;
5630 default:
5631 break;
5632 }
5633
5634 /* Canonicalize - [x, max] into - [x, -]. */
5635 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5636 switch (TREE_CODE (TREE_TYPE (high1)))
5637 {
5638 case ENUMERAL_TYPE:
5639 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5640 GET_MODE_BITSIZE
5641 (TYPE_MODE (TREE_TYPE (high1)))))
5642 break;
5643 /* FALLTHROUGH */
5644 case INTEGER_TYPE:
5645 if (tree_int_cst_equal (high1,
5646 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5647 high1 = 0;
5648 break;
5649 case POINTER_TYPE:
5650 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5651 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5652 high1, 1,
5653 build_int_cst (TREE_TYPE (high1), 1),
5654 1)))
5655 high1 = 0;
5656 break;
5657 default:
5658 break;
5659 }
5660
5661 /* The ranges might also be adjacent between the maximum and
5662 minimum values of the given type. For
5663 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5664 return + [x + 1, y - 1]. */
5665 if (low0 == 0 && high1 == 0)
5666 {
5667 low = range_successor (high0);
5668 high = range_predecessor (low1);
5669 if (low == 0 || high == 0)
5670 return 0;
5671
5672 in_p = 1;
5673 }
5674 else
5675 return 0;
5676 }
5677 }
5678 else if (subset)
5679 in_p = 0, low = low0, high = high0;
5680 else
5681 in_p = 0, low = low0, high = high1;
5682 }
5683
5684 *pin_p = in_p, *plow = low, *phigh = high;
5685 return 1;
5686 }
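/* Illustrative examples: merging the "in" ranges [2, 10] and [5, 20]
   hits the overlapping, non-subset case and yields the "in" range
   [5, 10]; merging the "in" range [2, 10] with the "out" complement
   of [5, 20] yields the "in" range [2, 4].  */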
5687 \f
5688
5689 /* Subroutine of fold, looking inside expressions of the form
5690 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5691 of the COND_EXPR. This function is being used also to optimize
5692 A op B ? C : A, by reversing the comparison first.
5693
5694 Return a folded expression whose code is not a COND_EXPR
5695 anymore, or NULL_TREE if no folding opportunity is found. */
5696
5697 static tree
5698 fold_cond_expr_with_comparison (location_t loc, tree type,
5699 tree arg0, tree arg1, tree arg2)
5700 {
5701 enum tree_code comp_code = TREE_CODE (arg0);
5702 tree arg00 = TREE_OPERAND (arg0, 0);
5703 tree arg01 = TREE_OPERAND (arg0, 1);
5704 tree arg1_type = TREE_TYPE (arg1);
5705 tree tem;
5706
5707 STRIP_NOPS (arg1);
5708 STRIP_NOPS (arg2);
5709
5710 /* If we have A op 0 ? A : -A, consider applying the following
5711 transformations:
5712
5713 A == 0? A : -A same as -A
5714 A != 0? A : -A same as A
5715 A >= 0? A : -A same as abs (A)
5716 A > 0? A : -A same as abs (A)
5717 A <= 0? A : -A same as -abs (A)
5718 A < 0? A : -A same as -abs (A)
5719
5720 None of these transformations work for modes with signed
5721 zeros. If A is +/-0, the first two transformations will
5722 change the sign of the result (from +0 to -0, or vice
5723 versa). The last four will fix the sign of the result,
5724 even though the original expressions could be positive or
5725 negative, depending on the sign of A.
5726
5727 Note that all these transformations are correct if A is
5728 NaN, since the two alternatives (A and -A) are also NaNs. */
5729 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5730 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5731 ? real_zerop (arg01)
5732 : integer_zerop (arg01))
5733 && ((TREE_CODE (arg2) == NEGATE_EXPR
5734 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5735 /* In the case that A is of the form X-Y, '-A' (arg2) may
5736 have already been folded to Y-X, check for that. */
5737 || (TREE_CODE (arg1) == MINUS_EXPR
5738 && TREE_CODE (arg2) == MINUS_EXPR
5739 && operand_equal_p (TREE_OPERAND (arg1, 0),
5740 TREE_OPERAND (arg2, 1), 0)
5741 && operand_equal_p (TREE_OPERAND (arg1, 1),
5742 TREE_OPERAND (arg2, 0), 0))))
5743 switch (comp_code)
5744 {
5745 case EQ_EXPR:
5746 case UNEQ_EXPR:
5747 tem = fold_convert_loc (loc, arg1_type, arg1);
5748 return fold_convert_loc (loc, type, negate_expr (tem));
5749 case NE_EXPR:
5750 case LTGT_EXPR:
5751 return fold_convert_loc (loc, type, arg1);
5752 case UNGE_EXPR:
5753 case UNGT_EXPR:
5754 if (flag_trapping_math)
5755 break;
5756 /* Fall through. */
5757 case GE_EXPR:
5758 case GT_EXPR:
5759 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5760 break;
5761 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5762 return fold_convert_loc (loc, type, tem);
5763 case UNLE_EXPR:
5764 case UNLT_EXPR:
5765 if (flag_trapping_math)
5766 break;
5767 /* FALLTHRU */
5768 case LE_EXPR:
5769 case LT_EXPR:
5770 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5771 break;
5772 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5773 return negate_expr (fold_convert_loc (loc, type, tem));
5774 default:
5775 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5776 break;
5777 }
5778
5779 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5780 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5781 both transformations are correct when A is NaN: A != 0
5782 is then true, and A == 0 is false. */
5783
5784 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5785 && integer_zerop (arg01) && integer_zerop (arg2))
5786 {
5787 if (comp_code == NE_EXPR)
5788 return fold_convert_loc (loc, type, arg1);
5789 else if (comp_code == EQ_EXPR)
5790 return build_zero_cst (type);
5791 }
5792
5793 /* Try some transformations of A op B ? A : B.
5794
5795 A == B? A : B same as B
5796 A != B? A : B same as A
5797 A >= B? A : B same as max (A, B)
5798 A > B? A : B same as max (B, A)
5799 A <= B? A : B same as min (A, B)
5800 A < B? A : B same as min (B, A)
5801
5802 As above, these transformations don't work in the presence
5803 of signed zeros. For example, if A and B are zeros of
5804 opposite sign, the first two transformations will change
5805 the sign of the result. In the last four, the original
5806 expressions give different results for (A=+0, B=-0) and
5807 (A=-0, B=+0), but the transformed expressions do not.
5808
5809 The first two transformations are correct if either A or B
5810 is a NaN. In the first transformation, the condition will
5811 be false, and B will indeed be chosen. In the case of the
5812 second transformation, the condition A != B will be true,
5813 and A will be chosen.
5814
5815 The conversions to max() and min() are not correct if B is
5816 a number and A is not. The conditions in the original
5817 expressions will be false, so all four give B. The min()
5818 and max() versions would give a NaN instead. */
5819 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5820 && operand_equal_for_comparison_p (arg01, arg2)
5821 /* Avoid these transformations if the COND_EXPR may be used
5822 as an lvalue in the C++ front-end. PR c++/19199. */
5823 && (in_gimple_form
5824 || VECTOR_TYPE_P (type)
5825 || (! lang_GNU_CXX ()
5826 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5827 || ! maybe_lvalue_p (arg1)
5828 || ! maybe_lvalue_p (arg2)))
5829 {
5830 tree comp_op0 = arg00;
5831 tree comp_op1 = arg01;
5832 tree comp_type = TREE_TYPE (comp_op0);
5833
5834 switch (comp_code)
5835 {
5836 case EQ_EXPR:
5837 return fold_convert_loc (loc, type, arg2);
5838 case NE_EXPR:
5839 return fold_convert_loc (loc, type, arg1);
5840 case LE_EXPR:
5841 case LT_EXPR:
5842 case UNLE_EXPR:
5843 case UNLT_EXPR:
5844 /* In C++ a ?: expression can be an lvalue, so put the
5845 operand which will be used if they are equal first
5846 so that we can convert this back to the
5847 corresponding COND_EXPR. */
5848 if (!HONOR_NANS (arg1))
5849 {
5850 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5851 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5852 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5853 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5854 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5855 comp_op1, comp_op0);
5856 return fold_convert_loc (loc, type, tem);
5857 }
5858 break;
5859 case GE_EXPR:
5860 case GT_EXPR:
5861 case UNGE_EXPR:
5862 case UNGT_EXPR:
5863 if (!HONOR_NANS (arg1))
5864 {
5865 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5866 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5867 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5868 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5869 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5870 comp_op1, comp_op0);
5871 return fold_convert_loc (loc, type, tem);
5872 }
5873 break;
5874 case UNEQ_EXPR:
5875 if (!HONOR_NANS (arg1))
5876 return fold_convert_loc (loc, type, arg2);
5877 break;
5878 case LTGT_EXPR:
5879 if (!HONOR_NANS (arg1))
5880 return fold_convert_loc (loc, type, arg1);
5881 break;
5882 default:
5883 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5884 break;
5885 }
5886 }
5887
5888 return NULL_TREE;
5889 }
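/* Typical folds performed above, assuming neither NaNs nor signed
   zeros need honoring:
   x < y ? x : y --> MIN_EXPR <x, y>
   x >= y ? x : y --> MAX_EXPR <x, y>
   x >= 0 ? x : -x --> ABS_EXPR <x> (signed x only).  */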
5890
5891
5892 \f
5893 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5894 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5895 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5896 false) >= 2)
5897 #endif
5898
5899 /* EXP is some logical combination of boolean tests. See if we can
5900 merge it into some range test. Return the new tree if so. */
5901
5902 static tree
5903 fold_range_test (location_t loc, enum tree_code code, tree type,
5904 tree op0, tree op1)
5905 {
5906 int or_op = (code == TRUTH_ORIF_EXPR
5907 || code == TRUTH_OR_EXPR);
5908 int in0_p, in1_p, in_p;
5909 tree low0, low1, low, high0, high1, high;
5910 bool strict_overflow_p = false;
5911 tree tem, lhs, rhs;
5912 const char * const warnmsg = G_("assuming signed overflow does not occur "
5913 "when simplifying range test");
5914
5915 if (!INTEGRAL_TYPE_P (type))
5916 return 0;
5917
5918 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5919 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5920
5921 /* If this is an OR operation, invert both sides; we will invert
5922 again at the end. */
5923 if (or_op)
5924 in0_p = ! in0_p, in1_p = ! in1_p;
5925
5926 /* If both expressions are the same, if we can merge the ranges, and we
5927 can build the range test, return it or its inversion. If one of the
5928 ranges is always true or always false, consider it to be the same
5929 expression as the other. */
5930 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5931 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5932 in1_p, low1, high1)
5933 && (tem = (build_range_check (loc, type,
5934 lhs != 0 ? lhs
5935 : rhs != 0 ? rhs : integer_zero_node,
5936 in_p, low, high))) != 0)
5937 {
5938 if (strict_overflow_p)
5939 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5940 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5941 }
5942
5943 /* On machines where the branch cost is expensive, if this is a
5944 short-circuited branch and the underlying object on both sides
5945 is the same, make a non-short-circuit operation. */
5946 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
5947 if (param_logical_op_non_short_circuit != -1)
5948 logical_op_non_short_circuit
5949 = param_logical_op_non_short_circuit;
5950 if (logical_op_non_short_circuit
5951 && !flag_sanitize_coverage
5952 && lhs != 0 && rhs != 0
5953 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5954 && operand_equal_p (lhs, rhs, 0))
5955 {
5956 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5957 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5958 which cases we can't do this. */
5959 if (simple_operand_p (lhs))
5960 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5961 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5962 type, op0, op1);
5963
5964 else if (!lang_hooks.decls.global_bindings_p ()
5965 && !CONTAINS_PLACEHOLDER_P (lhs))
5966 {
5967 tree common = save_expr (lhs);
5968
5969 if ((lhs = build_range_check (loc, type, common,
5970 or_op ? ! in0_p : in0_p,
5971 low0, high0)) != 0
5972 && (rhs = build_range_check (loc, type, common,
5973 or_op ? ! in1_p : in1_p,
5974 low1, high1)) != 0)
5975 {
5976 if (strict_overflow_p)
5977 fold_overflow_warning (warnmsg,
5978 WARN_STRICT_OVERFLOW_COMPARISON);
5979 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5980 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5981 type, lhs, rhs);
5982 }
5983 }
5984 }
5985
5986 return 0;
5987 }
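/* Illustrative example: in "ch >= '0' && ch <= '9'" both operands test
   the same variable, so merge_ranges combines them into the single
   range ['0', '9'] and build_range_check emits the equivalent of
   (unsigned) (ch - '0') <= 9 -- one comparison instead of two.  */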
5988 \f
5989 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5990 bit value. Arrange things so the extra bits will be set to zero if and
5991 only if C is sign-extended to its full width. If MASK is nonzero,
5992 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5993
5994 static tree
5995 unextend (tree c, int p, int unsignedp, tree mask)
5996 {
5997 tree type = TREE_TYPE (c);
5998 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5999 tree temp;
6000
6001 if (p == modesize || unsignedp)
6002 return c;
6003
6004 /* We work by getting just the sign bit into the low-order bit, then
6005 into the high-order bit, then sign-extend. We then XOR that value
6006 with C. */
6007 temp = build_int_cst (TREE_TYPE (c),
6008 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6009
6010 /* We must use a signed type in order to get an arithmetic right shift.
6011 However, we must also avoid introducing accidental overflows, so that
6012 a subsequent call to integer_zerop will work. Hence we must
6013 do the type conversion here. At this point, the constant is either
6014 zero or one, and the conversion to a signed type can never overflow.
6015 We could get an overflow if this conversion is done anywhere else. */
6016 if (TYPE_UNSIGNED (type))
6017 temp = fold_convert (signed_type_for (type), temp);
6018
6019 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6020 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6021 if (mask != 0)
6022 temp = const_binop (BIT_AND_EXPR, temp,
6023 fold_convert (TREE_TYPE (c), mask));
6024 /* If necessary, convert the type back to match the type of C. */
6025 if (TYPE_UNSIGNED (type))
6026 temp = fold_convert (type, temp);
6027
6028 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6029 }
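/* Numeric sketch (assuming a 32-bit mode, P == 8 and no MASK): bit 7
   of C selects TEMP, which becomes 0xffffff00 after the two shifts
   when that bit is set.  The final XOR then maps the sign-extended
   constant 0xffffff80 to 0x00000080 (extra bits clear) and the
   zero-extended 0x00000080 to 0xffffff80 (extra bits set), giving the
   property described above.  */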
6030 \f
6031 /* For an expression that has the form
6032 (A && B) || ~B
6033 or
6034 (A || B) && ~B,
6035 we can drop one of the inner expressions and simplify to
6036 A || ~B
6037 or
6038 A && ~B
6039 LOC is the location of the resulting expression. OP is the inner
6040 logical operation; the left-hand side in the examples above, while CMPOP
6041 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6042 removing a condition that guards another, as in
6043 (A != NULL && A->...) || A == NULL
6044 which we must not transform. If RHS_ONLY is true, only eliminate the
6045 right-most operand of the inner logical operation. */
6046
6047 static tree
6048 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6049 bool rhs_only)
6050 {
6051 tree type = TREE_TYPE (cmpop);
6052 enum tree_code code = TREE_CODE (cmpop);
6053 enum tree_code truthop_code = TREE_CODE (op);
6054 tree lhs = TREE_OPERAND (op, 0);
6055 tree rhs = TREE_OPERAND (op, 1);
6056 tree orig_lhs = lhs, orig_rhs = rhs;
6057 enum tree_code rhs_code = TREE_CODE (rhs);
6058 enum tree_code lhs_code = TREE_CODE (lhs);
6059 enum tree_code inv_code;
6060
6061 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6062 return NULL_TREE;
6063
6064 if (TREE_CODE_CLASS (code) != tcc_comparison)
6065 return NULL_TREE;
6066
6067 if (rhs_code == truthop_code)
6068 {
6069 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6070 if (newrhs != NULL_TREE)
6071 {
6072 rhs = newrhs;
6073 rhs_code = TREE_CODE (rhs);
6074 }
6075 }
6076 if (lhs_code == truthop_code && !rhs_only)
6077 {
6078 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6079 if (newlhs != NULL_TREE)
6080 {
6081 lhs = newlhs;
6082 lhs_code = TREE_CODE (lhs);
6083 }
6084 }
6085
6086 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6087 if (inv_code == rhs_code
6088 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6089 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6090 return lhs;
6091 if (!rhs_only && inv_code == lhs_code
6092 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6093 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6094 return rhs;
6095 if (rhs != orig_rhs || lhs != orig_lhs)
6096 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6097 lhs, rhs);
6098 return NULL_TREE;
6099 }
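/* Illustrative example: in "(a < 10 && b != 0) || b == 0" the inner
   "b != 0" is the exact inverse of CMPOP "b == 0", so it is dropped,
   giving "a < 10 || b == 0".  With RHS_ONLY set, a guarding left
   operand as in "(p != 0 && p->x != 0) || p == 0" is not examined,
   so the guard survives.  */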
6100
6101 /* Find ways of folding logical expressions of LHS and RHS:
6102 Try to merge two comparisons to the same innermost item.
6103 Look for range tests like "ch >= '0' && ch <= '9'".
6104 Look for combinations of simple terms on machines with expensive branches
6105 and evaluate the RHS unconditionally.
6106
6107 For example, if we have p->a == 2 && p->b == 4 and we can make an
6108 object large enough to span both A and B, we can do this with a comparison
6109 against the object ANDed with the a mask.
6110
6111 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6112 operations to do this with one comparison.
6113
6114 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6115 function and the one above.
6116
6117 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6118 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6119
6120 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6121 two operands.
6122
6123 We return the simplified tree or 0 if no optimization is possible. */
6124
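/* Illustrative example of the merge below, using a hypothetical
   struct s { unsigned char a, b; } *p;
   on a little-endian target where a 16-bit access is acceptable,
   "p->a == 2 && p->b == 4" can be rewritten as a single load and
   compare of both bytes against the merged constant, roughly
   *(unsigned short *) p == 0x0402.  */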
6125 static tree
6126 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6127 tree lhs, tree rhs)
6128 {
6129 /* If this is the "or" of two comparisons, we can do something if
6130 the comparisons are NE_EXPR. If this is the "and", we can do something
6131 if the comparisons are EQ_EXPR. I.e.,
6132 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6133
6134 WANTED_CODE is this operation code. For single bit fields, we can
6135 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6136 comparison for one-bit fields. */
6137
6138 enum tree_code wanted_code;
6139 enum tree_code lcode, rcode;
6140 tree ll_arg, lr_arg, rl_arg, rr_arg;
6141 tree ll_inner, lr_inner, rl_inner, rr_inner;
6142 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6143 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6144 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6145 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6146 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6147 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6148 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6149 scalar_int_mode lnmode, rnmode;
6150 tree ll_mask, lr_mask, rl_mask, rr_mask;
6151 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6152 tree l_const, r_const;
6153 tree lntype, rntype, result;
6154 HOST_WIDE_INT first_bit, end_bit;
6155 int volatilep;
6156
6157 /* Start by getting the comparison codes. Fail if anything is volatile.
6158 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6159 it were surrounded with a NE_EXPR. */
6160
6161 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6162 return 0;
6163
6164 lcode = TREE_CODE (lhs);
6165 rcode = TREE_CODE (rhs);
6166
6167 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6168 {
6169 lhs = build2 (NE_EXPR, truth_type, lhs,
6170 build_int_cst (TREE_TYPE (lhs), 0));
6171 lcode = NE_EXPR;
6172 }
6173
6174 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6175 {
6176 rhs = build2 (NE_EXPR, truth_type, rhs,
6177 build_int_cst (TREE_TYPE (rhs), 0));
6178 rcode = NE_EXPR;
6179 }
6180
6181 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6182 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6183 return 0;
6184
6185 ll_arg = TREE_OPERAND (lhs, 0);
6186 lr_arg = TREE_OPERAND (lhs, 1);
6187 rl_arg = TREE_OPERAND (rhs, 0);
6188 rr_arg = TREE_OPERAND (rhs, 1);
6189
6190 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6191 if (simple_operand_p (ll_arg)
6192 && simple_operand_p (lr_arg))
6193 {
6194 if (operand_equal_p (ll_arg, rl_arg, 0)
6195 && operand_equal_p (lr_arg, rr_arg, 0))
6196 {
6197 result = combine_comparisons (loc, code, lcode, rcode,
6198 truth_type, ll_arg, lr_arg);
6199 if (result)
6200 return result;
6201 }
6202 else if (operand_equal_p (ll_arg, rr_arg, 0)
6203 && operand_equal_p (lr_arg, rl_arg, 0))
6204 {
6205 result = combine_comparisons (loc, code, lcode,
6206 swap_tree_comparison (rcode),
6207 truth_type, ll_arg, lr_arg);
6208 if (result)
6209 return result;
6210 }
6211 }
6212
6213 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6214 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6215
6216 /* If the RHS can be evaluated unconditionally and its operands are
6217 simple, it wins to evaluate the RHS unconditionally on machines
6218 with expensive branches. In this case, this isn't a comparison
6219 that can be merged. */
6220
6221 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6222 false) >= 2
6223 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6224 && simple_operand_p (rl_arg)
6225 && simple_operand_p (rr_arg))
6226 {
6227 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6228 if (code == TRUTH_OR_EXPR
6229 && lcode == NE_EXPR && integer_zerop (lr_arg)
6230 && rcode == NE_EXPR && integer_zerop (rr_arg)
6231 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6232 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6233 return build2_loc (loc, NE_EXPR, truth_type,
6234 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6235 ll_arg, rl_arg),
6236 build_int_cst (TREE_TYPE (ll_arg), 0));
6237
6238 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6239 if (code == TRUTH_AND_EXPR
6240 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6241 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6242 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6243 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6244 return build2_loc (loc, EQ_EXPR, truth_type,
6245 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6246 ll_arg, rl_arg),
6247 build_int_cst (TREE_TYPE (ll_arg), 0));
6248 }
6249
6250 /* See if the comparisons can be merged. Then get all the parameters for
6251 each side. */
6252
6253 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6254 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6255 return 0;
6256
6257 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6258 volatilep = 0;
6259 ll_inner = decode_field_reference (loc, &ll_arg,
6260 &ll_bitsize, &ll_bitpos, &ll_mode,
6261 &ll_unsignedp, &ll_reversep, &volatilep,
6262 &ll_mask, &ll_and_mask);
6263 lr_inner = decode_field_reference (loc, &lr_arg,
6264 &lr_bitsize, &lr_bitpos, &lr_mode,
6265 &lr_unsignedp, &lr_reversep, &volatilep,
6266 &lr_mask, &lr_and_mask);
6267 rl_inner = decode_field_reference (loc, &rl_arg,
6268 &rl_bitsize, &rl_bitpos, &rl_mode,
6269 &rl_unsignedp, &rl_reversep, &volatilep,
6270 &rl_mask, &rl_and_mask);
6271 rr_inner = decode_field_reference (loc, &rr_arg,
6272 &rr_bitsize, &rr_bitpos, &rr_mode,
6273 &rr_unsignedp, &rr_reversep, &volatilep,
6274 &rr_mask, &rr_and_mask);
6275
6276 /* The inner operation on the lhs of each comparison must be the same
6277 if we are to be able to do anything.
6278 Then see if we have constants. If not, the same must be true for
6279 the rhs's. */
6280 if (volatilep
6281 || ll_reversep != rl_reversep
6282 || ll_inner == 0 || rl_inner == 0
6283 || ! operand_equal_p (ll_inner, rl_inner, 0))
6284 return 0;
6285
6286 if (TREE_CODE (lr_arg) == INTEGER_CST
6287 && TREE_CODE (rr_arg) == INTEGER_CST)
6288 {
6289 l_const = lr_arg, r_const = rr_arg;
6290 lr_reversep = ll_reversep;
6291 }
6292 else if (lr_reversep != rr_reversep
6293 || lr_inner == 0 || rr_inner == 0
6294 || ! operand_equal_p (lr_inner, rr_inner, 0))
6295 return 0;
6296 else
6297 l_const = r_const = 0;
6298
6299 /* If either comparison code is not correct for our logical operation,
6300 fail. However, we can convert a one-bit comparison against zero into
6301 the opposite comparison against that bit being set in the field. */
6302
6303 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6304 if (lcode != wanted_code)
6305 {
6306 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6307 {
6308 /* Make the left operand unsigned, since we are only interested
6309 in the value of one bit. Otherwise we are doing the wrong
6310 thing below. */
6311 ll_unsignedp = 1;
6312 l_const = ll_mask;
6313 }
6314 else
6315 return 0;
6316 }
6317
6318 /* This is analogous to the code for l_const above. */
6319 if (rcode != wanted_code)
6320 {
6321 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6322 {
6323 rl_unsignedp = 1;
6324 r_const = rl_mask;
6325 }
6326 else
6327 return 0;
6328 }
6329
6330 /* See if we can find a mode that contains both fields being compared on
6331 the left. If we can't, fail. Otherwise, update all constants and masks
6332 to be relative to a field of that size. */
6333 first_bit = MIN (ll_bitpos, rl_bitpos);
6334 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6335 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6336 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6337 volatilep, &lnmode))
6338 return 0;
6339
6340 lnbitsize = GET_MODE_BITSIZE (lnmode);
6341 lnbitpos = first_bit & ~ (lnbitsize - 1);
6342 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6343 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6344
6345 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6346 {
6347 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6348 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6349 }
6350
6351 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6352 size_int (xll_bitpos));
6353 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6354 size_int (xrl_bitpos));
6355
6356 if (l_const)
6357 {
6358 l_const = fold_convert_loc (loc, lntype, l_const);
6359 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6360 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6361 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6362 fold_build1_loc (loc, BIT_NOT_EXPR,
6363 lntype, ll_mask))))
6364 {
6365 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6366
6367 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6368 }
6369 }
6370 if (r_const)
6371 {
6372 r_const = fold_convert_loc (loc, lntype, r_const);
6373 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6374 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6375 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6376 fold_build1_loc (loc, BIT_NOT_EXPR,
6377 lntype, rl_mask))))
6378 {
6379 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6380
6381 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6382 }
6383 }
6384
6385 /* If the right sides are not constant, do the same for them. Also,
6386 disallow this optimization if a size, signedness or storage order
6387 mismatch occurs between the left and right sides. */
6388 if (l_const == 0)
6389 {
6390 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6391 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6392 || ll_reversep != lr_reversep
6393 /* Make sure the two fields on the right
6394 correspond to the left without being swapped. */
6395 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6396 return 0;
6397
6398 first_bit = MIN (lr_bitpos, rr_bitpos);
6399 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6400 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6401 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6402 volatilep, &rnmode))
6403 return 0;
6404
6405 rnbitsize = GET_MODE_BITSIZE (rnmode);
6406 rnbitpos = first_bit & ~ (rnbitsize - 1);
6407 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6408 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6409
6410 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6411 {
6412 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6413 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6414 }
6415
6416 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6417 rntype, lr_mask),
6418 size_int (xlr_bitpos));
6419 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6420 rntype, rr_mask),
6421 size_int (xrr_bitpos));
6422
6423 /* Make a mask that corresponds to both fields being compared.
6424 Do this for both items being compared. If the operands are the
6425 same size and the bits being compared are in the same position
6426 then we can do this by masking both and comparing the masked
6427 results. */
6428 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6429 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6430 if (lnbitsize == rnbitsize
6431 && xll_bitpos == xlr_bitpos
6432 && lnbitpos >= 0
6433 && rnbitpos >= 0)
6434 {
6435 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6436 lntype, lnbitsize, lnbitpos,
6437 ll_unsignedp || rl_unsignedp, ll_reversep);
6438 if (! all_ones_mask_p (ll_mask, lnbitsize))
6439 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6440
6441 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6442 rntype, rnbitsize, rnbitpos,
6443 lr_unsignedp || rr_unsignedp, lr_reversep);
6444 if (! all_ones_mask_p (lr_mask, rnbitsize))
6445 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6446
6447 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6448 }
6449
6450 /* There is still another way we can do something: If both pairs of
6451 fields being compared are adjacent, we may be able to make a wider
6452 field containing them both.
6453
6454 Note that we still must mask the lhs/rhs expressions. Furthermore,
6455 the mask must be shifted to account for the shift done by
6456 make_bit_field_ref. */
6457 if (((ll_bitsize + ll_bitpos == rl_bitpos
6458 && lr_bitsize + lr_bitpos == rr_bitpos)
6459 || (ll_bitpos == rl_bitpos + rl_bitsize
6460 && lr_bitpos == rr_bitpos + rr_bitsize))
6461 && ll_bitpos >= 0
6462 && rl_bitpos >= 0
6463 && lr_bitpos >= 0
6464 && rr_bitpos >= 0)
6465 {
6466 tree type;
6467
6468 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6469 ll_bitsize + rl_bitsize,
6470 MIN (ll_bitpos, rl_bitpos),
6471 ll_unsignedp, ll_reversep);
6472 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6473 lr_bitsize + rr_bitsize,
6474 MIN (lr_bitpos, rr_bitpos),
6475 lr_unsignedp, lr_reversep);
6476
6477 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6478 size_int (MIN (xll_bitpos, xrl_bitpos)));
6479 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6480 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6481
6482 /* Convert to the smaller type before masking out unwanted bits. */
6483 type = lntype;
6484 if (lntype != rntype)
6485 {
6486 if (lnbitsize > rnbitsize)
6487 {
6488 lhs = fold_convert_loc (loc, rntype, lhs);
6489 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6490 type = rntype;
6491 }
6492 else if (lnbitsize < rnbitsize)
6493 {
6494 rhs = fold_convert_loc (loc, lntype, rhs);
6495 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6496 type = lntype;
6497 }
6498 }
6499
6500 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6501 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6502
6503 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6504 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6505
6506 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6507 }
6508
6509 return 0;
6510 }
6511
6512 /* Handle the case of comparisons with constants. If there is something in
6513 common between the masks, those bits of the constants must be the same.
6514 If not, the condition is always false. Test for this to avoid generating
6515 incorrect code below. */
6516 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6517 if (! integer_zerop (result)
6518 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6519 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6520 {
6521 if (wanted_code == NE_EXPR)
6522 {
6523 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6524 return constant_boolean_node (true, truth_type);
6525 }
6526 else
6527 {
6528 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6529 return constant_boolean_node (false, truth_type);
6530 }
6531 }
6532
6533 if (lnbitpos < 0)
6534 return 0;
6535
6536 /* Construct the expression we will return. First get the component
6537 reference we will make. Unless the mask is all ones the width of
6538 that field, perform the mask operation. Then compare with the
6539 merged constant. */
6540 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6541 lntype, lnbitsize, lnbitpos,
6542 ll_unsignedp || rl_unsignedp, ll_reversep);
6543
6544 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6545 if (! all_ones_mask_p (ll_mask, lnbitsize))
6546 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6547
6548 return build2_loc (loc, wanted_code, truth_type, result,
6549 const_binop (BIT_IOR_EXPR, l_const, r_const));
6550 }
6551 \f
6552 /* T is an integer expression that is being multiplied, divided, or taken a
6553 modulus (CODE says which and what kind of divide or modulus) by a
6554 constant C. See if we can eliminate that operation by folding it with
6555 other operations already in T. WIDE_TYPE, if non-null, is a type that
6556 should be used for the computation if wider than our type.
6557
6558 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6559 (X * 2) + (Y * 4). We must, however, be assured that either the original
6560 expression would not overflow or that overflow is undefined for the type
6561 in the language in question.
6562
6563 If we return a non-null expression, it is an equivalent form of the
6564 original computation, but need not be in the original type.
6565
6566 We set *STRICT_OVERFLOW_P to true if the return value depends on
6567 signed overflow being undefined. Otherwise we do not change
6568 *STRICT_OVERFLOW_P. */
6569
6570 static tree
6571 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6572 bool *strict_overflow_p)
6573 {
6574 /* To avoid exponential search depth, refuse to allow recursion past
6575 three levels. Beyond that (1) it's highly unlikely that we'll find
6576 something interesting and (2) we've probably processed it before
6577 when we built the inner expression. */
6578
6579 static int depth;
6580 tree ret;
6581
6582 if (depth > 3)
6583 return NULL;
6584
6585 depth++;
6586 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6587 depth--;
6588
6589 return ret;
6590 }
6591
6592 static tree
6593 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6594 bool *strict_overflow_p)
6595 {
6596 tree type = TREE_TYPE (t);
6597 enum tree_code tcode = TREE_CODE (t);
6598 tree ctype = (wide_type != 0
6599 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6600 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6601 ? wide_type : type);
6602 tree t1, t2;
6603 int same_p = tcode == code;
6604 tree op0 = NULL_TREE, op1 = NULL_TREE;
6605 bool sub_strict_overflow_p;
6606
6607 /* Don't deal with constants of zero here; they confuse the code below. */
6608 if (integer_zerop (c))
6609 return NULL_TREE;
6610
6611 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6612 op0 = TREE_OPERAND (t, 0);
6613
6614 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6615 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6616
6617 /* Note that we need not handle conditional operations here since fold
6618 already handles those cases. So just do arithmetic here. */
6619 switch (tcode)
6620 {
6621 case INTEGER_CST:
6622 /* For a constant, we can always simplify if we are a multiply
6623 or (for divide and modulus) if it is a multiple of our constant. */
6624 if (code == MULT_EXPR
6625 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6626 TYPE_SIGN (type)))
6627 {
6628 tree tem = const_binop (code, fold_convert (ctype, t),
6629 fold_convert (ctype, c));
6630 /* If the multiplication overflowed, we lost information on it.
6631 See PR68142 and PR69845. */
6632 if (TREE_OVERFLOW (tem))
6633 return NULL_TREE;
6634 return tem;
6635 }
6636 break;
6637
6638 CASE_CONVERT: case NON_LVALUE_EXPR:
6639 /* If op0 is an expression ... */
6640 if ((COMPARISON_CLASS_P (op0)
6641 || UNARY_CLASS_P (op0)
6642 || BINARY_CLASS_P (op0)
6643 || VL_EXP_CLASS_P (op0)
6644 || EXPRESSION_CLASS_P (op0))
6645 /* ... and has wrapping overflow, and its type is smaller
6646 than ctype, then we cannot pass through as widening. */
6647 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6648 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6649 && (TYPE_PRECISION (ctype)
6650 > TYPE_PRECISION (TREE_TYPE (op0))))
6651 /* ... or this is a truncation (t is narrower than op0),
6652 then we cannot pass through this narrowing. */
6653 || (TYPE_PRECISION (type)
6654 < TYPE_PRECISION (TREE_TYPE (op0)))
6655 /* ... or signedness changes for division or modulus,
6656 then we cannot pass through this conversion. */
6657 || (code != MULT_EXPR
6658 && (TYPE_UNSIGNED (ctype)
6659 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6660 /* ... or has undefined overflow while the converted to
6661 type has not, we cannot do the operation in the inner type
6662 as that would introduce undefined overflow. */
6663 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6664 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6665 && !TYPE_OVERFLOW_UNDEFINED (type))))
6666 break;
6667
6668 /* Pass the constant down and see if we can make a simplification. If
6669 we can, replace this expression with the inner simplification for
6670 possible later conversion to our or some other type. */
6671 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6672 && TREE_CODE (t2) == INTEGER_CST
6673 && !TREE_OVERFLOW (t2)
6674 && (t1 = extract_muldiv (op0, t2, code,
6675 code == MULT_EXPR ? ctype : NULL_TREE,
6676 strict_overflow_p)) != 0)
6677 return t1;
6678 break;
6679
6680 case ABS_EXPR:
6681 /* If widening the type changes it from signed to unsigned, then we
6682 must avoid building ABS_EXPR itself as unsigned. */
6683 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6684 {
6685 tree cstype = (*signed_type_for) (ctype);
6686 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6687 != 0)
6688 {
6689 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6690 return fold_convert (ctype, t1);
6691 }
6692 break;
6693 }
6694 /* If the constant is negative, we cannot simplify this. */
6695 if (tree_int_cst_sgn (c) == -1)
6696 break;
6697 /* FALLTHROUGH */
6698 case NEGATE_EXPR:
6699 /* For division and modulus, type can't be unsigned, as e.g.
6700 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6701 For signed types, even with wrapping overflow, this is fine. */
6702 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6703 break;
6704 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6705 != 0)
6706 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6707 break;
6708
6709 case MIN_EXPR: case MAX_EXPR:
6710 /* If widening the type changes the signedness, then we can't perform
6711 this optimization as that changes the result. */
6712 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6713 break;
6714
6715 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6716 sub_strict_overflow_p = false;
6717 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6718 &sub_strict_overflow_p)) != 0
6719 && (t2 = extract_muldiv (op1, c, code, wide_type,
6720 &sub_strict_overflow_p)) != 0)
6721 {
6722 if (tree_int_cst_sgn (c) < 0)
6723 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6724 if (sub_strict_overflow_p)
6725 *strict_overflow_p = true;
6726 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6727 fold_convert (ctype, t2));
6728 }
6729 break;
6730
6731 case LSHIFT_EXPR: case RSHIFT_EXPR:
6732 /* If the second operand is constant, this is a multiplication
6733 or floor division by a power of two, so we can treat it that
6734 way unless the multiplier or divisor overflows. Signed
6735 left-shift overflow is implementation-defined rather than
6736 undefined in C90, so do not convert signed left shift into
6737 multiplication. */
6738 if (TREE_CODE (op1) == INTEGER_CST
6739 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6740 /* const_binop may not detect overflow correctly,
6741 so check for it explicitly here. */
6742 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6743 wi::to_wide (op1))
6744 && (t1 = fold_convert (ctype,
6745 const_binop (LSHIFT_EXPR, size_one_node,
6746 op1))) != 0
6747 && !TREE_OVERFLOW (t1))
6748 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6749 ? MULT_EXPR : FLOOR_DIV_EXPR,
6750 ctype,
6751 fold_convert (ctype, op0),
6752 t1),
6753 c, code, wide_type, strict_overflow_p);
6754 break;
6755
6756 case PLUS_EXPR: case MINUS_EXPR:
6757 /* See if we can eliminate the operation on both sides. If we can, we
6758 can return a new PLUS or MINUS. If we can't, the only remaining
6759 cases where we can do anything are if the second operand is a
6760 constant. */
6761 sub_strict_overflow_p = false;
6762 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6763 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6764 if (t1 != 0 && t2 != 0
6765 && TYPE_OVERFLOW_WRAPS (ctype)
6766 && (code == MULT_EXPR
6767 /* If not multiplication, we can only do this if both operands
6768 are divisible by c. */
6769 || (multiple_of_p (ctype, op0, c)
6770 && multiple_of_p (ctype, op1, c))))
6771 {
6772 if (sub_strict_overflow_p)
6773 *strict_overflow_p = true;
6774 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6775 fold_convert (ctype, t2));
6776 }
6777
6778 /* If this was a subtraction, negate OP1 and set it to be an addition.
6779 This simplifies the logic below. */
6780 if (tcode == MINUS_EXPR)
6781 {
6782 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6783 /* If OP1 was not easily negatable, the constant may be OP0. */
6784 if (TREE_CODE (op0) == INTEGER_CST)
6785 {
6786 std::swap (op0, op1);
6787 std::swap (t1, t2);
6788 }
6789 }
6790
6791 if (TREE_CODE (op1) != INTEGER_CST)
6792 break;
6793
6794 /* If either OP1 or C are negative, this optimization is not safe for
6795 some of the division and remainder types while for others we need
6796 to change the code. */
6797 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6798 {
6799 if (code == CEIL_DIV_EXPR)
6800 code = FLOOR_DIV_EXPR;
6801 else if (code == FLOOR_DIV_EXPR)
6802 code = CEIL_DIV_EXPR;
6803 else if (code != MULT_EXPR
6804 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6805 break;
6806 }
6807
6808 /* If it's a multiply or a division/modulus operation of a multiple
6809 of our constant, do the operation and verify it doesn't overflow. */
6810 if (code == MULT_EXPR
6811 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6812 TYPE_SIGN (type)))
6813 {
6814 op1 = const_binop (code, fold_convert (ctype, op1),
6815 fold_convert (ctype, c));
6816 /* We allow the constant to overflow with wrapping semantics. */
6817 if (op1 == 0
6818 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6819 break;
6820 }
6821 else
6822 break;
6823
6824 /* If we have an unsigned type, we cannot widen the operation since it
6825 will change the result if the original computation overflowed. */
6826 if (TYPE_UNSIGNED (ctype) && ctype != type)
6827 break;
6828
6829 /* The last case is if we are a multiply. In that case, we can
6830 apply the distributive law to commute the multiply and addition
6831 if the multiplication of the constants doesn't overflow
6832 and overflow is defined. With undefined overflow
6833 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6834 But fold_plusminus_mult_expr would factor back any power-of-two
6835 value so do not distribute in the first place in this case. */
6836 if (code == MULT_EXPR
6837 && TYPE_OVERFLOW_WRAPS (ctype)
6838 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6839 return fold_build2 (tcode, ctype,
6840 fold_build2 (code, ctype,
6841 fold_convert (ctype, op0),
6842 fold_convert (ctype, c)),
6843 op1);
6844
6845 break;
6846
6847 case MULT_EXPR:
6848 /* We have a special case here if we are doing something like
6849 (C * 8) % 4 since we know that's zero. */
6850 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6851 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6852 /* If the multiplication can overflow we cannot optimize this. */
6853 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6854 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6855 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6856 TYPE_SIGN (type)))
6857 {
6858 *strict_overflow_p = true;
6859 return omit_one_operand (type, integer_zero_node, op0);
6860 }
6861
6862 /* ... fall through ... */
6863
6864 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6865 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6866 /* If we can extract our operation from the LHS, do so and return a
6867 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6868 do something only if the second operand is a constant. */
6869 if (same_p
6870 && TYPE_OVERFLOW_WRAPS (ctype)
6871 && (t1 = extract_muldiv (op0, c, code, wide_type,
6872 strict_overflow_p)) != 0)
6873 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6874 fold_convert (ctype, op1));
6875 else if (tcode == MULT_EXPR && code == MULT_EXPR
6876 && TYPE_OVERFLOW_WRAPS (ctype)
6877 && (t1 = extract_muldiv (op1, c, code, wide_type,
6878 strict_overflow_p)) != 0)
6879 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6880 fold_convert (ctype, t1));
6881 else if (TREE_CODE (op1) != INTEGER_CST)
6882 return 0;
6883
6884 /* If these are the same operation types, we can associate them
6885 assuming no overflow. */
6886 if (tcode == code)
6887 {
6888 bool overflow_p = false;
6889 wi::overflow_type overflow_mul;
6890 signop sign = TYPE_SIGN (ctype);
6891 unsigned prec = TYPE_PRECISION (ctype);
6892 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6893 wi::to_wide (c, prec),
6894 sign, &overflow_mul);
6895 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6896 if (overflow_mul
6897 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6898 overflow_p = true;
6899 if (!overflow_p)
6900 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6901 wide_int_to_tree (ctype, mul));
6902 }
6903
6904 /* If these operations "cancel" each other, we have the main
6905 optimizations of this pass, which occur when either constant is a
6906 multiple of the other, in which case we replace this with an
6907 operation of either CODE or TCODE.
6908
6909 If we have an unsigned type, we cannot do this since it will change
6910 the result if the original computation overflowed. */
6911 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6912 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6913 || (tcode == MULT_EXPR
6914 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6915 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6916 && code != MULT_EXPR)))
6917 {
6918 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6919 TYPE_SIGN (type)))
6920 {
6921 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6922 *strict_overflow_p = true;
6923 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6924 fold_convert (ctype,
6925 const_binop (TRUNC_DIV_EXPR,
6926 op1, c)));
6927 }
6928 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6929 TYPE_SIGN (type)))
6930 {
6931 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6932 *strict_overflow_p = true;
6933 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6934 fold_convert (ctype,
6935 const_binop (TRUNC_DIV_EXPR,
6936 c, op1)));
6937 }
6938 }
6939 break;
6940
6941 default:
6942 break;
6943 }
6944
6945 return 0;
6946 }
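/* Illustrative cancellations from the DIV/MULT cases above, assuming
   a signed type with undefined overflow: extract_muldiv on X * 12
   with C == 4 and CODE == TRUNC_DIV_EXPR yields X * 3, since 12 is a
   multiple of 4; on X * 4 with C == 12 it yields X / 3, since the
   divisor 12 is itself a multiple of the factor 4.  */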
6947 \f
6948 /* Return a node which has the indicated constant VALUE (either 0 or
6949 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6950 and is of the indicated TYPE. */
6951
6952 tree
6953 constant_boolean_node (bool value, tree type)
6954 {
6955 if (type == integer_type_node)
6956 return value ? integer_one_node : integer_zero_node;
6957 else if (type == boolean_type_node)
6958 return value ? boolean_true_node : boolean_false_node;
6959 else if (TREE_CODE (type) == VECTOR_TYPE)
6960 return build_vector_from_val (type,
6961 build_int_cst (TREE_TYPE (type),
6962 value ? -1 : 0));
6963 else
6964 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6965 }
6966
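/* A minimal usage sketch (illustration only, not part of this file;
   MASK_TYPE stands for a hypothetical 4-element integer vector type):

     tree t = constant_boolean_node (true, mask_type);
       --> VECTOR_CST { -1, -1, -1, -1 }
     tree f = constant_boolean_node (false, mask_type);
       --> VECTOR_CST { 0, 0, 0, 0 }

   Scalar boolean types instead yield boolean_true_node or
   boolean_false_node directly.  */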
6967
6968 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6969 	Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6970 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6971 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6972 COND is the first argument to CODE; otherwise (as in the example
6973 given here), it is the second argument. TYPE is the type of the
6974 original expression. Return NULL_TREE if no simplification is
6975 possible. */
6976
6977 static tree
6978 fold_binary_op_with_conditional_arg (location_t loc,
6979 enum tree_code code,
6980 tree type, tree op0, tree op1,
6981 tree cond, tree arg, int cond_first_p)
6982 {
6983 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6984 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6985 tree test, true_value, false_value;
6986 tree lhs = NULL_TREE;
6987 tree rhs = NULL_TREE;
6988 enum tree_code cond_code = COND_EXPR;
6989
6990 /* Do not move possibly trapping operations into the conditional as this
6991 pessimizes code and causes gimplification issues when applied late. */
6992 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
6993 ANY_INTEGRAL_TYPE_P (type)
6994 && TYPE_OVERFLOW_TRAPS (type), op1))
6995 return NULL_TREE;
6996
6997 if (TREE_CODE (cond) == COND_EXPR
6998 || TREE_CODE (cond) == VEC_COND_EXPR)
6999 {
7000 test = TREE_OPERAND (cond, 0);
7001 true_value = TREE_OPERAND (cond, 1);
7002 false_value = TREE_OPERAND (cond, 2);
7003 	  /* If this operand throws an exception, then it does not make
7004 sense to try to perform a logical or arithmetic operation
7005 involving it. */
7006 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7007 lhs = true_value;
7008 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7009 rhs = false_value;
7010 }
7011 else if (!(TREE_CODE (type) != VECTOR_TYPE
7012 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7013 {
7014 tree testtype = TREE_TYPE (cond);
7015 test = cond;
7016 true_value = constant_boolean_node (true, testtype);
7017 false_value = constant_boolean_node (false, testtype);
7018 }
7019 else
7020 /* Detect the case of mixing vector and scalar types - bail out. */
7021 return NULL_TREE;
7022
7023 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7024 cond_code = VEC_COND_EXPR;
7025
7026 /* This transformation is only worthwhile if we don't have to wrap ARG
7027 in a SAVE_EXPR and the operation can be simplified without recursing
7028      on at least one of the branches once it's pushed inside the COND_EXPR.  */
7029 if (!TREE_CONSTANT (arg)
7030 && (TREE_SIDE_EFFECTS (arg)
7031 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7032 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7033 return NULL_TREE;
7034
7035 arg = fold_convert_loc (loc, arg_type, arg);
7036 if (lhs == 0)
7037 {
7038 true_value = fold_convert_loc (loc, cond_type, true_value);
7039 if (cond_first_p)
7040 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7041 else
7042 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7043 }
7044 if (rhs == 0)
7045 {
7046 false_value = fold_convert_loc (loc, cond_type, false_value);
7047 if (cond_first_p)
7048 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7049 else
7050 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7051 }
7052
7053 /* Check that we have simplified at least one of the branches. */
7054 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7055 return NULL_TREE;
7056
7057 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7058 }
7059
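/* Worked example (illustration only): folding `a + (b ? x : y)' calls
   this with CODE == PLUS_EXPR, COND == `b ? x : y', ARG == `a' and
   COND_FIRST_P == 0, producing `b ? (a + x) : (a + y)' -- but only
   when at least one arm simplifies (e.g. folds to a constant), since
   otherwise distributing the addition would just duplicate work.  */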
7060 \f
7061 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7062
7063 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7064 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
7065 ADDEND is the same as X.
7066
7067 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7068 and finite. The problematic cases are when X is zero, and its mode
7069 has signed zeros. In the case of rounding towards -infinity,
7070 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7071 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7072
7073 bool
7074 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
7075 {
7076 if (!real_zerop (addend))
7077 return false;
7078
7079 /* Don't allow the fold with -fsignaling-nans. */
7080 if (HONOR_SNANS (type))
7081 return false;
7082
7083 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7084 if (!HONOR_SIGNED_ZEROS (type))
7085 return true;
7086
7087 /* There is no case that is safe for all rounding modes. */
7088 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7089 return false;
7090
7091 /* In a vector or complex, we would need to check the sign of all zeros. */
7092 if (TREE_CODE (addend) == VECTOR_CST)
7093 addend = uniform_vector_p (addend);
7094 if (!addend || TREE_CODE (addend) != REAL_CST)
7095 return false;
7096
7097 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7098 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
7099 negate = !negate;
7100
7101 /* The mode has signed zeros, and we have to honor their sign.
7102 In this situation, there is only one case we can return true for.
7103 X - 0 is the same as X with default rounding. */
7104 return negate;
7105 }
7106
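/* Worked example (illustration only): with signed zeros honored and
   default rounding, `x - 0.0' and `x + -0.0' both fold to `x' (this
   function returns true), because even for x == -0.0 the sign of the
   result is preserved.  `x + 0.0' does not fold, since -0.0 + 0.0 is
   +0.0.  */
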
7107 /* Subroutine of match.pd that optimizes comparisons of a division by
7108 a nonzero integer constant against an integer constant, i.e.
7109 X/C1 op C2.
7110
7111 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7112    GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  */
7113
7114 enum tree_code
7115 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7116 tree *hi, bool *neg_overflow)
7117 {
7118 tree prod, tmp, type = TREE_TYPE (c1);
7119 signop sign = TYPE_SIGN (type);
7120 wi::overflow_type overflow;
7121
7122 /* We have to do this the hard way to detect unsigned overflow.
7123 prod = int_const_binop (MULT_EXPR, c1, c2); */
7124 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7125 prod = force_fit_type (type, val, -1, overflow);
7126 *neg_overflow = false;
7127
7128 if (sign == UNSIGNED)
7129 {
7130 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7131 *lo = prod;
7132
7133 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7134 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7135 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7136 }
7137 else if (tree_int_cst_sgn (c1) >= 0)
7138 {
7139 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7140 switch (tree_int_cst_sgn (c2))
7141 {
7142 case -1:
7143 *neg_overflow = true;
7144 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7145 *hi = prod;
7146 break;
7147
7148 case 0:
7149 *lo = fold_negate_const (tmp, type);
7150 *hi = tmp;
7151 break;
7152
7153 case 1:
7154 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7155 *lo = prod;
7156 break;
7157
7158 default:
7159 gcc_unreachable ();
7160 }
7161 }
7162 else
7163 {
7164 /* A negative divisor reverses the relational operators. */
7165 code = swap_tree_comparison (code);
7166
7167 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7168 switch (tree_int_cst_sgn (c2))
7169 {
7170 case -1:
7171 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7172 *lo = prod;
7173 break;
7174
7175 case 0:
7176 *hi = fold_negate_const (tmp, type);
7177 *lo = tmp;
7178 break;
7179
7180 case 1:
7181 *neg_overflow = true;
7182 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7183 *hi = prod;
7184 break;
7185
7186 default:
7187 gcc_unreachable ();
7188 }
7189 }
7190
7191 if (code != EQ_EXPR && code != NE_EXPR)
7192 return code;
7193
7194 if (TREE_OVERFLOW (*lo)
7195 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7196 *lo = NULL_TREE;
7197 if (TREE_OVERFLOW (*hi)
7198 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7199 *hi = NULL_TREE;
7200
7201 return code;
7202 }
7203
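/* Worked example (illustration only, LO/HI hypothetical locals):
   for the signed test X / 3 == 2 we get C1 == 3, C2 == 2, so
   PROD == 6, TMP == 2, and hence *LO == 6 and *HI == 8; the division
   compare becomes the range test 6 <= X && X <= 8.  A negative C1 is
   handled by first reversing the comparison via
   swap_tree_comparison.  */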
7204
7205 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7206 equality/inequality test, then return a simplified form of the test
7207    using a sign test.  Otherwise return NULL.  TYPE is the desired
7208 result type. */
7209
7210 static tree
7211 fold_single_bit_test_into_sign_test (location_t loc,
7212 enum tree_code code, tree arg0, tree arg1,
7213 tree result_type)
7214 {
7215 /* If this is testing a single bit, we can optimize the test. */
7216 if ((code == NE_EXPR || code == EQ_EXPR)
7217 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7218 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7219 {
7220 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7221 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7222 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7223
7224 if (arg00 != NULL_TREE
7225 /* This is only a win if casting to a signed type is cheap,
7226 i.e. when arg00's type is not a partial mode. */
7227 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7228 {
7229 tree stype = signed_type_for (TREE_TYPE (arg00));
7230 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7231 result_type,
7232 fold_convert_loc (loc, stype, arg00),
7233 build_int_cst (stype, 0));
7234 }
7235 }
7236
7237 return NULL_TREE;
7238 }
7239
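/* Worked example (illustration only): for a 32-bit unsigned X, the
   mask in (X & 0x80000000) != 0 is exactly the sign bit of the
   corresponding signed type, so the test is rewritten as
   (int) X < 0; likewise (X & 0x80000000) == 0 becomes
   (int) X >= 0.  */
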
7240 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7241 equality/inequality test, then return a simplified form of
7242 the test using shifts and logical operations. Otherwise return
7243 NULL. TYPE is the desired result type. */
7244
7245 tree
7246 fold_single_bit_test (location_t loc, enum tree_code code,
7247 tree arg0, tree arg1, tree result_type)
7248 {
7249 /* If this is testing a single bit, we can optimize the test. */
7250 if ((code == NE_EXPR || code == EQ_EXPR)
7251 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7252 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7253 {
7254 tree inner = TREE_OPERAND (arg0, 0);
7255 tree type = TREE_TYPE (arg0);
7256 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7257 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7258 int ops_unsigned;
7259 tree signed_type, unsigned_type, intermediate_type;
7260 tree tem, one;
7261
7262 /* First, see if we can fold the single bit test into a sign-bit
7263 test. */
7264 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7265 result_type);
7266 if (tem)
7267 return tem;
7268
7269 /* Otherwise we have (A & C) != 0 where C is a single bit,
7270 convert that into ((A >> C2) & 1). Where C2 = log2(C).
7271 Similarly for (A & C) == 0. */
7272
7273 /* If INNER is a right shift of a constant and it plus BITNUM does
7274 not overflow, adjust BITNUM and INNER. */
7275 if (TREE_CODE (inner) == RSHIFT_EXPR
7276 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7277 && bitnum < TYPE_PRECISION (type)
7278 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7279 TYPE_PRECISION (type) - bitnum))
7280 {
7281 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7282 inner = TREE_OPERAND (inner, 0);
7283 }
7284
7285 /* If we are going to be able to omit the AND below, we must do our
7286 operations as unsigned. If we must use the AND, we have a choice.
7287 Normally unsigned is faster, but for some machines signed is. */
7288 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7289 && !flag_syntax_only) ? 0 : 1;
7290
7291 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7292 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7293 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7294 inner = fold_convert_loc (loc, intermediate_type, inner);
7295
7296 if (bitnum != 0)
7297 inner = build2 (RSHIFT_EXPR, intermediate_type,
7298 inner, size_int (bitnum));
7299
7300 one = build_int_cst (intermediate_type, 1);
7301
7302 if (code == EQ_EXPR)
7303 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7304
7305 /* Put the AND last so it can combine with more things. */
7306 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7307
7308 /* Make sure to return the proper type. */
7309 inner = fold_convert_loc (loc, result_type, inner);
7310
7311 return inner;
7312 }
7313 return NULL_TREE;
7314 }
7315
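/* Worked example (illustration only): when the sign-bit shortcut
   above does not apply, (X & 8) != 0 lowers to ((X >> 3) & 1) and
   (X & 8) == 0 to (((X >> 3) ^ 1) & 1): shift the tested bit into
   position zero, flip it for the == case, and mask it off last so
   the AND can combine with surrounding code.  */
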
7316 /* Test whether it is preferable to swap two operands, ARG0 and
7317 ARG1, for example because ARG0 is an integer constant and ARG1
7318 isn't. */
7319
7320 bool
7321 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7322 {
7323 if (CONSTANT_CLASS_P (arg1))
7324 return 0;
7325 if (CONSTANT_CLASS_P (arg0))
7326 return 1;
7327
7328 STRIP_NOPS (arg0);
7329 STRIP_NOPS (arg1);
7330
7331 if (TREE_CONSTANT (arg1))
7332 return 0;
7333 if (TREE_CONSTANT (arg0))
7334 return 1;
7335
7336 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7337 for commutative and comparison operators. Ensuring a canonical
7338 form allows the optimizers to find additional redundancies without
7339 having to explicitly check for both orderings. */
7340 if (TREE_CODE (arg0) == SSA_NAME
7341 && TREE_CODE (arg1) == SSA_NAME
7342 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7343 return 1;
7344
7345 /* Put SSA_NAMEs last. */
7346 if (TREE_CODE (arg1) == SSA_NAME)
7347 return 0;
7348 if (TREE_CODE (arg0) == SSA_NAME)
7349 return 1;
7350
7351 /* Put variables last. */
7352 if (DECL_P (arg1))
7353 return 0;
7354 if (DECL_P (arg0))
7355 return 1;
7356
7357 return 0;
7358 }
7359
7360
7361 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7362 means A >= Y && A != MAX, but in this case we know that
7363 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7364
7365 static tree
7366 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7367 {
7368 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7369
7370 if (TREE_CODE (bound) == LT_EXPR)
7371 a = TREE_OPERAND (bound, 0);
7372 else if (TREE_CODE (bound) == GT_EXPR)
7373 a = TREE_OPERAND (bound, 1);
7374 else
7375 return NULL_TREE;
7376
7377 typea = TREE_TYPE (a);
7378 if (!INTEGRAL_TYPE_P (typea)
7379 && !POINTER_TYPE_P (typea))
7380 return NULL_TREE;
7381
7382 if (TREE_CODE (ineq) == LT_EXPR)
7383 {
7384 a1 = TREE_OPERAND (ineq, 1);
7385 y = TREE_OPERAND (ineq, 0);
7386 }
7387 else if (TREE_CODE (ineq) == GT_EXPR)
7388 {
7389 a1 = TREE_OPERAND (ineq, 0);
7390 y = TREE_OPERAND (ineq, 1);
7391 }
7392 else
7393 return NULL_TREE;
7394
7395 if (TREE_TYPE (a1) != typea)
7396 return NULL_TREE;
7397
7398 if (POINTER_TYPE_P (typea))
7399 {
7400       /* Convert the pointers to signed integers before taking the difference.  */
7401 tree ta = fold_convert_loc (loc, ssizetype, a);
7402 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7403 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7404 }
7405 else
7406 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7407
7408 if (!diff || !integer_onep (diff))
7409 return NULL_TREE;
7410
7411 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7412 }
7413
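/* Worked example (illustration only): given BOUND `a < x' and INEQ
   `a + 1 > y', the difference (a + 1) - a folds to 1, so INEQ is
   weakened to `a >= y' and the caller can form `a < x && a >= y'
   without the sharp `a + 1' bound.  */
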
7414 /* Fold a sum or difference of at least one multiplication.
7415 Returns the folded tree or NULL if no simplification could be made. */
7416
7417 static tree
7418 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7419 tree arg0, tree arg1)
7420 {
7421 tree arg00, arg01, arg10, arg11;
7422 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7423
7424 /* (A * C) +- (B * C) -> (A+-B) * C.
7425 (A * C) +- A -> A * (C+-1).
7426 We are most concerned about the case where C is a constant,
7427 but other combinations show up during loop reduction. Since
7428 it is not difficult, try all four possibilities. */
7429
7430 if (TREE_CODE (arg0) == MULT_EXPR)
7431 {
7432 arg00 = TREE_OPERAND (arg0, 0);
7433 arg01 = TREE_OPERAND (arg0, 1);
7434 }
7435 else if (TREE_CODE (arg0) == INTEGER_CST)
7436 {
7437 arg00 = build_one_cst (type);
7438 arg01 = arg0;
7439 }
7440 else
7441 {
7442 /* We cannot generate constant 1 for fract. */
7443 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7444 return NULL_TREE;
7445 arg00 = arg0;
7446 arg01 = build_one_cst (type);
7447 }
7448 if (TREE_CODE (arg1) == MULT_EXPR)
7449 {
7450 arg10 = TREE_OPERAND (arg1, 0);
7451 arg11 = TREE_OPERAND (arg1, 1);
7452 }
7453 else if (TREE_CODE (arg1) == INTEGER_CST)
7454 {
7455 arg10 = build_one_cst (type);
7456       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7457 	 the purpose of this canonicalization.  */
7458 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7459 && negate_expr_p (arg1)
7460 && code == PLUS_EXPR)
7461 {
7462 arg11 = negate_expr (arg1);
7463 code = MINUS_EXPR;
7464 }
7465 else
7466 arg11 = arg1;
7467 }
7468 else
7469 {
7470 /* We cannot generate constant 1 for fract. */
7471 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7472 return NULL_TREE;
7473 arg10 = arg1;
7474 arg11 = build_one_cst (type);
7475 }
7476 same = NULL_TREE;
7477
7478 /* Prefer factoring a common non-constant. */
7479 if (operand_equal_p (arg00, arg10, 0))
7480 same = arg00, alt0 = arg01, alt1 = arg11;
7481 else if (operand_equal_p (arg01, arg11, 0))
7482 same = arg01, alt0 = arg00, alt1 = arg10;
7483 else if (operand_equal_p (arg00, arg11, 0))
7484 same = arg00, alt0 = arg01, alt1 = arg10;
7485 else if (operand_equal_p (arg01, arg10, 0))
7486 same = arg01, alt0 = arg00, alt1 = arg11;
7487
7488 /* No identical multiplicands; see if we can find a common
7489 power-of-two factor in non-power-of-two multiplies. This
7490 can help in multi-dimensional array access. */
7491 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7492 {
7493 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7494 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7495 HOST_WIDE_INT tmp;
7496 bool swap = false;
7497 tree maybe_same;
7498
7499 /* Move min of absolute values to int11. */
7500 if (absu_hwi (int01) < absu_hwi (int11))
7501 {
7502 tmp = int01, int01 = int11, int11 = tmp;
7503 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7504 maybe_same = arg01;
7505 swap = true;
7506 }
7507 else
7508 maybe_same = arg11;
7509
7510 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7511 if (factor > 1
7512 && pow2p_hwi (factor)
7513 && (int01 & (factor - 1)) == 0
7514 /* The remainder should not be a constant, otherwise we
7515 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7516 increased the number of multiplications necessary. */
7517 && TREE_CODE (arg10) != INTEGER_CST)
7518 {
7519 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7520 build_int_cst (TREE_TYPE (arg00),
7521 int01 / int11));
7522 alt1 = arg10;
7523 same = maybe_same;
7524 if (swap)
7525 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7526 }
7527 }
7528
7529 if (!same)
7530 return NULL_TREE;
7531
7532 if (! ANY_INTEGRAL_TYPE_P (type)
7533 || TYPE_OVERFLOW_WRAPS (type)
7534 /* We are neither factoring zero nor minus one. */
7535 || TREE_CODE (same) == INTEGER_CST)
7536 return fold_build2_loc (loc, MULT_EXPR, type,
7537 fold_build2_loc (loc, code, type,
7538 fold_convert_loc (loc, type, alt0),
7539 fold_convert_loc (loc, type, alt1)),
7540 fold_convert_loc (loc, type, same));
7541
7542 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7543 same may be minus one and thus the multiplication may overflow. Perform
7544 the sum operation in an unsigned type. */
7545 tree utype = unsigned_type_for (type);
7546 tree tem = fold_build2_loc (loc, code, utype,
7547 fold_convert_loc (loc, utype, alt0),
7548 fold_convert_loc (loc, utype, alt1));
7549   /* If the sum evaluated to a constant that is not -INF, the multiplication
7550 cannot overflow. */
7551 if (TREE_CODE (tem) == INTEGER_CST
7552 && (wi::to_wide (tem)
7553 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7554 return fold_build2_loc (loc, MULT_EXPR, type,
7555 fold_convert (type, tem), same);
7556
7557 /* Do not resort to unsigned multiplication because
7558 we lose the no-overflow property of the expression. */
7559 return NULL_TREE;
7560 }
7561
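/* Worked examples (illustration only): `a * 4 + b * 4' factors to
   `(a + b) * 4' and `a * 4 + a' to `a * 5'.  The power-of-two path
   rewrites `i * 16 + j * 4' as `(i * 4 + j) * 4', which helps
   multi-dimensional array indexing; for signed types with undefined
   overflow the inner sum is computed in an unsigned type so that no
   new overflow can be introduced.  */
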
7562 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7563 specified by EXPR into the buffer PTR of length LEN bytes.
7564 Return the number of bytes placed in the buffer, or zero
7565 upon failure. */
7566
7567 static int
7568 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7569 {
7570 tree type = TREE_TYPE (expr);
7571 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7572 int byte, offset, word, words;
7573 unsigned char value;
7574
7575 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7576 return 0;
7577 if (off == -1)
7578 off = 0;
7579
7580 if (ptr == NULL)
7581 /* Dry run. */
7582 return MIN (len, total_bytes - off);
7583
7584 words = total_bytes / UNITS_PER_WORD;
7585
7586 for (byte = 0; byte < total_bytes; byte++)
7587 {
7588 int bitpos = byte * BITS_PER_UNIT;
7589 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7590 number of bytes. */
7591 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7592
7593 if (total_bytes > UNITS_PER_WORD)
7594 {
7595 word = byte / UNITS_PER_WORD;
7596 if (WORDS_BIG_ENDIAN)
7597 word = (words - 1) - word;
7598 offset = word * UNITS_PER_WORD;
7599 if (BYTES_BIG_ENDIAN)
7600 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7601 else
7602 offset += byte % UNITS_PER_WORD;
7603 }
7604 else
7605 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7606 if (offset >= off && offset - off < len)
7607 ptr[offset - off] = value;
7608 }
7609 return MIN (len, total_bytes - off);
7610 }
7611
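/* Worked example (illustration only): encoding the 32-bit constant
   0x01020304 on a little-endian target stores { 0x04, 0x03, 0x02,
   0x01 }; a big-endian target stores { 0x01, 0x02, 0x03, 0x04 }.
   With OFF == 2 and LEN == 2 only the last two of those bytes are
   written and 2 is returned.  */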
7612
7613 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7614 specified by EXPR into the buffer PTR of length LEN bytes.
7615 Return the number of bytes placed in the buffer, or zero
7616 upon failure. */
7617
7618 static int
7619 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7620 {
7621 tree type = TREE_TYPE (expr);
7622 scalar_mode mode = SCALAR_TYPE_MODE (type);
7623 int total_bytes = GET_MODE_SIZE (mode);
7624 FIXED_VALUE_TYPE value;
7625 tree i_value, i_type;
7626
7627 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7628 return 0;
7629
7630 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7631
7632   if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7633 return 0;
7634
7635 value = TREE_FIXED_CST (expr);
7636 i_value = double_int_to_tree (i_type, value.data);
7637
7638 return native_encode_int (i_value, ptr, len, off);
7639 }
7640
7641
7642 /* Subroutine of native_encode_expr. Encode the REAL_CST
7643 specified by EXPR into the buffer PTR of length LEN bytes.
7644 Return the number of bytes placed in the buffer, or zero
7645 upon failure. */
7646
7647 static int
7648 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7649 {
7650 tree type = TREE_TYPE (expr);
7651 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7652 int byte, offset, word, words, bitpos;
7653 unsigned char value;
7654
7655 /* There are always 32 bits in each long, no matter the size of
7656      the host's long.  We handle floating point representations with
7657 up to 192 bits. */
7658 long tmp[6];
7659
7660 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7661 return 0;
7662 if (off == -1)
7663 off = 0;
7664
7665 if (ptr == NULL)
7666 /* Dry run. */
7667 return MIN (len, total_bytes - off);
7668
7669 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7670
7671 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7672
7673 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7674 bitpos += BITS_PER_UNIT)
7675 {
7676 byte = (bitpos / BITS_PER_UNIT) & 3;
7677 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7678
7679 if (UNITS_PER_WORD < 4)
7680 {
7681 word = byte / UNITS_PER_WORD;
7682 if (WORDS_BIG_ENDIAN)
7683 word = (words - 1) - word;
7684 offset = word * UNITS_PER_WORD;
7685 if (BYTES_BIG_ENDIAN)
7686 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7687 else
7688 offset += byte % UNITS_PER_WORD;
7689 }
7690 else
7691 {
7692 offset = byte;
7693 if (BYTES_BIG_ENDIAN)
7694 {
7695 /* Reverse bytes within each long, or within the entire float
7696 if it's smaller than a long (for HFmode). */
7697 offset = MIN (3, total_bytes - 1) - offset;
7698 gcc_assert (offset >= 0);
7699 }
7700 }
7701 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7702 if (offset >= off
7703 && offset - off < len)
7704 ptr[offset - off] = value;
7705 }
7706 return MIN (len, total_bytes - off);
7707 }
7708
7709 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7710 specified by EXPR into the buffer PTR of length LEN bytes.
7711 Return the number of bytes placed in the buffer, or zero
7712 upon failure. */
7713
7714 static int
7715 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7716 {
7717 int rsize, isize;
7718 tree part;
7719
7720 part = TREE_REALPART (expr);
7721 rsize = native_encode_expr (part, ptr, len, off);
7722 if (off == -1 && rsize == 0)
7723 return 0;
7724 part = TREE_IMAGPART (expr);
7725 if (off != -1)
7726 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7727 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7728 len - rsize, off);
7729 if (off == -1 && isize != rsize)
7730 return 0;
7731 return rsize + isize;
7732 }
7733
7734 /* Like native_encode_vector, but only encode the first COUNT elements.
7735 The other arguments are as for native_encode_vector. */
7736
7737 static int
7738 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7739 int off, unsigned HOST_WIDE_INT count)
7740 {
7741 tree itype = TREE_TYPE (TREE_TYPE (expr));
7742 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7743 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7744 {
7745 /* This is the only case in which elements can be smaller than a byte.
7746 Element 0 is always in the lsb of the containing byte. */
7747 unsigned int elt_bits = TYPE_PRECISION (itype);
7748 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7749 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7750 return 0;
7751
7752 if (off == -1)
7753 off = 0;
7754
7755 /* Zero the buffer and then set bits later where necessary. */
7756 int extract_bytes = MIN (len, total_bytes - off);
7757 if (ptr)
7758 memset (ptr, 0, extract_bytes);
7759
7760 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7761 unsigned int first_elt = off * elts_per_byte;
7762 unsigned int extract_elts = extract_bytes * elts_per_byte;
7763 for (unsigned int i = 0; i < extract_elts; ++i)
7764 {
7765 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7766 if (TREE_CODE (elt) != INTEGER_CST)
7767 return 0;
7768
7769 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7770 {
7771 unsigned int bit = i * elt_bits;
7772 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7773 }
7774 }
7775 return extract_bytes;
7776 }
7777
7778 int offset = 0;
7779 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7780 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7781 {
7782 if (off >= size)
7783 {
7784 off -= size;
7785 continue;
7786 }
7787 tree elem = VECTOR_CST_ELT (expr, i);
7788 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7789 len - offset, off);
7790 if ((off == -1 && res != size) || res == 0)
7791 return 0;
7792 offset += res;
7793 if (offset >= len)
7794 return (off == -1 && i < count - 1) ? 0 : offset;
7795 if (off != -1)
7796 off = 0;
7797 }
7798 return offset;
7799 }
7800
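/* Worked example (illustration only): for a boolean vector whose
   elements occupy a single bit each, eight elements pack into one
   output byte with element 0 in the least significant bit, so the
   8-element mask { 1, 0, 1, 1, 0, 0, 0, 0 } encodes as the single
   byte 0x0d.  Wider elements simply delegate to native_encode_expr
   one element at a time.  */
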
7801 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7802 specified by EXPR into the buffer PTR of length LEN bytes.
7803 Return the number of bytes placed in the buffer, or zero
7804 upon failure. */
7805
7806 static int
7807 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7808 {
7809 unsigned HOST_WIDE_INT count;
7810 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7811 return 0;
7812 return native_encode_vector_part (expr, ptr, len, off, count);
7813 }
7814
7815
7816 /* Subroutine of native_encode_expr. Encode the STRING_CST
7817 specified by EXPR into the buffer PTR of length LEN bytes.
7818 Return the number of bytes placed in the buffer, or zero
7819 upon failure. */
7820
7821 static int
7822 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7823 {
7824 tree type = TREE_TYPE (expr);
7825
7826 /* Wide-char strings are encoded in target byte-order so native
7827 encoding them is trivial. */
7828 if (BITS_PER_UNIT != CHAR_BIT
7829 || TREE_CODE (type) != ARRAY_TYPE
7830 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7831 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7832 return 0;
7833
7834 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7835 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7836 return 0;
7837 if (off == -1)
7838 off = 0;
7839 if (ptr == NULL)
7840 /* Dry run. */;
7841 else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7842 {
7843 int written = 0;
7844 if (off < TREE_STRING_LENGTH (expr))
7845 {
7846 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7847 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7848 }
7849 memset (ptr + written, 0,
7850 MIN (total_bytes - written, len - written));
7851 }
7852 else
7853 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7854 return MIN (total_bytes - off, len);
7855 }
7856
7857
7858 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7859 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7860 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7861 anything, just do a dry run. If OFF is not -1 then start
7862 the encoding at byte offset OFF and encode at most LEN bytes.
7863 Return the number of bytes placed in the buffer, or zero upon failure. */
7864
7865 int
7866 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7867 {
7868 /* We don't support starting at negative offset and -1 is special. */
7869 if (off < -1)
7870 return 0;
7871
7872 switch (TREE_CODE (expr))
7873 {
7874 case INTEGER_CST:
7875 return native_encode_int (expr, ptr, len, off);
7876
7877 case REAL_CST:
7878 return native_encode_real (expr, ptr, len, off);
7879
7880 case FIXED_CST:
7881 return native_encode_fixed (expr, ptr, len, off);
7882
7883 case COMPLEX_CST:
7884 return native_encode_complex (expr, ptr, len, off);
7885
7886 case VECTOR_CST:
7887 return native_encode_vector (expr, ptr, len, off);
7888
7889 case STRING_CST:
7890 return native_encode_string (expr, ptr, len, off);
7891
7892 default:
7893 return 0;
7894 }
7895 }
7896
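/* A minimal round-trip sketch (illustration only, not part of this
   file): callers such as fold_view_convert_expr reinterpret a
   constant's target representation by pairing the two entry points:

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof (buf), -1);
     if (len != 0)
       result = native_interpret_expr (new_type, buf, len);

   Passing PTR == NULL makes the encoding a dry run that only
   computes how many bytes would be written.  */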
7897
7898 /* Subroutine of native_interpret_expr. Interpret the contents of
7899 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7900 If the buffer cannot be interpreted, return NULL_TREE. */
7901
7902 static tree
7903 native_interpret_int (tree type, const unsigned char *ptr, int len)
7904 {
7905 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7906
7907 if (total_bytes > len
7908 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7909 return NULL_TREE;
7910
7911 wide_int result = wi::from_buffer (ptr, total_bytes);
7912
7913 return wide_int_to_tree (type, result);
7914 }
7915
7916
7917 /* Subroutine of native_interpret_expr. Interpret the contents of
7918 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7919 If the buffer cannot be interpreted, return NULL_TREE. */
7920
7921 static tree
7922 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7923 {
7924 scalar_mode mode = SCALAR_TYPE_MODE (type);
7925 int total_bytes = GET_MODE_SIZE (mode);
7926 double_int result;
7927 FIXED_VALUE_TYPE fixed_value;
7928
7929 if (total_bytes > len
7930 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7931 return NULL_TREE;
7932
7933 result = double_int::from_buffer (ptr, total_bytes);
7934 fixed_value = fixed_from_double_int (result, mode);
7935
7936 return build_fixed (type, fixed_value);
7937 }
7938
7939
7940 /* Subroutine of native_interpret_expr. Interpret the contents of
7941 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7942 If the buffer cannot be interpreted, return NULL_TREE. */
7943
7944 static tree
7945 native_interpret_real (tree type, const unsigned char *ptr, int len)
7946 {
7947 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7948 int total_bytes = GET_MODE_SIZE (mode);
7949 unsigned char value;
7950 /* There are always 32 bits in each long, no matter the size of
7951      the host's long.  We handle floating point representations with
7952 up to 192 bits. */
7953 REAL_VALUE_TYPE r;
7954 long tmp[6];
7955
7956 if (total_bytes > len || total_bytes > 24)
7957 return NULL_TREE;
7958 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7959
7960 memset (tmp, 0, sizeof (tmp));
7961 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7962 bitpos += BITS_PER_UNIT)
7963 {
7964 /* Both OFFSET and BYTE index within a long;
7965 bitpos indexes the whole float. */
7966 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7967 if (UNITS_PER_WORD < 4)
7968 {
7969 int word = byte / UNITS_PER_WORD;
7970 if (WORDS_BIG_ENDIAN)
7971 word = (words - 1) - word;
7972 offset = word * UNITS_PER_WORD;
7973 if (BYTES_BIG_ENDIAN)
7974 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7975 else
7976 offset += byte % UNITS_PER_WORD;
7977 }
7978 else
7979 {
7980 offset = byte;
7981 if (BYTES_BIG_ENDIAN)
7982 {
7983 /* Reverse bytes within each long, or within the entire float
7984 if it's smaller than a long (for HFmode). */
7985 offset = MIN (3, total_bytes - 1) - offset;
7986 gcc_assert (offset >= 0);
7987 }
7988 }
7989 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7990
7991 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7992 }
7993
7994 real_from_target (&r, tmp, mode);
7995 return build_real (type, r);
7996 }
7997
7998
7999 /* Subroutine of native_interpret_expr. Interpret the contents of
8000 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8001 If the buffer cannot be interpreted, return NULL_TREE. */
8002
8003 static tree
8004 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8005 {
8006 tree etype, rpart, ipart;
8007 int size;
8008
8009 etype = TREE_TYPE (type);
8010 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8011 if (size * 2 > len)
8012 return NULL_TREE;
8013 rpart = native_interpret_expr (etype, ptr, size);
8014 if (!rpart)
8015 return NULL_TREE;
8016 ipart = native_interpret_expr (etype, ptr+size, size);
8017 if (!ipart)
8018 return NULL_TREE;
8019 return build_complex (type, rpart, ipart);
8020 }
8021
8022 /* Read a vector of type TYPE from the target memory image given by BYTES,
8023 which contains LEN bytes. The vector is known to be encodable using
8024 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8025
8026 Return the vector on success, otherwise return null. */
8027
8028 static tree
8029 native_interpret_vector_part (tree type, const unsigned char *bytes,
8030 unsigned int len, unsigned int npatterns,
8031 unsigned int nelts_per_pattern)
8032 {
8033 tree elt_type = TREE_TYPE (type);
8034 if (VECTOR_BOOLEAN_TYPE_P (type)
8035 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8036 {
8037 /* This is the only case in which elements can be smaller than a byte.
8038 Element 0 is always in the lsb of the containing byte. */
8039 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8040 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8041 return NULL_TREE;
8042
8043 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8044 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8045 {
8046 unsigned int bit_index = i * elt_bits;
8047 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8048 unsigned int lsb = bit_index % BITS_PER_UNIT;
8049 builder.quick_push (bytes[byte_index] & (1 << lsb)
8050 ? build_all_ones_cst (elt_type)
8051 : build_zero_cst (elt_type));
8052 }
8053 return builder.build ();
8054 }
8055
8056 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8057 if (elt_bytes * npatterns * nelts_per_pattern > len)
8058 return NULL_TREE;
8059
8060 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8061 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8062 {
8063 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8064 if (!elt)
8065 return NULL_TREE;
8066 builder.quick_push (elt);
8067 bytes += elt_bytes;
8068 }
8069 return builder.build ();
8070 }
8071
8072 /* Subroutine of native_interpret_expr. Interpret the contents of
8073 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8074 If the buffer cannot be interpreted, return NULL_TREE. */
8075
8076 static tree
8077 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8078 {
8079 tree etype;
8080 unsigned int size;
8081 unsigned HOST_WIDE_INT count;
8082
8083 etype = TREE_TYPE (type);
8084 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8085 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8086 || size * count > len)
8087 return NULL_TREE;
8088
8089 return native_interpret_vector_part (type, ptr, len, count, 1);
8090 }
8091
8092
8093 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8094 the buffer PTR of length LEN as a constant of type TYPE. For
8095 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8096 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8097 return NULL_TREE. */
8098
8099 tree
8100 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8101 {
8102 switch (TREE_CODE (type))
8103 {
8104 case INTEGER_TYPE:
8105 case ENUMERAL_TYPE:
8106 case BOOLEAN_TYPE:
8107 case POINTER_TYPE:
8108 case REFERENCE_TYPE:
8109 return native_interpret_int (type, ptr, len);
8110
8111 case REAL_TYPE:
8112 return native_interpret_real (type, ptr, len);
8113
8114 case FIXED_POINT_TYPE:
8115 return native_interpret_fixed (type, ptr, len);
8116
8117 case COMPLEX_TYPE:
8118 return native_interpret_complex (type, ptr, len);
8119
8120 case VECTOR_TYPE:
8121 return native_interpret_vector (type, ptr, len);
8122
8123 default:
8124 return NULL_TREE;
8125 }
8126 }
8127
8128 /* Returns true if we can interpret the contents of a native encoding
8129 as TYPE. */
8130
8131 static bool
8132 can_native_interpret_type_p (tree type)
8133 {
8134 switch (TREE_CODE (type))
8135 {
8136 case INTEGER_TYPE:
8137 case ENUMERAL_TYPE:
8138 case BOOLEAN_TYPE:
8139 case POINTER_TYPE:
8140 case REFERENCE_TYPE:
8141 case FIXED_POINT_TYPE:
8142 case REAL_TYPE:
8143 case COMPLEX_TYPE:
8144 case VECTOR_TYPE:
8145 return true;
8146 default:
8147 return false;
8148 }
8149 }
8150
8151 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
8152 directly on the VECTOR_CST encoding, in a way that works for variable-
8153 length vectors. Return the resulting VECTOR_CST on success or null
8154 on failure. */
8155
8156 static tree
8157 fold_view_convert_vector_encoding (tree type, tree expr)
8158 {
8159 tree expr_type = TREE_TYPE (expr);
8160 poly_uint64 type_bits, expr_bits;
8161 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
8162 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
8163 return NULL_TREE;
8164
8165 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
8166 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
8167 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
8168 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
8169
8170 /* We can only preserve the semantics of a stepped pattern if the new
8171 vector element is an integer of the same size. */
8172 if (VECTOR_CST_STEPPED_P (expr)
8173 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
8174 return NULL_TREE;
8175
8176 /* The number of bits needed to encode one element from every pattern
8177 of the original vector. */
8178 unsigned int expr_sequence_bits
8179 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
8180
8181 /* The number of bits needed to encode one element from every pattern
8182 of the result. */
8183 unsigned int type_sequence_bits
8184 = least_common_multiple (expr_sequence_bits, type_elt_bits);
8185
8186 /* Don't try to read more bytes than are available, which can happen
8187 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
8188 The general VIEW_CONVERT handling can cope with that case, so there's
8189 no point complicating things here. */
8190 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
8191 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
8192 BITS_PER_UNIT);
8193 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
8194 if (known_gt (buffer_bits, expr_bits))
8195 return NULL_TREE;
8196
8197 /* Get enough bytes of EXPR to form the new encoding. */
8198 auto_vec<unsigned char, 128> buffer (buffer_bytes);
8199 buffer.quick_grow (buffer_bytes);
8200 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
8201 buffer_bits / expr_elt_bits)
8202 != (int) buffer_bytes)
8203 return NULL_TREE;
8204
8205 /* Reencode the bytes as TYPE. */
8206 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
8207 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
8208 type_npatterns, nelts_per_pattern);
8209 }
8210
8211 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8212 TYPE at compile-time. If we're unable to perform the conversion
8213 return NULL_TREE. */
8214
8215 static tree
8216 fold_view_convert_expr (tree type, tree expr)
8217 {
8218 /* We support up to 512-bit values (for V8DFmode). */
8219 unsigned char buffer[64];
8220 int len;
8221
8222 /* Check that the host and target are sane. */
8223 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8224 return NULL_TREE;
8225
8226 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
8227 if (tree res = fold_view_convert_vector_encoding (type, expr))
8228 return res;
8229
8230 len = native_encode_expr (expr, buffer, sizeof (buffer));
8231 if (len == 0)
8232 return NULL_TREE;
8233
8234 return native_interpret_expr (type, buffer, len);
8235 }
8236
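/* Worked example (illustration only): view-converting the float
   constant 1.0f to a 32-bit integer type encodes its IEEE bit
   pattern and reinterprets the bytes, yielding the INTEGER_CST
   0x3f800000.  The fold is refused for values larger than the
   64-byte buffer or when host and target bytes differ in width.  */
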
8237 /* Build an expression for the address of T. Folds away INDIRECT_REF
8238 to avoid confusing the gimplify process. */
8239
8240 tree
8241 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8242 {
8243 /* The size of the object is not relevant when talking about its address. */
8244 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8245 t = TREE_OPERAND (t, 0);
8246
8247 if (TREE_CODE (t) == INDIRECT_REF)
8248 {
8249 t = TREE_OPERAND (t, 0);
8250
8251 if (TREE_TYPE (t) != ptrtype)
8252 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
8253 }
8254 else if (TREE_CODE (t) == MEM_REF
8255 && integer_zerop (TREE_OPERAND (t, 1)))
8256 return TREE_OPERAND (t, 0);
8257 else if (TREE_CODE (t) == MEM_REF
8258 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
8259 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
8260 TREE_OPERAND (t, 0),
8261 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
8262 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8263 {
8264 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8265
8266 if (TREE_TYPE (t) != ptrtype)
8267 t = fold_convert_loc (loc, ptrtype, t);
8268 }
8269 else
8270 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
8271
8272 return t;
8273 }
8274
8275 /* Build an expression for the address of T. */
8276
8277 tree
8278 build_fold_addr_expr_loc (location_t loc, tree t)
8279 {
8280 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8281
8282 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8283 }
8284
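/* Worked examples (illustration only): `&*p' folds back to `p'
   (with a cast if the pointer types differ) and `&MEM[p, 0]' folds
   to `p' directly, so gimplification never sees an ADDR_EXPR wrapped
   around a bare dereference.  */
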
8285 /* Fold a unary expression of code CODE and type TYPE with operand
8286 OP0. Return the folded expression if folding is successful.
8287 Otherwise, return NULL_TREE. */
8288
8289 tree
8290 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8291 {
8292 tree tem;
8293 tree arg0;
8294 enum tree_code_class kind = TREE_CODE_CLASS (code);
8295
8296 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8297 && TREE_CODE_LENGTH (code) == 1);
8298
8299 arg0 = op0;
8300 if (arg0)
8301 {
8302 if (CONVERT_EXPR_CODE_P (code)
8303 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
8304 {
8305 /* Don't use STRIP_NOPS, because signedness of argument type
8306 matters. */
8307 STRIP_SIGN_NOPS (arg0);
8308 }
8309 else
8310 {
8311 /* Strip any conversions that don't change the mode. This
8312 is safe for every expression, except for a comparison
8313 expression because its signedness is derived from its
8314 operands.
8315
8316 Note that this is done as an internal manipulation within
8317 the constant folder, in order to find the simplest
8318 representation of the arguments so that their form can be
8319 	     studied.  In any case, the appropriate type conversions
8320 should be put back in the tree that will get out of the
8321 constant folder. */
8322 STRIP_NOPS (arg0);
8323 }
8324
8325 if (CONSTANT_CLASS_P (arg0))
8326 {
8327 tree tem = const_unop (code, type, arg0);
8328 if (tem)
8329 {
8330 if (TREE_TYPE (tem) != type)
8331 tem = fold_convert_loc (loc, type, tem);
8332 return tem;
8333 }
8334 }
8335 }
8336
8337 tem = generic_simplify (loc, code, type, op0);
8338 if (tem)
8339 return tem;
8340
8341 if (TREE_CODE_CLASS (code) == tcc_unary)
8342 {
8343 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8344 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8345 fold_build1_loc (loc, code, type,
8346 fold_convert_loc (loc, TREE_TYPE (op0),
8347 TREE_OPERAND (arg0, 1))));
8348 else if (TREE_CODE (arg0) == COND_EXPR)
8349 {
8350 tree arg01 = TREE_OPERAND (arg0, 1);
8351 tree arg02 = TREE_OPERAND (arg0, 2);
8352 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8353 arg01 = fold_build1_loc (loc, code, type,
8354 fold_convert_loc (loc,
8355 TREE_TYPE (op0), arg01));
8356 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8357 arg02 = fold_build1_loc (loc, code, type,
8358 fold_convert_loc (loc,
8359 TREE_TYPE (op0), arg02));
8360 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8361 arg01, arg02);
8362
8363 	  /* If this was a conversion, and all we did was to move it
8364 	     inside the COND_EXPR, bring it back out.  But leave it if
8365 it is a conversion from integer to integer and the
8366 result precision is no wider than a word since such a
8367 conversion is cheap and may be optimized away by combine,
8368 while it couldn't if it were outside the COND_EXPR. Then return
8369 so we don't get into an infinite recursion loop taking the
8370 conversion out and then back in. */
8371
8372 if ((CONVERT_EXPR_CODE_P (code)
8373 || code == NON_LVALUE_EXPR)
8374 && TREE_CODE (tem) == COND_EXPR
8375 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8376 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8377 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8378 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8379 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8380 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8381 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8382 && (INTEGRAL_TYPE_P
8383 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8384 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8385 || flag_syntax_only))
8386 tem = build1_loc (loc, code, type,
8387 build3 (COND_EXPR,
8388 TREE_TYPE (TREE_OPERAND
8389 (TREE_OPERAND (tem, 1), 0)),
8390 TREE_OPERAND (tem, 0),
8391 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8392 TREE_OPERAND (TREE_OPERAND (tem, 2),
8393 0)));
8394 return tem;
8395 }
8396 }
8397
8398 switch (code)
8399 {
8400 case NON_LVALUE_EXPR:
8401 if (!maybe_lvalue_p (op0))
8402 return fold_convert_loc (loc, type, op0);
8403 return NULL_TREE;
8404
8405 CASE_CONVERT:
8406 case FLOAT_EXPR:
8407 case FIX_TRUNC_EXPR:
8408 if (COMPARISON_CLASS_P (op0))
8409 {
8410 /* If we have (type) (a CMP b) and type is an integral type, return
8411 new expression involving the new type. Canonicalize
8412 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
8413 non-integral type.
8414 Do not fold the result as that would not simplify further, also
8415 folding again results in recursions. */
8416 if (TREE_CODE (type) == BOOLEAN_TYPE)
8417 return build2_loc (loc, TREE_CODE (op0), type,
8418 TREE_OPERAND (op0, 0),
8419 TREE_OPERAND (op0, 1));
8420 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
8421 && TREE_CODE (type) != VECTOR_TYPE)
8422 return build3_loc (loc, COND_EXPR, type, op0,
8423 constant_boolean_node (true, type),
8424 constant_boolean_node (false, type));
8425 }
8426
8427 /* Handle (T *)&A.B.C for A being of type T and B and C
8428 living at offset zero. This occurs frequently in
8429 C++ upcasting and then accessing the base. */
8430 if (TREE_CODE (op0) == ADDR_EXPR
8431 && POINTER_TYPE_P (type)
8432 && handled_component_p (TREE_OPERAND (op0, 0)))
8433 {
8434 poly_int64 bitsize, bitpos;
8435 tree offset;
8436 machine_mode mode;
8437 int unsignedp, reversep, volatilep;
8438 tree base
8439 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
8440 &offset, &mode, &unsignedp, &reversep,
8441 &volatilep);
8442 /* If the reference was to a (constant) zero offset, we can use
8443 the address of the base if it has the same base type
8444 as the result type and the pointer type is unqualified. */
8445 if (!offset
8446 && known_eq (bitpos, 0)
8447 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8448 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8449 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8450 return fold_convert_loc (loc, type,
8451 build_fold_addr_expr_loc (loc, base));
8452 }
8453
8454 if (TREE_CODE (op0) == MODIFY_EXPR
8455 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8456 /* Detect assigning a bitfield. */
8457 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8458 && DECL_BIT_FIELD
8459 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8460 {
8461 /* Don't leave an assignment inside a conversion
8462 unless assigning a bitfield. */
8463 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8464 /* First do the assignment, then return converted constant. */
8465 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8466 TREE_NO_WARNING (tem) = 1;
8467 TREE_USED (tem) = 1;
8468 return tem;
8469 }
8470
8471 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8472 	 constant (if x has signed type, the sign bit cannot be set
8473 in c). This folds extension into the BIT_AND_EXPR.
8474 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8475 very likely don't have maximal range for their precision and this
8476 transformation effectively doesn't preserve non-maximal ranges. */
8477 if (TREE_CODE (type) == INTEGER_TYPE
8478 && TREE_CODE (op0) == BIT_AND_EXPR
8479 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8480 {
8481 tree and_expr = op0;
8482 tree and0 = TREE_OPERAND (and_expr, 0);
8483 tree and1 = TREE_OPERAND (and_expr, 1);
8484 int change = 0;
8485
8486 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8487 || (TYPE_PRECISION (type)
8488 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8489 change = 1;
8490 else if (TYPE_PRECISION (TREE_TYPE (and1))
8491 <= HOST_BITS_PER_WIDE_INT
8492 && tree_fits_uhwi_p (and1))
8493 {
8494 unsigned HOST_WIDE_INT cst;
8495
8496 cst = tree_to_uhwi (and1);
8497 cst &= HOST_WIDE_INT_M1U
8498 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8499 change = (cst == 0);
8500 if (change
8501 && !flag_syntax_only
8502 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
8503 == ZERO_EXTEND))
8504 {
8505 tree uns = unsigned_type_for (TREE_TYPE (and0));
8506 and0 = fold_convert_loc (loc, uns, and0);
8507 and1 = fold_convert_loc (loc, uns, and1);
8508 }
8509 }
8510 if (change)
8511 {
8512 tem = force_fit_type (type, wi::to_widest (and1), 0,
8513 TREE_OVERFLOW (and1));
8514 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8515 fold_convert_loc (loc, type, and0), tem);
8516 }
8517 }
8518
8519 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
8520 cast (T1)X will fold away. We assume that this happens when X itself
8521 is a cast. */
8522 if (POINTER_TYPE_P (type)
8523 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8524 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
8525 {
8526 tree arg00 = TREE_OPERAND (arg0, 0);
8527 tree arg01 = TREE_OPERAND (arg0, 1);
8528
8529 return fold_build_pointer_plus_loc
8530 (loc, fold_convert_loc (loc, type, arg00), arg01);
8531 }
8532
8533 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8534 	 of the same precision, and X has an integer type not narrower than
8535 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8536 if (INTEGRAL_TYPE_P (type)
8537 && TREE_CODE (op0) == BIT_NOT_EXPR
8538 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8539 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8540 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8541 {
8542 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8543 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8544 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8545 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8546 fold_convert_loc (loc, type, tem));
8547 }
8548
8549 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8550 type of X and Y (integer types only). */
8551 if (INTEGRAL_TYPE_P (type)
8552 && TREE_CODE (op0) == MULT_EXPR
8553 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8554 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8555 {
8556 /* Be careful not to introduce new overflows. */
8557 tree mult_type;
8558 if (TYPE_OVERFLOW_WRAPS (type))
8559 mult_type = type;
8560 else
8561 mult_type = unsigned_type_for (type);
8562
8563 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8564 {
8565 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8566 fold_convert_loc (loc, mult_type,
8567 TREE_OPERAND (op0, 0)),
8568 fold_convert_loc (loc, mult_type,
8569 TREE_OPERAND (op0, 1)));
8570 return fold_convert_loc (loc, type, tem);
8571 }
8572 }
8573
8574 return NULL_TREE;
8575
8576 case VIEW_CONVERT_EXPR:
8577 if (TREE_CODE (op0) == MEM_REF)
8578 {
8579 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
8580 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
8581 tem = fold_build2_loc (loc, MEM_REF, type,
8582 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8583 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
8584 return tem;
8585 }
8586
8587 return NULL_TREE;
8588
8589 case NEGATE_EXPR:
8590 tem = fold_negate_expr (loc, arg0);
8591 if (tem)
8592 return fold_convert_loc (loc, type, tem);
8593 return NULL_TREE;
8594
8595 case ABS_EXPR:
8596 /* Convert fabs((double)float) into (double)fabsf(float). */
8597 if (TREE_CODE (arg0) == NOP_EXPR
8598 && TREE_CODE (type) == REAL_TYPE)
8599 {
8600 tree targ0 = strip_float_extensions (arg0);
8601 if (targ0 != arg0)
8602 return fold_convert_loc (loc, type,
8603 fold_build1_loc (loc, ABS_EXPR,
8604 TREE_TYPE (targ0),
8605 targ0));
8606 }
8607 return NULL_TREE;
8608
8609 case BIT_NOT_EXPR:
8610 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8611 if (TREE_CODE (arg0) == BIT_XOR_EXPR
8612 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8613 fold_convert_loc (loc, type,
8614 TREE_OPERAND (arg0, 0)))))
8615 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8616 fold_convert_loc (loc, type,
8617 TREE_OPERAND (arg0, 1)));
8618 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8619 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8620 fold_convert_loc (loc, type,
8621 TREE_OPERAND (arg0, 1)))))
8622 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8623 fold_convert_loc (loc, type,
8624 TREE_OPERAND (arg0, 0)), tem);
8625
8626 return NULL_TREE;
8627
8628 case TRUTH_NOT_EXPR:
8629 /* Note that the operand of this must be an int
8630 and its values must be 0 or 1.
8631 ("true" is a fixed value perhaps depending on the language,
8632 but we don't handle values other than 1 correctly yet.) */
8633 tem = fold_truth_not_expr (loc, arg0);
8634 if (!tem)
8635 return NULL_TREE;
8636 return fold_convert_loc (loc, type, tem);
8637
8638 case INDIRECT_REF:
8639 /* Fold *&X to X if X is an lvalue. */
8640 if (TREE_CODE (op0) == ADDR_EXPR)
8641 {
8642 tree op00 = TREE_OPERAND (op0, 0);
8643 if ((VAR_P (op00)
8644 || TREE_CODE (op00) == PARM_DECL
8645 || TREE_CODE (op00) == RESULT_DECL)
8646 && !TREE_READONLY (op00))
8647 return op00;
8648 }
8649 return NULL_TREE;
8650
8651 default:
8652 return NULL_TREE;
8653 } /* switch (code) */
8654 }
8655
8656
8657 /* If the operation was a conversion do _not_ mark a resulting constant
8658 with TREE_OVERFLOW if the original constant was not. These conversions
8659 have implementation defined behavior and retaining the TREE_OVERFLOW
8660 flag here would confuse later passes such as VRP. */
8661 tree
8662 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8663 tree type, tree op0)
8664 {
8665 tree res = fold_unary_loc (loc, code, type, op0);
8666 if (res
8667 && TREE_CODE (res) == INTEGER_CST
8668 && TREE_CODE (op0) == INTEGER_CST
8669 && CONVERT_EXPR_CODE_P (code))
8670 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8671
8672 return res;
8673 }
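/* Illustrative sketch (editor's example, not part of GCC): a caller
   narrowing a constant would use this entry point so that the
   implementation-defined truncation is not flagged as overflow:

     tree wide = build_int_cst (integer_type_node, 100000);
     tree narrowed
       = fold_unary_ignore_overflow_loc (UNKNOWN_LOCATION, NOP_EXPR,
                                         short_integer_type_node, wide);
     /* 100000 does not fit in short, but TREE_OVERFLOW (narrowed)
        stays false because the input constant had no overflow.  */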
8674
8675 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8676 operands OP0 and OP1. LOC is the location of the resulting expression.
8677 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8678 Return the folded expression if folding is successful. Otherwise,
8679 return NULL_TREE. */
8680 static tree
8681 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8682 tree arg0, tree arg1, tree op0, tree op1)
8683 {
8684 tree tem;
8685
8686 /* We only do these simplifications if we are optimizing. */
8687 if (!optimize)
8688 return NULL_TREE;
8689
8690 /* Check for things like (A || B) && (A || C). We can convert this
8691 to A || (B && C). Note that either operator can be any of the four
8692 truth and/or operations and the transformation will still be
8693 valid. Also note that we only care about order for the
8694 ANDIF and ORIF operators. If B contains side effects, this
8695 might change the truth-value of A. */
8696 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8697 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8698 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8699 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8700 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8701 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8702 {
8703 tree a00 = TREE_OPERAND (arg0, 0);
8704 tree a01 = TREE_OPERAND (arg0, 1);
8705 tree a10 = TREE_OPERAND (arg1, 0);
8706 tree a11 = TREE_OPERAND (arg1, 1);
8707 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8708 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8709 && (code == TRUTH_AND_EXPR
8710 || code == TRUTH_OR_EXPR));
8711
8712 if (operand_equal_p (a00, a10, 0))
8713 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8714 fold_build2_loc (loc, code, type, a01, a11));
8715 else if (commutative && operand_equal_p (a00, a11, 0))
8716 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8717 fold_build2_loc (loc, code, type, a01, a10));
8718 else if (commutative && operand_equal_p (a01, a10, 0))
8719 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8720 fold_build2_loc (loc, code, type, a00, a11));
8721
8722 /* This case is tricky because we must either have commutative
8723 operators or else A10 must not have side-effects. */
8724
8725 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8726 && operand_equal_p (a01, a11, 0))
8727 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8728 fold_build2_loc (loc, code, type, a00, a10),
8729 a01);
8730 }
8731
8732 /* See if we can build a range comparison. */
8733 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
8734 return tem;
8735
8736 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8737 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8738 {
8739 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8740 if (tem)
8741 return fold_build2_loc (loc, code, type, tem, arg1);
8742 }
8743
8744 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8745 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8746 {
8747 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8748 if (tem)
8749 return fold_build2_loc (loc, code, type, arg0, tem);
8750 }
8751
8752 /* Check for the possibility of merging component references. If our
8753 lhs is another similar operation, try to merge its rhs with our
8754 rhs. Then try to merge our lhs and rhs. */
8755 if (TREE_CODE (arg0) == code
8756 && (tem = fold_truth_andor_1 (loc, code, type,
8757 TREE_OPERAND (arg0, 1), arg1)) != 0)
8758 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8759
8760 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8761 return tem;
8762
8763 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
8764 if (param_logical_op_non_short_circuit != -1)
8765 logical_op_non_short_circuit
8766 = param_logical_op_non_short_circuit;
8767 if (logical_op_non_short_circuit
8768 && !flag_sanitize_coverage
8769 && (code == TRUTH_AND_EXPR
8770 || code == TRUTH_ANDIF_EXPR
8771 || code == TRUTH_OR_EXPR
8772 || code == TRUTH_ORIF_EXPR))
8773 {
8774 enum tree_code ncode, icode;
8775
8776 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8777 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8778 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8779
8780 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8781 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8782 We don't want to pack more than two leaves into a non-IF AND/OR
8783 expression.
8784 If the tree code of the left-hand operand isn't an AND/OR-IF code
8785 and isn't equal to IF-CODE, then we don't want to add the right-hand
8786 operand. If the inner right-hand side of the left-hand operand
8787 has side-effects or isn't simple, then we can't add to it, as
8788 otherwise we might destroy the if-sequence. */
8789 if (TREE_CODE (arg0) == icode
8790 && simple_operand_p_2 (arg1)
8791 /* Needed for sequence points to handle trapping and
8792 side-effects. */
8793 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8794 {
8795 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8796 arg1);
8797 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8798 tem);
8799 }
8800 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8801 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8802 else if (TREE_CODE (arg1) == icode
8803 && simple_operand_p_2 (arg0)
8804 /* Needed for sequence points to handle trapping and
8805 side-effects. */
8806 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8807 {
8808 tem = fold_build2_loc (loc, ncode, type,
8809 arg0, TREE_OPERAND (arg1, 0));
8810 return fold_build2_loc (loc, icode, type, tem,
8811 TREE_OPERAND (arg1, 1));
8812 }
8813 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8814 into (A OR B).
8815 For sequence point consistency, we need to check for trapping
8816 and side-effects. */
8817 else if (code == icode && simple_operand_p_2 (arg0)
8818 && simple_operand_p_2 (arg1))
8819 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8820 }
8821
8822 return NULL_TREE;
8823 }
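/* Illustrative sketch (editor's example, not part of GCC): at the
   source level, the factoring performed above turns

     int f (int a, int b, int c)
     {
       return (a || b) && (a || c);   /* b and c must be free of
                                         side effects  */
     }

   into the equivalent

     return a || (b && c);

   and similarly for the other truth and/or combinations.  */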
8824
8825 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8826 by changing CODE to reduce the magnitude of constants involved in
8827 ARG0 of the comparison.
8828 Returns a canonicalized comparison tree if a simplification was
8829 possible, otherwise returns NULL_TREE.
8830 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8831 valid if signed overflow is undefined. */
8832
8833 static tree
8834 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8835 tree arg0, tree arg1,
8836 bool *strict_overflow_p)
8837 {
8838 enum tree_code code0 = TREE_CODE (arg0);
8839 tree t, cst0 = NULL_TREE;
8840 int sgn0;
8841
8842 /* Match A +- CST code arg1. We can change this only if overflow
8843 is undefined. */
8844 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8845 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8846 /* In principle pointers also have undefined overflow behavior,
8847 but that causes problems elsewhere. */
8848 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8849 && (code0 == MINUS_EXPR
8850 || code0 == PLUS_EXPR)
8851 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8852 return NULL_TREE;
8853
8854 /* Identify the constant in arg0 and its sign. */
8855 cst0 = TREE_OPERAND (arg0, 1);
8856 sgn0 = tree_int_cst_sgn (cst0);
8857
8858 /* Overflowed constants and zero will cause problems. */
8859 if (integer_zerop (cst0)
8860 || TREE_OVERFLOW (cst0))
8861 return NULL_TREE;
8862
8863 /* See if we can reduce the magnitude of the constant in
8864 arg0 by changing the comparison code. */
8865 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8866 if (code == LT_EXPR
8867 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8868 code = LE_EXPR;
8869 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8870 else if (code == GT_EXPR
8871 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8872 code = GE_EXPR;
8873 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8874 else if (code == LE_EXPR
8875 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8876 code = LT_EXPR;
8877 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8878 else if (code == GE_EXPR
8879 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8880 code = GT_EXPR;
8881 else
8882 return NULL_TREE;
8883 *strict_overflow_p = true;
8884
8885 /* Now build the constant reduced in magnitude. But not if that
8886 would produce one outside of its type's range. */
8887 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8888 && ((sgn0 == 1
8889 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8890 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8891 || (sgn0 == -1
8892 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8893 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8894 return NULL_TREE;
8895
8896 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8897 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8898 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8899 t = fold_convert (TREE_TYPE (arg1), t);
8900
8901 return fold_build2_loc (loc, code, type, t, arg1);
8902 }
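/* Illustrative sketch (editor's example, not part of GCC): with
   undefined signed overflow, the canonicalization above rewrites,
   for signed int x and y,

     x + 2 > y    into    x + 1 >= y

   reducing the magnitude of the constant.  This is valid only
   because x + 2 is assumed not to overflow, which is why
   *STRICT_OVERFLOW_P is set.  */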
8903
8904 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8905 overflow further. Try to decrease the magnitude of constants involved
8906 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8907 and put sole constants at the second argument position.
8908 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8909
8910 static tree
8911 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8912 tree arg0, tree arg1)
8913 {
8914 tree t;
8915 bool strict_overflow_p;
8916 const char * const warnmsg = G_("assuming signed overflow does not occur "
8917 "when reducing constant in comparison");
8918
8919 /* Try canonicalization by simplifying arg0. */
8920 strict_overflow_p = false;
8921 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8922 &strict_overflow_p);
8923 if (t)
8924 {
8925 if (strict_overflow_p)
8926 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8927 return t;
8928 }
8929
8930 /* Try canonicalization by simplifying arg1 using the swapped
8931 comparison. */
8932 code = swap_tree_comparison (code);
8933 strict_overflow_p = false;
8934 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8935 &strict_overflow_p);
8936 if (t && strict_overflow_p)
8937 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8938 return t;
8939 }
8940
8941 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8942 space. This is used to avoid issuing overflow warnings for
8943 expressions like &p->x which cannot wrap. */
8944
8945 static bool
8946 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
8947 {
8948 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8949 return true;
8950
8951 if (maybe_lt (bitpos, 0))
8952 return true;
8953
8954 poly_wide_int wi_offset;
8955 int precision = TYPE_PRECISION (TREE_TYPE (base));
8956 if (offset == NULL_TREE)
8957 wi_offset = wi::zero (precision);
8958 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
8959 return true;
8960 else
8961 wi_offset = wi::to_poly_wide (offset);
8962
8963 wi::overflow_type overflow;
8964 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
8965 precision);
8966 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8967 if (overflow)
8968 return true;
8969
8970 poly_uint64 total_hwi, size;
8971 if (!total.to_uhwi (&total_hwi)
8972 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
8973 &size)
8974 || known_eq (size, 0U))
8975 return true;
8976
8977 if (known_le (total_hwi, size))
8978 return false;
8979
8980 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8981 array. */
8982 if (TREE_CODE (base) == ADDR_EXPR
8983 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
8984 &size)
8985 && maybe_ne (size, 0U)
8986 && known_le (total_hwi, size))
8987 return false;
8988
8989 return true;
8990 }
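/* Illustrative sketch (editor's example, not part of GCC): for

     struct S { int a; int b; } *p;

   the address &p->b is base p plus a constant byte offset of 4
   (assuming 4-byte int).  Since that offset is within the size of
   *p, pointer_may_wrap_p returns false, and no wraparound warning
   is needed when comparing &p->b against p plus or minus other
   small constants.  */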
8991
8992 /* Return a positive integer when the symbol DECL is known to have
8993 a nonzero address, zero when it's known not to (e.g., it's a weak
8994 symbol), and a negative integer when the symbol is not yet in the
8995 symbol table and so whether or not its address is zero is unknown.
8996 For function-local objects, always return a positive integer. */
8997 static int
8998 maybe_nonzero_address (tree decl)
8999 {
9000 if (DECL_P (decl) && decl_in_symtab_p (decl))
9001 if (struct symtab_node *symbol = symtab_node::get_create (decl))
9002 return symbol->nonzero_address ();
9003
9004 /* Function local objects are never NULL. */
9005 if (DECL_P (decl)
9006 && (DECL_CONTEXT (decl)
9007 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9008 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9009 return 1;
9010
9011 return -1;
9012 }
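/* Illustrative sketch (editor's example, not part of GCC): for an
   ordinary global,

     int global;            /* maybe_nonzero_address returns 1  */

   the symbol is known to live at a nonzero address, while a weak
   declaration such as

     extern int w __attribute__ ((weak));

   may resolve to address zero, so no positive result can be
   returned for it.  Automatic variables of a function always yield
   1, per the function-local rule above.  */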
9013
9014 /* Subroutine of fold_binary. This routine performs all of the
9015 transformations that are common to the equality/inequality
9016 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9017 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9018 fold_binary should call fold_binary. Fold a comparison with
9019 tree code CODE and type TYPE with operands OP0 and OP1. Return
9020 the folded comparison or NULL_TREE. */
9021
9022 static tree
9023 fold_comparison (location_t loc, enum tree_code code, tree type,
9024 tree op0, tree op1)
9025 {
9026 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9027 tree arg0, arg1, tem;
9028
9029 arg0 = op0;
9030 arg1 = op1;
9031
9032 STRIP_SIGN_NOPS (arg0);
9033 STRIP_SIGN_NOPS (arg1);
9034
9035 /* For comparisons of pointers we can decompose them into a compile-time
9036 comparison of the base objects and the offsets into the object.
9037 This requires at least one operand being an ADDR_EXPR or a
9038 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9039 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9040 && (TREE_CODE (arg0) == ADDR_EXPR
9041 || TREE_CODE (arg1) == ADDR_EXPR
9042 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9043 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9044 {
9045 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9046 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9047 machine_mode mode;
9048 int volatilep, reversep, unsignedp;
9049 bool indirect_base0 = false, indirect_base1 = false;
9050
9051 /* Get base and offset for the access. Strip ADDR_EXPR for
9052 get_inner_reference, but put it back by stripping INDIRECT_REF
9053 off the base object if possible. indirect_baseN will be true
9054 if baseN is not an address but refers to the object itself. */
9055 base0 = arg0;
9056 if (TREE_CODE (arg0) == ADDR_EXPR)
9057 {
9058 base0
9059 = get_inner_reference (TREE_OPERAND (arg0, 0),
9060 &bitsize, &bitpos0, &offset0, &mode,
9061 &unsignedp, &reversep, &volatilep);
9062 if (TREE_CODE (base0) == INDIRECT_REF)
9063 base0 = TREE_OPERAND (base0, 0);
9064 else
9065 indirect_base0 = true;
9066 }
9067 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9068 {
9069 base0 = TREE_OPERAND (arg0, 0);
9070 STRIP_SIGN_NOPS (base0);
9071 if (TREE_CODE (base0) == ADDR_EXPR)
9072 {
9073 base0
9074 = get_inner_reference (TREE_OPERAND (base0, 0),
9075 &bitsize, &bitpos0, &offset0, &mode,
9076 &unsignedp, &reversep, &volatilep);
9077 if (TREE_CODE (base0) == INDIRECT_REF)
9078 base0 = TREE_OPERAND (base0, 0);
9079 else
9080 indirect_base0 = true;
9081 }
9082 if (offset0 == NULL_TREE || integer_zerop (offset0))
9083 offset0 = TREE_OPERAND (arg0, 1);
9084 else
9085 offset0 = size_binop (PLUS_EXPR, offset0,
9086 TREE_OPERAND (arg0, 1));
9087 if (poly_int_tree_p (offset0))
9088 {
9089 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
9090 TYPE_PRECISION (sizetype));
9091 tem <<= LOG2_BITS_PER_UNIT;
9092 tem += bitpos0;
9093 if (tem.to_shwi (&bitpos0))
9094 offset0 = NULL_TREE;
9095 }
9096 }
9097
9098 base1 = arg1;
9099 if (TREE_CODE (arg1) == ADDR_EXPR)
9100 {
9101 base1
9102 = get_inner_reference (TREE_OPERAND (arg1, 0),
9103 &bitsize, &bitpos1, &offset1, &mode,
9104 &unsignedp, &reversep, &volatilep);
9105 if (TREE_CODE (base1) == INDIRECT_REF)
9106 base1 = TREE_OPERAND (base1, 0);
9107 else
9108 indirect_base1 = true;
9109 }
9110 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9111 {
9112 base1 = TREE_OPERAND (arg1, 0);
9113 STRIP_SIGN_NOPS (base1);
9114 if (TREE_CODE (base1) == ADDR_EXPR)
9115 {
9116 base1
9117 = get_inner_reference (TREE_OPERAND (base1, 0),
9118 &bitsize, &bitpos1, &offset1, &mode,
9119 &unsignedp, &reversep, &volatilep);
9120 if (TREE_CODE (base1) == INDIRECT_REF)
9121 base1 = TREE_OPERAND (base1, 0);
9122 else
9123 indirect_base1 = true;
9124 }
9125 if (offset1 == NULL_TREE || integer_zerop (offset1))
9126 offset1 = TREE_OPERAND (arg1, 1);
9127 else
9128 offset1 = size_binop (PLUS_EXPR, offset1,
9129 TREE_OPERAND (arg1, 1));
9130 if (poly_int_tree_p (offset1))
9131 {
9132 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
9133 TYPE_PRECISION (sizetype));
9134 tem <<= LOG2_BITS_PER_UNIT;
9135 tem += bitpos1;
9136 if (tem.to_shwi (&bitpos1))
9137 offset1 = NULL_TREE;
9138 }
9139 }
9140
9141 /* If we have equivalent bases we might be able to simplify. */
9142 if (indirect_base0 == indirect_base1
9143 && operand_equal_p (base0, base1,
9144 indirect_base0 ? OEP_ADDRESS_OF : 0))
9145 {
9146 /* We can fold this expression to a constant if the non-constant
9147 offset parts are equal. */
9148 if ((offset0 == offset1
9149 || (offset0 && offset1
9150 && operand_equal_p (offset0, offset1, 0)))
9151 && (equality_code
9152 || (indirect_base0
9153 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9154 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9155 {
9156 if (!equality_code
9157 && maybe_ne (bitpos0, bitpos1)
9158 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9159 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9160 fold_overflow_warning (("assuming pointer wraparound does not "
9161 "occur when comparing P +- C1 with "
9162 "P +- C2"),
9163 WARN_STRICT_OVERFLOW_CONDITIONAL);
9164
9165 switch (code)
9166 {
9167 case EQ_EXPR:
9168 if (known_eq (bitpos0, bitpos1))
9169 return constant_boolean_node (true, type);
9170 if (known_ne (bitpos0, bitpos1))
9171 return constant_boolean_node (false, type);
9172 break;
9173 case NE_EXPR:
9174 if (known_ne (bitpos0, bitpos1))
9175 return constant_boolean_node (true, type);
9176 if (known_eq (bitpos0, bitpos1))
9177 return constant_boolean_node (false, type);
9178 break;
9179 case LT_EXPR:
9180 if (known_lt (bitpos0, bitpos1))
9181 return constant_boolean_node (true, type);
9182 if (known_ge (bitpos0, bitpos1))
9183 return constant_boolean_node (false, type);
9184 break;
9185 case LE_EXPR:
9186 if (known_le (bitpos0, bitpos1))
9187 return constant_boolean_node (true, type);
9188 if (known_gt (bitpos0, bitpos1))
9189 return constant_boolean_node (false, type);
9190 break;
9191 case GE_EXPR:
9192 if (known_ge (bitpos0, bitpos1))
9193 return constant_boolean_node (true, type);
9194 if (known_lt (bitpos0, bitpos1))
9195 return constant_boolean_node (false, type);
9196 break;
9197 case GT_EXPR:
9198 if (known_gt (bitpos0, bitpos1))
9199 return constant_boolean_node (true, type);
9200 if (known_le (bitpos0, bitpos1))
9201 return constant_boolean_node (false, type);
9202 break;
9203 default:;
9204 }
9205 }
9206 /* We can simplify the comparison to a comparison of the variable
9207 offset parts if the constant offset parts are equal.
9208 Be careful to use signed sizetype here because otherwise we
9209 mess with array offsets in the wrong way. This is possible
9210 because pointer arithmetic is restricted to remain within an
9211 object and overflow on pointer differences is undefined as of
9212 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9213 else if (known_eq (bitpos0, bitpos1)
9214 && (equality_code
9215 || (indirect_base0
9216 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9217 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9218 {
9219 /* By converting to signed sizetype we cover middle-end pointer
9220 arithmetic, which operates on unsigned pointer types of sizetype's
9221 size, and ARRAY_REF offsets, which are properly sign or
9222 zero extended from their type in case it is narrower than
9223 sizetype. */
9224 if (offset0 == NULL_TREE)
9225 offset0 = build_int_cst (ssizetype, 0);
9226 else
9227 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9228 if (offset1 == NULL_TREE)
9229 offset1 = build_int_cst (ssizetype, 0);
9230 else
9231 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9232
9233 if (!equality_code
9234 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9235 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9236 fold_overflow_warning (("assuming pointer wraparound does not "
9237 "occur when comparing P +- C1 with "
9238 "P +- C2"),
9239 WARN_STRICT_OVERFLOW_COMPARISON);
9240
9241 return fold_build2_loc (loc, code, type, offset0, offset1);
9242 }
9243 }
9244 /* For equal offsets we can simplify to a comparison of the
9245 base addresses. */
9246 else if (known_eq (bitpos0, bitpos1)
9247 && (indirect_base0
9248 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9249 && (indirect_base1
9250 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9251 && ((offset0 == offset1)
9252 || (offset0 && offset1
9253 && operand_equal_p (offset0, offset1, 0))))
9254 {
9255 if (indirect_base0)
9256 base0 = build_fold_addr_expr_loc (loc, base0);
9257 if (indirect_base1)
9258 base1 = build_fold_addr_expr_loc (loc, base1);
9259 return fold_build2_loc (loc, code, type, base0, base1);
9260 }
9261 /* Comparison between an ordinary (non-weak) symbol and a null
9262 pointer can be eliminated since such symbols must have a
9263 non-null address. In C, relational expressions between pointers
9264 to objects and null pointers are undefined. The results
9265 below follow the C++ rules with the additional property that
9266 every object pointer compares greater than a null pointer.
9267 */
9268 else if (((DECL_P (base0)
9269 && maybe_nonzero_address (base0) > 0
9270 /* Avoid folding references to struct members at offset 0 to
9271 prevent tests like '&ptr->firstmember == 0' from getting
9272 eliminated. When ptr is null, although the -> expression
9273 is strictly speaking invalid, GCC retains it as a matter
9274 of QoI. See PR c/44555. */
9275 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
9276 || CONSTANT_CLASS_P (base0))
9277 && indirect_base0
9278 /* The caller guarantees that when one of the arguments is
9279 constant (i.e., null in this case) it is second. */
9280 && integer_zerop (arg1))
9281 {
9282 switch (code)
9283 {
9284 case EQ_EXPR:
9285 case LE_EXPR:
9286 case LT_EXPR:
9287 return constant_boolean_node (false, type);
9288 case GE_EXPR:
9289 case GT_EXPR:
9290 case NE_EXPR:
9291 return constant_boolean_node (true, type);
9292 default:
9293 gcc_unreachable ();
9294 }
9295 }
9296 }
9297
9298 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9299 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9300 the resulting offset is smaller in absolute value than the
9301 original one and has the same sign. */
9302 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9303 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9304 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9305 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9306 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9307 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9308 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9309 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9310 {
9311 tree const1 = TREE_OPERAND (arg0, 1);
9312 tree const2 = TREE_OPERAND (arg1, 1);
9313 tree variable1 = TREE_OPERAND (arg0, 0);
9314 tree variable2 = TREE_OPERAND (arg1, 0);
9315 tree cst;
9316 const char * const warnmsg = G_("assuming signed overflow does not "
9317 "occur when combining constants around "
9318 "a comparison");
9319
9320 /* Put the constant on the side where it doesn't overflow and is
9321 of lower absolute value and of the same sign as before. */
9322 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9323 ? MINUS_EXPR : PLUS_EXPR,
9324 const2, const1);
9325 if (!TREE_OVERFLOW (cst)
9326 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9327 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9328 {
9329 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9330 return fold_build2_loc (loc, code, type,
9331 variable1,
9332 fold_build2_loc (loc, TREE_CODE (arg1),
9333 TREE_TYPE (arg1),
9334 variable2, cst));
9335 }
9336
9337 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9338 ? MINUS_EXPR : PLUS_EXPR,
9339 const1, const2);
9340 if (!TREE_OVERFLOW (cst)
9341 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9342 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9343 {
9344 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9345 return fold_build2_loc (loc, code, type,
9346 fold_build2_loc (loc, TREE_CODE (arg0),
9347 TREE_TYPE (arg0),
9348 variable1, cst),
9349 variable2);
9350 }
9351 }
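/* Illustrative sketch (editor's example, not part of GCC): with
   undefined signed overflow, the transformation above rewrites

     x + 2 < y + 5    into    x < y + 3

   since the combined constant 3 is smaller in magnitude than 5 and
   has the same sign, so no new overflow can be introduced.  */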
9352
9353 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9354 if (tem)
9355 return tem;
9356
9357 /* If we are comparing an expression that just has comparisons
9358 of two integer values, arithmetic expressions of those comparisons,
9359 and constants, we can simplify it. There are only three cases
9360 to check: the two values can either be equal, the first can be
9361 greater, or the second can be greater. Fold the expression for
9362 those three values. Since each value must be 0 or 1, we have
9363 eight possibilities, each of which corresponds to the constant 0
9364 or 1 or one of the six possible comparisons.
9365
9366 This handles common cases like (a > b) == 0 but also handles
9367 expressions like ((x > y) - (y > x)) > 0, which supposedly
9368 occur in macroized code. */
9369
9370 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9371 {
9372 tree cval1 = 0, cval2 = 0;
9373
9374 if (twoval_comparison_p (arg0, &cval1, &cval2)
9375 /* Don't handle degenerate cases here; they should already
9376 have been handled anyway. */
9377 && cval1 != 0 && cval2 != 0
9378 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9379 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9380 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9381 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9382 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9383 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9384 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9385 {
9386 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9387 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9388
9389 /* We can't just pass T to eval_subst in case cval1 or cval2
9390 was the same as ARG1. */
9391
9392 tree high_result
9393 = fold_build2_loc (loc, code, type,
9394 eval_subst (loc, arg0, cval1, maxval,
9395 cval2, minval),
9396 arg1);
9397 tree equal_result
9398 = fold_build2_loc (loc, code, type,
9399 eval_subst (loc, arg0, cval1, maxval,
9400 cval2, maxval),
9401 arg1);
9402 tree low_result
9403 = fold_build2_loc (loc, code, type,
9404 eval_subst (loc, arg0, cval1, minval,
9405 cval2, maxval),
9406 arg1);
9407
9408 /* All three of these results should be 0 or 1. Confirm they are.
9409 Then use those values to select the proper code to use. */
9410
9411 if (TREE_CODE (high_result) == INTEGER_CST
9412 && TREE_CODE (equal_result) == INTEGER_CST
9413 && TREE_CODE (low_result) == INTEGER_CST)
9414 {
9415 /* Make a 3-bit mask with the high-order bit being the
9416 value for `>', the next for '=', and the low for '<'. */
9417 switch ((integer_onep (high_result) * 4)
9418 + (integer_onep (equal_result) * 2)
9419 + integer_onep (low_result))
9420 {
9421 case 0:
9422 /* Always false. */
9423 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9424 case 1:
9425 code = LT_EXPR;
9426 break;
9427 case 2:
9428 code = EQ_EXPR;
9429 break;
9430 case 3:
9431 code = LE_EXPR;
9432 break;
9433 case 4:
9434 code = GT_EXPR;
9435 break;
9436 case 5:
9437 code = NE_EXPR;
9438 break;
9439 case 6:
9440 code = GE_EXPR;
9441 break;
9442 case 7:
9443 /* Always true. */
9444 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9445 }
9446
9447 return fold_build2_loc (loc, code, type, cval1, cval2);
9448 }
9449 }
9450 }
9451
9452 return NULL_TREE;
9453 }
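/* Illustrative sketch (editor's example, not part of GCC): the
   twoval-comparison logic above folds sign-style idioms such as

     ((x > y) - (y > x)) > 0      into      x > y
     ((x > y) - (y > x)) < 0      into      x < y

   by evaluating the expression for the three possible orderings of
   x and y and selecting the comparison code whose 3-bit truth table
   matches.  */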
9454
9455
9456 /* Subroutine of fold_binary. Optimize complex multiplications of the
9457 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9458 argument EXPR represents the expression "z" of type TYPE. */
9459
9460 static tree
9461 fold_mult_zconjz (location_t loc, tree type, tree expr)
9462 {
9463 tree itype = TREE_TYPE (type);
9464 tree rpart, ipart, tem;
9465
9466 if (TREE_CODE (expr) == COMPLEX_EXPR)
9467 {
9468 rpart = TREE_OPERAND (expr, 0);
9469 ipart = TREE_OPERAND (expr, 1);
9470 }
9471 else if (TREE_CODE (expr) == COMPLEX_CST)
9472 {
9473 rpart = TREE_REALPART (expr);
9474 ipart = TREE_IMAGPART (expr);
9475 }
9476 else
9477 {
9478 expr = save_expr (expr);
9479 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9480 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9481 }
9482
9483 rpart = save_expr (rpart);
9484 ipart = save_expr (ipart);
9485 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9486 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9487 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9488 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9489 build_zero_cst (itype));
9490 }
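/* Illustrative sketch (editor's example, not part of GCC): for a
   complex value z = a + b*i, the fold above rewrites

     z * conj (z)     as     (a*a + b*b) + 0*i

   i.e. a COMPLEX_EXPR whose real part is the sum of the squared
   real and imaginary parts and whose imaginary part is zero.  */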
9491
9492
9493 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9494 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
9495 true if successful. */
9496
9497 static bool
9498 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
9499 {
9500 unsigned HOST_WIDE_INT i, nunits;
9501
9502 if (TREE_CODE (arg) == VECTOR_CST
9503 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
9504 {
9505 for (i = 0; i < nunits; ++i)
9506 elts[i] = VECTOR_CST_ELT (arg, i);
9507 }
9508 else if (TREE_CODE (arg) == CONSTRUCTOR)
9509 {
9510 constructor_elt *elt;
9511
9512 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9513 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9514 return false;
9515 else
9516 elts[i] = elt->value;
9517 }
9518 else
9519 return false;
9520 for (; i < nelts; i++)
9521 elts[i]
9522 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9523 return true;
9524 }
9525
9526 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9527 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9528 NULL_TREE otherwise. */
9529
9530 tree
9531 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
9532 {
9533 unsigned int i;
9534 unsigned HOST_WIDE_INT nelts;
9535 bool need_ctor = false;
9536
9537 if (!sel.length ().is_constant (&nelts))
9538 return NULL_TREE;
9539 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
9540 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9541 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
9542 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9543 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9544 return NULL_TREE;
9545
9546 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
9547 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
9548 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
9549 return NULL_TREE;
9550
9551 tree_vector_builder out_elts (type, nelts, 1);
9552 for (i = 0; i < nelts; i++)
9553 {
9554 HOST_WIDE_INT index;
9555 if (!sel[i].is_constant (&index))
9556 return NULL_TREE;
9557 if (!CONSTANT_CLASS_P (in_elts[index]))
9558 need_ctor = true;
9559 out_elts.quick_push (unshare_expr (in_elts[index]));
9560 }
9561
9562 if (need_ctor)
9563 {
9564 vec<constructor_elt, va_gc> *v;
9565 vec_alloc (v, nelts);
9566 for (i = 0; i < nelts; i++)
9567 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
9568 return build_constructor (type, v);
9569 }
9570 else
9571 return out_elts.build ();
9572 }
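/* Illustrative sketch (editor's example, not part of GCC): with
   4-element vectors

     arg0 = { 0, 1, 2, 3 },  arg1 = { 4, 5, 6, 7 }

   and selector { 0, 4, 1, 5 }, indices below 4 pick from arg0 and
   the rest from arg1, so the folded result is the interleave
   { 0, 4, 1, 5 }.  */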
9573
9574 /* Try to fold a pointer difference of type TYPE between two address
9575 expressions of array references AREF0 and AREF1 using location LOC. Return a
9576 simplified expression for the difference or NULL_TREE. */
9577
9578 static tree
9579 fold_addr_of_array_ref_difference (location_t loc, tree type,
9580 tree aref0, tree aref1,
9581 bool use_pointer_diff)
9582 {
9583 tree base0 = TREE_OPERAND (aref0, 0);
9584 tree base1 = TREE_OPERAND (aref1, 0);
9585 tree base_offset = build_int_cst (type, 0);
9586
9587 /* If the bases are array references as well, recurse. If the bases
9588 are pointer indirections compute the difference of the pointers.
9589 If the bases are equal, we are set. */
9590 if ((TREE_CODE (base0) == ARRAY_REF
9591 && TREE_CODE (base1) == ARRAY_REF
9592 && (base_offset
9593 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
9594 use_pointer_diff)))
9595 || (INDIRECT_REF_P (base0)
9596 && INDIRECT_REF_P (base1)
9597 && (base_offset
9598 = use_pointer_diff
9599 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
9600 TREE_OPERAND (base0, 0),
9601 TREE_OPERAND (base1, 0))
9602 : fold_binary_loc (loc, MINUS_EXPR, type,
9603 fold_convert (type,
9604 TREE_OPERAND (base0, 0)),
9605 fold_convert (type,
9606 TREE_OPERAND (base1, 0)))))
9607 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9608 {
9609 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9610 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9611 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9612 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
9613 return fold_build2_loc (loc, PLUS_EXPR, type,
9614 base_offset,
9615 fold_build2_loc (loc, MULT_EXPR, type,
9616 diff, esz));
9617 }
9618 return NULL_TREE;
9619 }
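/* Illustrative sketch (editor's example, not part of GCC): for

     int a[16];

   the bases of &a[i] and &a[j] are equal, so the helper above folds
   their address difference to (i - j) * sizeof (int) bytes; the
   pointer-difference machinery then divides by the element size,
   so the C-level expression &a[i] - &a[j] ultimately becomes
   i - j.  */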
9620
9621 /* If the real or vector real constant CST of type TYPE has an exact
9622 inverse, return it, else return NULL. */
9623
9624 tree
9625 exact_inverse (tree type, tree cst)
9626 {
9627 REAL_VALUE_TYPE r;
9628 tree unit_type;
9629 machine_mode mode;
9630
9631 switch (TREE_CODE (cst))
9632 {
9633 case REAL_CST:
9634 r = TREE_REAL_CST (cst);
9635
9636 if (exact_real_inverse (TYPE_MODE (type), &r))
9637 return build_real (type, r);
9638
9639 return NULL_TREE;
9640
9641 case VECTOR_CST:
9642 {
9643 unit_type = TREE_TYPE (type);
9644 mode = TYPE_MODE (unit_type);
9645
9646 tree_vector_builder elts;
9647 if (!elts.new_unary_operation (type, cst, false))
9648 return NULL_TREE;
9649 unsigned int count = elts.encoded_nelts ();
9650 for (unsigned int i = 0; i < count; ++i)
9651 {
9652 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9653 if (!exact_real_inverse (mode, &r))
9654 return NULL_TREE;
9655 elts.quick_push (build_real (unit_type, r));
9656 }
9657
9658 return elts.build ();
9659 }
9660
9661 default:
9662 return NULL_TREE;
9663 }
9664 }
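/* Illustrative sketch (editor's example, not part of GCC): for a
   binary floating-point type, 4.0 has the exact inverse 0.25, so a
   later pass may rewrite x / 4.0 as x * 0.25; 3.0 has no exact
   inverse (1/3 is not representable in binary), so exact_inverse
   returns NULL_TREE and the division is kept.  */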
9665
9666 /* Mask out the tz least significant bits of X of type TYPE where
9667 tz is the number of trailing zeroes in Y. */
9668 static wide_int
9669 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9670 {
9671 int tz = wi::ctz (y);
9672 if (tz > 0)
9673 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9674 return x;
9675 }
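/* Illustrative sketch (editor's example, not part of GCC): if Y is
   12 (binary 1100, two trailing zeros), the low two bits of X can
   never survive a later X & Y, so mask_with_tz clears them:

     mask_with_tz (type, x, 12)  ==  x & ~3  */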
9676
9677 /* Return true when T is an address and is known to be nonzero.
9678 For floating point we further ensure that T is not denormal.
9679 Similar logic is present in nonzero_address in rtlanal.h.
9680
9681 If the return value is based on the assumption that signed overflow
9682 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9683 change *STRICT_OVERFLOW_P. */
9684
9685 static bool
9686 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9687 {
9688 tree type = TREE_TYPE (t);
9689 enum tree_code code;
9690
9691 /* Doing something useful for floating point would need more work. */
9692 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9693 return false;
9694
9695 code = TREE_CODE (t);
9696 switch (TREE_CODE_CLASS (code))
9697 {
9698 case tcc_unary:
9699 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9700 strict_overflow_p);
9701 case tcc_binary:
9702 case tcc_comparison:
9703 return tree_binary_nonzero_warnv_p (code, type,
9704 TREE_OPERAND (t, 0),
9705 TREE_OPERAND (t, 1),
9706 strict_overflow_p);
9707 case tcc_constant:
9708 case tcc_declaration:
9709 case tcc_reference:
9710 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9711
9712 default:
9713 break;
9714 }
9715
9716 switch (code)
9717 {
9718 case TRUTH_NOT_EXPR:
9719 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9720 strict_overflow_p);
9721
9722 case TRUTH_AND_EXPR:
9723 case TRUTH_OR_EXPR:
9724 case TRUTH_XOR_EXPR:
9725 return tree_binary_nonzero_warnv_p (code, type,
9726 TREE_OPERAND (t, 0),
9727 TREE_OPERAND (t, 1),
9728 strict_overflow_p);
9729
9730 case COND_EXPR:
9731 case CONSTRUCTOR:
9732 case OBJ_TYPE_REF:
9733 case ASSERT_EXPR:
9734 case ADDR_EXPR:
9735 case WITH_SIZE_EXPR:
9736 case SSA_NAME:
9737 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9738
9739 case COMPOUND_EXPR:
9740 case MODIFY_EXPR:
9741 case BIND_EXPR:
9742 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9743 strict_overflow_p);
9744
9745 case SAVE_EXPR:
9746 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9747 strict_overflow_p);
9748
9749 case CALL_EXPR:
9750 {
9751 tree fndecl = get_callee_fndecl (t);
9752 if (!fndecl) return false;
9753 if (flag_delete_null_pointer_checks && !flag_check_new
9754 && DECL_IS_OPERATOR_NEW_P (fndecl)
9755 && !TREE_NOTHROW (fndecl))
9756 return true;
9757 if (flag_delete_null_pointer_checks
9758 && lookup_attribute ("returns_nonnull",
9759 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9760 return true;
9761 return alloca_call_p (t);
9762 }
9763
9764 default:
9765 break;
9766 }
9767 return false;
9768 }
9769
9770 /* Return true when T is an address and is known to be nonzero.
9771 Handle warnings about undefined signed overflow. */
9772
9773 bool
9774 tree_expr_nonzero_p (tree t)
9775 {
9776 bool ret, strict_overflow_p;
9777
9778 strict_overflow_p = false;
9779 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9780 if (strict_overflow_p)
9781 fold_overflow_warning (("assuming signed overflow does not occur when "
9782 "determining that expression is always "
9783 "non-zero"),
9784 WARN_STRICT_OVERFLOW_MISC);
9785 return ret;
9786 }
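/* Illustrative sketch (editor's example, not part of GCC):
   tree_expr_nonzero_p is what lets a test like

     int x;
     int *p = &x;
     if (p) ...            /* folded to if (1): &x is never null  */

   be folded away, and similarly a call to a function declared with
   __attribute__ ((returns_nonnull)) compares unequal to a null
   pointer when null-pointer checks may be deleted.  */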
9787
9788 /* Return true if T is known not to be equal to an integer W. */
9789
9790 bool
9791 expr_not_equal_to (tree t, const wide_int &w)
9792 {
9793 wide_int min, max, nz;
9794 value_range_kind rtype;
9795 switch (TREE_CODE (t))
9796 {
9797 case INTEGER_CST:
9798 return wi::to_wide (t) != w;
9799
9800 case SSA_NAME:
9801 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9802 return false;
9803 rtype = get_range_info (t, &min, &max);
9804 if (rtype == VR_RANGE)
9805 {
9806 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9807 return true;
9808 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9809 return true;
9810 }
9811 else if (rtype == VR_ANTI_RANGE
9812 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9813 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9814 return true;
9815 /* If T has some known zero bits and W has any of those bits set,
9816 then T is known not to be equal to W. */
9817 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9818 TYPE_PRECISION (TREE_TYPE (t))), 0))
9819 return true;
9820 return false;
9821
9822 default:
9823 return false;
9824 }
9825 }
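/* Illustrative sketch (editor's example, not part of GCC): if value
   range information proves an SSA name n lies in [1, 10], then
   expr_not_equal_to (n, 0) is true, so n is known nonzero.  The
   nonzero-bits check works the same way: if the low bit of n is
   known to be zero, n cannot equal 5, because 5 has its low bit
   set.  */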
9826
9827 /* Fold a binary expression of code CODE and type TYPE with operands
9828 OP0 and OP1. LOC is the location of the resulting expression.
9829 Return the folded expression if folding is successful. Otherwise,
9830 return NULL_TREE. */
9831
9832 tree
9833 fold_binary_loc (location_t loc, enum tree_code code, tree type,
9834 tree op0, tree op1)
9835 {
9836 enum tree_code_class kind = TREE_CODE_CLASS (code);
9837 tree arg0, arg1, tem;
9838 tree t1 = NULL_TREE;
9839 bool strict_overflow_p;
9840 unsigned int prec;
9841
9842 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9843 && TREE_CODE_LENGTH (code) == 2
9844 && op0 != NULL_TREE
9845 && op1 != NULL_TREE);
9846
9847 arg0 = op0;
9848 arg1 = op1;
9849
9850 /* Strip any conversions that don't change the mode. This is
9851 safe for every expression, except for a comparison expression
9852 because its signedness is derived from its operands. So, in
9853 the latter case, only strip conversions that don't change the
9854 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9855 preserved.
9856
9857 Note that this is done as an internal manipulation within the
9858 constant folder, in order to find the simplest representation
9859 of the arguments so that their form can be studied. In any
9860 cases, the appropriate type conversions should be put back in
9861 the tree that will get out of the constant folder. */
9862
9863 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9864 {
9865 STRIP_SIGN_NOPS (arg0);
9866 STRIP_SIGN_NOPS (arg1);
9867 }
9868 else
9869 {
9870 STRIP_NOPS (arg0);
9871 STRIP_NOPS (arg1);
9872 }
9873
9874 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9875 constant but we can't do arithmetic on them. */
9876 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9877 {
9878 tem = const_binop (code, type, arg0, arg1);
9879 if (tem != NULL_TREE)
9880 {
9881 if (TREE_TYPE (tem) != type)
9882 tem = fold_convert_loc (loc, type, tem);
9883 return tem;
9884 }
9885 }
9886
9887 /* If this is a commutative operation, and ARG0 is a constant, move it
9888 to ARG1 to reduce the number of tests below. */
9889 if (commutative_tree_code (code)
9890 && tree_swap_operands_p (arg0, arg1))
9891 return fold_build2_loc (loc, code, type, op1, op0);
9892
9893 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9894 to ARG1 to reduce the number of tests below. */
9895 if (kind == tcc_comparison
9896 && tree_swap_operands_p (arg0, arg1))
9897 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9898
9899 tem = generic_simplify (loc, code, type, op0, op1);
9900 if (tem)
9901 return tem;
9902
9903 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9904
9905 First check for cases where an arithmetic operation is applied to a
9906 compound, conditional, or comparison operation. Push the arithmetic
9907 operation inside the compound or conditional to see if any folding
9908 can then be done. Convert comparison to conditional for this purpose.
9909 This also optimizes non-constant cases that used to be done in
9910 expand_expr.
9911
9912 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9913 one of the operands is a comparison and the other is a comparison, a
9914 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9915 code below would make the expression more complex. Change it to a
9916 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9917 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9918
9919 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9920 || code == EQ_EXPR || code == NE_EXPR)
9921 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
9922 && ((truth_value_p (TREE_CODE (arg0))
9923 && (truth_value_p (TREE_CODE (arg1))
9924 || (TREE_CODE (arg1) == BIT_AND_EXPR
9925 && integer_onep (TREE_OPERAND (arg1, 1)))))
9926 || (truth_value_p (TREE_CODE (arg1))
9927 && (truth_value_p (TREE_CODE (arg0))
9928 || (TREE_CODE (arg0) == BIT_AND_EXPR
9929 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9930 {
9931 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9932 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9933 : TRUTH_XOR_EXPR,
9934 boolean_type_node,
9935 fold_convert_loc (loc, boolean_type_node, arg0),
9936 fold_convert_loc (loc, boolean_type_node, arg1));
9937
9938 if (code == EQ_EXPR)
9939 tem = invert_truthvalue_loc (loc, tem);
9940
9941 return fold_convert_loc (loc, type, tem);
9942 }
9943
9944 if (TREE_CODE_CLASS (code) == tcc_binary
9945 || TREE_CODE_CLASS (code) == tcc_comparison)
9946 {
9947 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9948 {
9949 tem = fold_build2_loc (loc, code, type,
9950 fold_convert_loc (loc, TREE_TYPE (op0),
9951 TREE_OPERAND (arg0, 1)), op1);
9952 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9953 tem);
9954 }
9955 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9956 {
9957 tem = fold_build2_loc (loc, code, type, op0,
9958 fold_convert_loc (loc, TREE_TYPE (op1),
9959 TREE_OPERAND (arg1, 1)));
9960 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9961 tem);
9962 }
9963
9964 if (TREE_CODE (arg0) == COND_EXPR
9965 || TREE_CODE (arg0) == VEC_COND_EXPR
9966 || COMPARISON_CLASS_P (arg0))
9967 {
9968 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9969 arg0, arg1,
9970 /*cond_first_p=*/1);
9971 if (tem != NULL_TREE)
9972 return tem;
9973 }
9974
9975 if (TREE_CODE (arg1) == COND_EXPR
9976 || TREE_CODE (arg1) == VEC_COND_EXPR
9977 || COMPARISON_CLASS_P (arg1))
9978 {
9979 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9980 arg1, arg0,
9981 /*cond_first_p=*/0);
9982 if (tem != NULL_TREE)
9983 return tem;
9984 }
9985 }
9986
9987 switch (code)
9988 {
9989 case MEM_REF:
9990 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9991 if (TREE_CODE (arg0) == ADDR_EXPR
9992 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9993 {
9994 tree iref = TREE_OPERAND (arg0, 0);
9995 return fold_build2 (MEM_REF, type,
9996 TREE_OPERAND (iref, 0),
9997 int_const_binop (PLUS_EXPR, arg1,
9998 TREE_OPERAND (iref, 1)));
9999 }
10000
10001 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10002 if (TREE_CODE (arg0) == ADDR_EXPR
10003 && handled_component_p (TREE_OPERAND (arg0, 0)))
10004 {
10005 tree base;
10006 poly_int64 coffset;
10007 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10008 &coffset);
10009 if (!base)
10010 return NULL_TREE;
10011 return fold_build2 (MEM_REF, type,
10012 build_fold_addr_expr (base),
10013 int_const_binop (PLUS_EXPR, arg1,
10014 size_int (coffset)));
10015 }
10016
10017 return NULL_TREE;
10018
10019 case POINTER_PLUS_EXPR:
10020 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10021 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10022 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10023 return fold_convert_loc (loc, type,
10024 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10025 fold_convert_loc (loc, sizetype,
10026 arg1),
10027 fold_convert_loc (loc, sizetype,
10028 arg0)));
10029
10030 return NULL_TREE;
10031
10032 case PLUS_EXPR:
10033 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10034 {
10035 /* X + (X / CST) * -CST is X % CST. */
10036 if (TREE_CODE (arg1) == MULT_EXPR
10037 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10038 && operand_equal_p (arg0,
10039 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10040 {
10041 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10042 tree cst1 = TREE_OPERAND (arg1, 1);
10043 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10044 cst1, cst0);
10045 if (sum && integer_zerop (sum))
10046 return fold_convert_loc (loc, type,
10047 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10048 TREE_TYPE (arg0), arg0,
10049 cst0));
10050 }
10051 }
10052
10053 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10054 one. Make sure the type is not saturating and has the signedness of
10055 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10056 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10057 if ((TREE_CODE (arg0) == MULT_EXPR
10058 || TREE_CODE (arg1) == MULT_EXPR)
10059 && !TYPE_SATURATING (type)
10060 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10061 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10062 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10063 {
10064 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10065 if (tem)
10066 return tem;
10067 }
10068
10069 if (! FLOAT_TYPE_P (type))
10070 {
10071 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10072 (plus (plus (mult) (mult)) (foo)) so that we can
10073 take advantage of the factoring cases below. */
10074 if (ANY_INTEGRAL_TYPE_P (type)
10075 && TYPE_OVERFLOW_WRAPS (type)
10076 && (((TREE_CODE (arg0) == PLUS_EXPR
10077 || TREE_CODE (arg0) == MINUS_EXPR)
10078 && TREE_CODE (arg1) == MULT_EXPR)
10079 || ((TREE_CODE (arg1) == PLUS_EXPR
10080 || TREE_CODE (arg1) == MINUS_EXPR)
10081 && TREE_CODE (arg0) == MULT_EXPR)))
10082 {
10083 tree parg0, parg1, parg, marg;
10084 enum tree_code pcode;
10085
10086 if (TREE_CODE (arg1) == MULT_EXPR)
10087 parg = arg0, marg = arg1;
10088 else
10089 parg = arg1, marg = arg0;
10090 pcode = TREE_CODE (parg);
10091 parg0 = TREE_OPERAND (parg, 0);
10092 parg1 = TREE_OPERAND (parg, 1);
10093 STRIP_NOPS (parg0);
10094 STRIP_NOPS (parg1);
10095
10096 if (TREE_CODE (parg0) == MULT_EXPR
10097 && TREE_CODE (parg1) != MULT_EXPR)
10098 return fold_build2_loc (loc, pcode, type,
10099 fold_build2_loc (loc, PLUS_EXPR, type,
10100 fold_convert_loc (loc, type,
10101 parg0),
10102 fold_convert_loc (loc, type,
10103 marg)),
10104 fold_convert_loc (loc, type, parg1));
10105 if (TREE_CODE (parg0) != MULT_EXPR
10106 && TREE_CODE (parg1) == MULT_EXPR)
10107 return
10108 fold_build2_loc (loc, PLUS_EXPR, type,
10109 fold_convert_loc (loc, type, parg0),
10110 fold_build2_loc (loc, pcode, type,
10111 fold_convert_loc (loc, type, marg),
10112 fold_convert_loc (loc, type,
10113 parg1)));
10114 }
10115 }
10116 else
10117 {
10118 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10119 to __complex__ ( x, y ). This is not the same for SNaNs or
10120 if signed zeros are involved. */
10121 if (!HONOR_SNANS (element_mode (arg0))
10122 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10123 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10124 {
10125 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10126 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10127 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10128 bool arg0rz = false, arg0iz = false;
10129 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10130 || (arg0i && (arg0iz = real_zerop (arg0i))))
10131 {
10132 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10133 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10134 if (arg0rz && arg1i && real_zerop (arg1i))
10135 {
10136 tree rp = arg1r ? arg1r
10137 : build1 (REALPART_EXPR, rtype, arg1);
10138 tree ip = arg0i ? arg0i
10139 : build1 (IMAGPART_EXPR, rtype, arg0);
10140 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10141 }
10142 else if (arg0iz && arg1r && real_zerop (arg1r))
10143 {
10144 tree rp = arg0r ? arg0r
10145 : build1 (REALPART_EXPR, rtype, arg0);
10146 tree ip = arg1i ? arg1i
10147 : build1 (IMAGPART_EXPR, rtype, arg1);
10148 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10149 }
10150 }
10151 }
10152
10153 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10154 We associate floats only if the user has specified
10155 -fassociative-math. */
10156 if (flag_associative_math
10157 && TREE_CODE (arg1) == PLUS_EXPR
10158 && TREE_CODE (arg0) != MULT_EXPR)
10159 {
10160 tree tree10 = TREE_OPERAND (arg1, 0);
10161 tree tree11 = TREE_OPERAND (arg1, 1);
10162 if (TREE_CODE (tree11) == MULT_EXPR
10163 && TREE_CODE (tree10) == MULT_EXPR)
10164 {
10165 tree tree0;
10166 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10167 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10168 }
10169 }
10170 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10171 We associate floats only if the user has specified
10172 -fassociative-math. */
10173 if (flag_associative_math
10174 && TREE_CODE (arg0) == PLUS_EXPR
10175 && TREE_CODE (arg1) != MULT_EXPR)
10176 {
10177 tree tree00 = TREE_OPERAND (arg0, 0);
10178 tree tree01 = TREE_OPERAND (arg0, 1);
10179 if (TREE_CODE (tree01) == MULT_EXPR
10180 && TREE_CODE (tree00) == MULT_EXPR)
10181 {
10182 tree tree0;
10183 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10184 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10185 }
10186 }
10187 }
10188
10189 bit_rotate:
10190 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10191 is a rotate of A by C1 bits. */
10192 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10193 is a rotate of A by B bits.
10194 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
10195 though in this case CODE must be | and not + or ^, otherwise
10196 it doesn't return A when B is 0. */
10197 {
10198 enum tree_code code0, code1;
10199 tree rtype;
10200 code0 = TREE_CODE (arg0);
10201 code1 = TREE_CODE (arg1);
10202 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10203 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10204 && operand_equal_p (TREE_OPERAND (arg0, 0),
10205 TREE_OPERAND (arg1, 0), 0)
10206 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10207 TYPE_UNSIGNED (rtype))
10208 /* Only create rotates in complete modes. Other cases are not
10209 expanded properly. */
10210 && (element_precision (rtype)
10211 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
10212 {
10213 tree tree01, tree11;
10214 tree orig_tree01, orig_tree11;
10215 enum tree_code code01, code11;
10216
10217 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
10218 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
10219 STRIP_NOPS (tree01);
10220 STRIP_NOPS (tree11);
10221 code01 = TREE_CODE (tree01);
10222 code11 = TREE_CODE (tree11);
10223 if (code11 != MINUS_EXPR
10224 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
10225 {
10226 std::swap (code0, code1);
10227 std::swap (code01, code11);
10228 std::swap (tree01, tree11);
10229 std::swap (orig_tree01, orig_tree11);
10230 }
10231 if (code01 == INTEGER_CST
10232 && code11 == INTEGER_CST
10233 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10234 == element_precision (rtype)))
10235 {
10236 tem = build2_loc (loc, LROTATE_EXPR,
10237 rtype, TREE_OPERAND (arg0, 0),
10238 code0 == LSHIFT_EXPR
10239 ? orig_tree01 : orig_tree11);
10240 return fold_convert_loc (loc, type, tem);
10241 }
10242 else if (code11 == MINUS_EXPR)
10243 {
10244 tree tree110, tree111;
10245 tree110 = TREE_OPERAND (tree11, 0);
10246 tree111 = TREE_OPERAND (tree11, 1);
10247 STRIP_NOPS (tree110);
10248 STRIP_NOPS (tree111);
10249 if (TREE_CODE (tree110) == INTEGER_CST
10250 && compare_tree_int (tree110,
10251 element_precision (rtype)) == 0
10252 && operand_equal_p (tree01, tree111, 0))
10253 {
10254 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10255 ? LROTATE_EXPR : RROTATE_EXPR),
10256 rtype, TREE_OPERAND (arg0, 0),
10257 orig_tree01);
10258 return fold_convert_loc (loc, type, tem);
10259 }
10260 }
10261 else if (code == BIT_IOR_EXPR
10262 && code11 == BIT_AND_EXPR
10263 && pow2p_hwi (element_precision (rtype)))
10264 {
10265 tree tree110, tree111;
10266 tree110 = TREE_OPERAND (tree11, 0);
10267 tree111 = TREE_OPERAND (tree11, 1);
10268 STRIP_NOPS (tree110);
10269 STRIP_NOPS (tree111);
10270 if (TREE_CODE (tree110) == NEGATE_EXPR
10271 && TREE_CODE (tree111) == INTEGER_CST
10272 && compare_tree_int (tree111,
10273 element_precision (rtype) - 1) == 0
10274 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
10275 {
10276 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10277 ? LROTATE_EXPR : RROTATE_EXPR),
10278 rtype, TREE_OPERAND (arg0, 0),
10279 orig_tree01);
10280 return fold_convert_loc (loc, type, tem);
10281 }
10282 }
10283 }
10284 }
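/* Illustrative sketch (editor's example, not part of GCC): the
   recognition above turns the portable rotate idiom, for a 32-bit
   unsigned int a,

     (a << b) | (a >> (-b & 31))

   into a single LROTATE_EXPR of a by b.  When b is 0 the idiom
   must still yield a, which only | guarantees (a | a == a); with
   + or ^ it would yield 2*a or 0, which is why this form is
   accepted only for BIT_IOR_EXPR.  */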
10285
10286 associate:
10287 /* In most languages, we can't associate operations on floats through
10288 parentheses. Rather than remember where the parentheses were, we
10289 don't associate floats at all, unless the user has specified
10290 -fassociative-math.
10291 We also need to make sure the type is not saturating. */
10292
10293 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10294 && !TYPE_SATURATING (type))
10295 {
10296 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
10297 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
10298 tree atype = type;
10299 bool ok = true;
10300
10301 /* Split both trees into variables, constants, and literals. Then
10302 associate each group together, the constants with literals,
10303 then the result with variables. This increases the chances of
10304 literals being recombined later and of generating relocatable
10305 expressions for the sum of a constant and literal. */
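/* E.g. for a wrapping type such as unsigned int,
(X + 1) + (Y + 2) splits into variables X, Y and literals 1, 2,
which recombine below as (X + Y) + 3.  */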
10306 var0 = split_tree (arg0, type, code,
10307 &minus_var0, &con0, &minus_con0,
10308 &lit0, &minus_lit0, 0);
10309 var1 = split_tree (arg1, type, code,
10310 &minus_var1, &con1, &minus_con1,
10311 &lit1, &minus_lit1, code == MINUS_EXPR);
10312
10313 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10314 if (code == MINUS_EXPR)
10315 code = PLUS_EXPR;
10316
10317 /* With undefined overflow prefer doing association in a type
10318 which wraps on overflow, if that is one of the operand types. */
10319 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
10320 && !TYPE_OVERFLOW_WRAPS (type))
10321 {
10322 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10323 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10324 atype = TREE_TYPE (arg0);
10325 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10326 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10327 atype = TREE_TYPE (arg1);
10328 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10329 }
10330
10331 /* With undefined overflow we can only associate constants with one
10332 variable, and constants whose association doesn't overflow. */
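/* E.g. for signed int, (X + 1) + (2 - X) is still associated
because the variable cancels, yielding 3, whereas
(X + 1) + (Y + 2) is rejected here and left alone.  */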
10333 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
10334 && !TYPE_OVERFLOW_WRAPS (atype))
10335 {
10336 if ((var0 && var1) || (minus_var0 && minus_var1))
10337 {
10338 /* ??? If split_tree would handle NEGATE_EXPR we could
10339 simply reject these cases and the allowed cases would
10340 be the var0/minus_var1 ones. */
10341 tree tmp0 = var0 ? var0 : minus_var0;
10342 tree tmp1 = var1 ? var1 : minus_var1;
10343 bool one_neg = false;
10344
10345 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10346 {
10347 tmp0 = TREE_OPERAND (tmp0, 0);
10348 one_neg = !one_neg;
10349 }
10350 if (CONVERT_EXPR_P (tmp0)
10351 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10352 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10353 <= TYPE_PRECISION (atype)))
10354 tmp0 = TREE_OPERAND (tmp0, 0);
10355 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10356 {
10357 tmp1 = TREE_OPERAND (tmp1, 0);
10358 one_neg = !one_neg;
10359 }
10360 if (CONVERT_EXPR_P (tmp1)
10361 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10362 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10363 <= TYPE_PRECISION (atype)))
10364 tmp1 = TREE_OPERAND (tmp1, 0);
10365 /* The only case we can still associate with two variables
10366 is if they cancel out. */
10367 if (!one_neg
10368 || !operand_equal_p (tmp0, tmp1, 0))
10369 ok = false;
10370 }
10371 else if ((var0 && minus_var1
10372 && ! operand_equal_p (var0, minus_var1, 0))
10373 || (minus_var0 && var1
10374 && ! operand_equal_p (minus_var0, var1, 0)))
10375 ok = false;
10376 }
10377
10378 /* Only do something if we found more than two objects. Otherwise,
10379 nothing has changed and we risk infinite recursion. */
10380 if (ok
10381 && ((var0 != 0) + (var1 != 0)
10382 + (minus_var0 != 0) + (minus_var1 != 0)
10383 + (con0 != 0) + (con1 != 0)
10384 + (minus_con0 != 0) + (minus_con1 != 0)
10385 + (lit0 != 0) + (lit1 != 0)
10386 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
10387 {
10388 var0 = associate_trees (loc, var0, var1, code, atype);
10389 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
10390 code, atype);
10391 con0 = associate_trees (loc, con0, con1, code, atype);
10392 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
10393 code, atype);
10394 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10395 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10396 code, atype);
10397
10398 if (minus_var0 && var0)
10399 {
10400 var0 = associate_trees (loc, var0, minus_var0,
10401 MINUS_EXPR, atype);
10402 minus_var0 = 0;
10403 }
10404 if (minus_con0 && con0)
10405 {
10406 con0 = associate_trees (loc, con0, minus_con0,
10407 MINUS_EXPR, atype);
10408 minus_con0 = 0;
10409 }
10410
10411 /* Preserve the MINUS_EXPR if the negative part of the literal is
10412 greater than the positive part. Otherwise, the multiplicative
10413 folding code (i.e. extract_muldiv) may be fooled when
10414 unsigned constants are subtracted, as in the following
10415 example: ((X*2 + 4) - 8U)/2. */
10416 if (minus_lit0 && lit0)
10417 {
10418 if (TREE_CODE (lit0) == INTEGER_CST
10419 && TREE_CODE (minus_lit0) == INTEGER_CST
10420 && tree_int_cst_lt (lit0, minus_lit0)
10421 /* But avoid ending up with only negated parts. */
10422 && (var0 || con0))
10423 {
10424 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10425 MINUS_EXPR, atype);
10426 lit0 = 0;
10427 }
10428 else
10429 {
10430 lit0 = associate_trees (loc, lit0, minus_lit0,
10431 MINUS_EXPR, atype);
10432 minus_lit0 = 0;
10433 }
10434 }
10435
10436 /* Don't introduce overflows through reassociation. */
10437 if ((lit0 && TREE_OVERFLOW_P (lit0))
10438 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
10439 return NULL_TREE;
10440
10441 /* Fold lit0 and minus_lit0 into con0 and minus_con0. */
10442 con0 = associate_trees (loc, con0, lit0, code, atype);
10443 lit0 = 0;
10444 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
10445 code, atype);
10446 minus_lit0 = 0;
10447
10448 /* Eliminate minus_con0. */
10449 if (minus_con0)
10450 {
10451 if (con0)
10452 con0 = associate_trees (loc, con0, minus_con0,
10453 MINUS_EXPR, atype);
10454 else if (var0)
10455 var0 = associate_trees (loc, var0, minus_con0,
10456 MINUS_EXPR, atype);
10457 else
10458 gcc_unreachable ();
10459 minus_con0 = 0;
10460 }
10461
10462 /* Eliminate minus_var0. */
10463 if (minus_var0)
10464 {
10465 if (con0)
10466 con0 = associate_trees (loc, con0, minus_var0,
10467 MINUS_EXPR, atype);
10468 else
10469 gcc_unreachable ();
10470 minus_var0 = 0;
10471 }
10472
10473 return
10474 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10475 code, atype));
10476 }
10477 }
10478
10479 return NULL_TREE;
10480
10481 case POINTER_DIFF_EXPR:
10482 case MINUS_EXPR:
10483 /* Fold &a[i] - &a[j] to i-j. */
10484 if (TREE_CODE (arg0) == ADDR_EXPR
10485 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10486 && TREE_CODE (arg1) == ADDR_EXPR
10487 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10488 {
10489 tree tem = fold_addr_of_array_ref_difference (loc, type,
10490 TREE_OPERAND (arg0, 0),
10491 TREE_OPERAND (arg1, 0),
10492 code
10493 == POINTER_DIFF_EXPR);
10494 if (tem)
10495 return tem;
10496 }
10497
10498 /* The remaining transformations do not apply to pointers. */
10499 if (code == POINTER_DIFF_EXPR)
10500 return NULL_TREE;
10501
10502 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10503 if (TREE_CODE (arg0) == NEGATE_EXPR
10504 && negate_expr_p (op1)
10505 /* If arg0 is e.g. unsigned int and type is int, then this could
10506 introduce UB, because if A is INT_MIN at runtime, the original
10507 expression can be well defined while the latter is not.
10508 See PR83269. */
10509 && !(ANY_INTEGRAL_TYPE_P (type)
10510 && TYPE_OVERFLOW_UNDEFINED (type)
10511 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10512 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10513 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
10514 fold_convert_loc (loc, type,
10515 TREE_OPERAND (arg0, 0)));
10516
10517 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10518 __complex__ ( x, -y ). This is not the same for SNaNs or if
10519 signed zeros are involved. */
10520 if (!HONOR_SNANS (element_mode (arg0))
10521 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10522 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10523 {
10524 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10525 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10526 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10527 bool arg0rz = false, arg0iz = false;
10528 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10529 || (arg0i && (arg0iz = real_zerop (arg0i))))
10530 {
10531 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10532 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10533 if (arg0rz && arg1i && real_zerop (arg1i))
10534 {
10535 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10536 arg1r ? arg1r
10537 : build1 (REALPART_EXPR, rtype, arg1));
10538 tree ip = arg0i ? arg0i
10539 : build1 (IMAGPART_EXPR, rtype, arg0);
10540 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10541 }
10542 else if (arg0iz && arg1r && real_zerop (arg1r))
10543 {
10544 tree rp = arg0r ? arg0r
10545 : build1 (REALPART_EXPR, rtype, arg0);
10546 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10547 arg1i ? arg1i
10548 : build1 (IMAGPART_EXPR, rtype, arg1));
10549 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10550 }
10551 }
10552 }
10553
10554 /* A - B -> A + (-B) if B is easily negatable. */
10555 if (negate_expr_p (op1)
10556 && ! TYPE_OVERFLOW_SANITIZED (type)
10557 && ((FLOAT_TYPE_P (type)
10558 /* Avoid this transformation if B is a positive REAL_CST. */
10559 && (TREE_CODE (op1) != REAL_CST
10560 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
10561 || INTEGRAL_TYPE_P (type)))
10562 return fold_build2_loc (loc, PLUS_EXPR, type,
10563 fold_convert_loc (loc, type, arg0),
10564 negate_expr (op1));
10565
10566 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10567 one. Make sure the type is not saturating and has the signedness of
10568 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10569 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10570 if ((TREE_CODE (arg0) == MULT_EXPR
10571 || TREE_CODE (arg1) == MULT_EXPR)
10572 && !TYPE_SATURATING (type)
10573 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10574 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10575 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10576 {
10577 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10578 if (tem)
10579 return tem;
10580 }
10581
10582 goto associate;
10583
10584 case MULT_EXPR:
10585 if (! FLOAT_TYPE_P (type))
10586 {
10587 /* Transform x * -C into -x * C if x is easily negatable. */
10588 if (TREE_CODE (op1) == INTEGER_CST
10589 && tree_int_cst_sgn (op1) == -1
10590 && negate_expr_p (op0)
10591 && negate_expr_p (op1)
10592 && (tem = negate_expr (op1)) != op1
10593 && ! TREE_OVERFLOW (tem))
10594 return fold_build2_loc (loc, MULT_EXPR, type,
10595 fold_convert_loc (loc, type,
10596 negate_expr (op0)), tem);
10597
10598 strict_overflow_p = false;
10599 if (TREE_CODE (arg1) == INTEGER_CST
10600 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10601 &strict_overflow_p)) != 0)
10602 {
10603 if (strict_overflow_p)
10604 fold_overflow_warning (("assuming signed overflow does not "
10605 "occur when simplifying "
10606 "multiplication"),
10607 WARN_STRICT_OVERFLOW_MISC);
10608 return fold_convert_loc (loc, type, tem);
10609 }
10610
10611 /* Optimize z * conj(z) for integer complex numbers. */
10612 if (TREE_CODE (arg0) == CONJ_EXPR
10613 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10614 return fold_mult_zconjz (loc, type, arg1);
10615 if (TREE_CODE (arg1) == CONJ_EXPR
10616 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10617 return fold_mult_zconjz (loc, type, arg0);
10618 }
10619 else
10620 {
10621 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10622 This is not the same for NaNs or if signed zeros are
10623 involved. */
10624 if (!HONOR_NANS (arg0)
10625 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10626 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10627 && TREE_CODE (arg1) == COMPLEX_CST
10628 && real_zerop (TREE_REALPART (arg1)))
10629 {
10630 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10631 if (real_onep (TREE_IMAGPART (arg1)))
10632 return
10633 fold_build2_loc (loc, COMPLEX_EXPR, type,
10634 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10635 rtype, arg0)),
10636 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10637 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10638 return
10639 fold_build2_loc (loc, COMPLEX_EXPR, type,
10640 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10641 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10642 rtype, arg0)));
10643 }
10644
10645 /* Optimize z * conj(z) for floating point complex numbers.
10646 Guarded by flag_unsafe_math_optimizations as non-finite
10647 imaginary components don't produce scalar results. */
10648 if (flag_unsafe_math_optimizations
10649 && TREE_CODE (arg0) == CONJ_EXPR
10650 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10651 return fold_mult_zconjz (loc, type, arg1);
10652 if (flag_unsafe_math_optimizations
10653 && TREE_CODE (arg1) == CONJ_EXPR
10654 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10655 return fold_mult_zconjz (loc, type, arg0);
10656 }
10657 goto associate;
10658
10659 case BIT_IOR_EXPR:
10660 /* Canonicalize (X & C1) | C2. */
10661 if (TREE_CODE (arg0) == BIT_AND_EXPR
10662 && TREE_CODE (arg1) == INTEGER_CST
10663 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10664 {
10665 int width = TYPE_PRECISION (type), w;
10666 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
10667 wide_int c2 = wi::to_wide (arg1);
10668
10669 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10670 if ((c1 & c2) == c1)
10671 return omit_one_operand_loc (loc, type, arg1,
10672 TREE_OPERAND (arg0, 0));
10673
10674 wide_int msk = wi::mask (width, false,
10675 TYPE_PRECISION (TREE_TYPE (arg1)));
10676
10677 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10678 if (wi::bit_and_not (msk, c1 | c2) == 0)
10679 {
10680 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10681 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10682 }
10683
10684 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10685 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10686 mode which allows further optimizations. */
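/* E.g. (X & 0x33) | 0x0f becomes (X & 0x30) | 0x0f, while
(X & 0xff) | 0x0f is left alone because 0xff is a byte mask.  */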
10687 c1 &= msk;
10688 c2 &= msk;
10689 wide_int c3 = wi::bit_and_not (c1, c2);
10690 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10691 {
10692 wide_int mask = wi::mask (w, false,
10693 TYPE_PRECISION (type));
10694 if (((c1 | c2) & mask) == mask
10695 && wi::bit_and_not (c1, mask) == 0)
10696 {
10697 c3 = mask;
10698 break;
10699 }
10700 }
10701
10702 if (c3 != c1)
10703 {
10704 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10705 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
10706 wide_int_to_tree (type, c3));
10707 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10708 }
10709 }
10710
10711 /* See if this can be simplified into a rotate first. If that
10712 is unsuccessful continue in the association code. */
10713 goto bit_rotate;
10714
10715 case BIT_XOR_EXPR:
10716 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10717 if (TREE_CODE (arg0) == BIT_AND_EXPR
10718 && INTEGRAL_TYPE_P (type)
10719 && integer_onep (TREE_OPERAND (arg0, 1))
10720 && integer_onep (arg1))
10721 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10722 build_zero_cst (TREE_TYPE (arg0)));
10723
10724 /* See if this can be simplified into a rotate first. If that
10725 is unsuccessful continue in the association code. */
10726 goto bit_rotate;
10727
10728 case BIT_AND_EXPR:
10729 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10730 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10731 && INTEGRAL_TYPE_P (type)
10732 && integer_onep (TREE_OPERAND (arg0, 1))
10733 && integer_onep (arg1))
10734 {
10735 tree tem2;
10736 tem = TREE_OPERAND (arg0, 0);
10737 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10738 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10739 tem, tem2);
10740 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10741 build_zero_cst (TREE_TYPE (tem)));
10742 }
10743 /* Fold ~X & 1 as (X & 1) == 0. */
10744 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10745 && INTEGRAL_TYPE_P (type)
10746 && integer_onep (arg1))
10747 {
10748 tree tem2;
10749 tem = TREE_OPERAND (arg0, 0);
10750 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10751 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10752 tem, tem2);
10753 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10754 build_zero_cst (TREE_TYPE (tem)));
10755 }
10756 /* Fold !X & 1 as X == 0. */
10757 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10758 && integer_onep (arg1))
10759 {
10760 tem = TREE_OPERAND (arg0, 0);
10761 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10762 build_zero_cst (TREE_TYPE (tem)));
10763 }
10764
10765 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10766 multiple of 1 << CST. */
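/* E.g. (X * 4) & -4 folds to X * 4: -4 == -(1 << 2), and X * 4
is always a multiple of 1 << 2.  */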
10767 if (TREE_CODE (arg1) == INTEGER_CST)
10768 {
10769 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10770 wide_int ncst1 = -cst1;
10771 if ((cst1 & ncst1) == ncst1
10772 && multiple_of_p (type, arg0,
10773 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10774 return fold_convert_loc (loc, type, arg0);
10775 }
10776
10777 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10778 bits from CST2. */
10779 if (TREE_CODE (arg1) == INTEGER_CST
10780 && TREE_CODE (arg0) == MULT_EXPR
10781 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10782 {
10783 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
10784 wide_int masked
10785 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
10786
10787 if (masked == 0)
10788 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10789 arg0, arg1);
10790 else if (masked != warg1)
10791 {
10792 /* Avoid the transform if arg1 is a mask of some
10793 mode which allows further optimizations. */
10794 int pop = wi::popcount (warg1);
10795 if (!(pop >= BITS_PER_UNIT
10796 && pow2p_hwi (pop)
10797 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10798 return fold_build2_loc (loc, code, type, op0,
10799 wide_int_to_tree (type, masked));
10800 }
10801 }
10802
10803 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10804 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10805 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10806 {
10807 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10808
10809 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10810 if (mask == -1)
10811 return
10812 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10813 }
10814
10815 goto associate;
10816
10817 case RDIV_EXPR:
10818 /* Don't touch a floating-point divide by zero unless the mode
10819 of the constant can represent infinity. */
10820 if (TREE_CODE (arg1) == REAL_CST
10821 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10822 && real_zerop (arg1))
10823 return NULL_TREE;
10824
10825 /* (-A) / (-B) -> A / B */
10826 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10827 return fold_build2_loc (loc, RDIV_EXPR, type,
10828 TREE_OPERAND (arg0, 0),
10829 negate_expr (arg1));
10830 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10831 return fold_build2_loc (loc, RDIV_EXPR, type,
10832 negate_expr (arg0),
10833 TREE_OPERAND (arg1, 0));
10834 return NULL_TREE;
10835
10836 case TRUNC_DIV_EXPR:
10837 /* Fall through */
10838
10839 case FLOOR_DIV_EXPR:
10840 /* Simplify A / (B << N) where A and B are positive and B is
10841 a power of 2, to A >> (N + log2(B)). */
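/* E.g. unsigned A / (4 << N) becomes A >> (N + 2), since
log2 (4) == 2.  */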
10842 strict_overflow_p = false;
10843 if (TREE_CODE (arg1) == LSHIFT_EXPR
10844 && (TYPE_UNSIGNED (type)
10845 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10846 {
10847 tree sval = TREE_OPERAND (arg1, 0);
10848 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10849 {
10850 tree sh_cnt = TREE_OPERAND (arg1, 1);
10851 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10852 wi::exact_log2 (wi::to_wide (sval)));
10853
10854 if (strict_overflow_p)
10855 fold_overflow_warning (("assuming signed overflow does not "
10856 "occur when simplifying A / (B << N)"),
10857 WARN_STRICT_OVERFLOW_MISC);
10858
10859 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10860 sh_cnt, pow2);
10861 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10862 fold_convert_loc (loc, type, arg0), sh_cnt);
10863 }
10864 }
10865
10866 /* Fall through */
10867
10868 case ROUND_DIV_EXPR:
10869 case CEIL_DIV_EXPR:
10870 case EXACT_DIV_EXPR:
10871 if (integer_zerop (arg1))
10872 return NULL_TREE;
10873
10874 /* Convert -A / -B to A / B when the type is signed and overflow is
10875 undefined. */
10876 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10877 && TREE_CODE (op0) == NEGATE_EXPR
10878 && negate_expr_p (op1))
10879 {
10880 if (INTEGRAL_TYPE_P (type))
10881 fold_overflow_warning (("assuming signed overflow does not occur "
10882 "when distributing negation across "
10883 "division"),
10884 WARN_STRICT_OVERFLOW_MISC);
10885 return fold_build2_loc (loc, code, type,
10886 fold_convert_loc (loc, type,
10887 TREE_OPERAND (arg0, 0)),
10888 negate_expr (op1));
10889 }
10890 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10891 && TREE_CODE (arg1) == NEGATE_EXPR
10892 && negate_expr_p (op0))
10893 {
10894 if (INTEGRAL_TYPE_P (type))
10895 fold_overflow_warning (("assuming signed overflow does not occur "
10896 "when distributing negation across "
10897 "division"),
10898 WARN_STRICT_OVERFLOW_MISC);
10899 return fold_build2_loc (loc, code, type,
10900 negate_expr (op0),
10901 fold_convert_loc (loc, type,
10902 TREE_OPERAND (arg1, 0)));
10903 }
10904
10905 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10906 operation, EXACT_DIV_EXPR.
10907
10908 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10909 At one time others generated faster code; it's not clear if they do
10910 after the last round of changes to the DIV code in expmed.c. */
10911 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10912 && multiple_of_p (type, arg0, arg1))
10913 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10914 fold_convert (type, arg0),
10915 fold_convert (type, arg1));
10916
10917 strict_overflow_p = false;
10918 if (TREE_CODE (arg1) == INTEGER_CST
10919 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10920 &strict_overflow_p)) != 0)
10921 {
10922 if (strict_overflow_p)
10923 fold_overflow_warning (("assuming signed overflow does not occur "
10924 "when simplifying division"),
10925 WARN_STRICT_OVERFLOW_MISC);
10926 return fold_convert_loc (loc, type, tem);
10927 }
10928
10929 return NULL_TREE;
10930
10931 case CEIL_MOD_EXPR:
10932 case FLOOR_MOD_EXPR:
10933 case ROUND_MOD_EXPR:
10934 case TRUNC_MOD_EXPR:
10935 strict_overflow_p = false;
10936 if (TREE_CODE (arg1) == INTEGER_CST
10937 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10938 &strict_overflow_p)) != 0)
10939 {
10940 if (strict_overflow_p)
10941 fold_overflow_warning (("assuming signed overflow does not occur "
10942 "when simplifying modulus"),
10943 WARN_STRICT_OVERFLOW_MISC);
10944 return fold_convert_loc (loc, type, tem);
10945 }
10946
10947 return NULL_TREE;
10948
10949 case LROTATE_EXPR:
10950 case RROTATE_EXPR:
10951 case RSHIFT_EXPR:
10952 case LSHIFT_EXPR:
10953 /* Since a negative shift count is not well-defined,
10954 don't try to compute it in the compiler. */
10955 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10956 return NULL_TREE;
10957
10958 prec = element_precision (type);
10959
10960 /* If we have a rotate of a bit operation with the rotate count and
10961 the second operand of the bit operation both constant,
10962 permute the two operations. */
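/* E.g. for 32-bit unsigned X, rotating (X & 0xff00) right by 8
becomes (X r>> 8) & 0xff, i.e. the constant is rotated along
with X.  */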
10963 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10964 && (TREE_CODE (arg0) == BIT_AND_EXPR
10965 || TREE_CODE (arg0) == BIT_IOR_EXPR
10966 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10967 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10968 {
10969 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10970 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10971 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10972 fold_build2_loc (loc, code, type,
10973 arg00, arg1),
10974 fold_build2_loc (loc, code, type,
10975 arg01, arg1));
10976 }
10977
10978 /* Two consecutive rotates adding up to some integer
10979 multiple of the precision of the type can be ignored. */
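/* E.g. (X r>> 8) r>> 24 is simply X when X is 32 bits wide,
since 8 + 24 == 32.  */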
10980 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10981 && TREE_CODE (arg0) == RROTATE_EXPR
10982 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10983 && wi::umod_trunc (wi::to_wide (arg1)
10984 + wi::to_wide (TREE_OPERAND (arg0, 1)),
10985 prec) == 0)
10986 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10987
10988 return NULL_TREE;
10989
10990 case MIN_EXPR:
10991 case MAX_EXPR:
10992 goto associate;
10993
10994 case TRUTH_ANDIF_EXPR:
10995 /* Note that the operands of this must be ints
10996 and their values must be 0 or 1.
10997 ("true" is a fixed value perhaps depending on the language.) */
10998 /* If first arg is constant zero, return it. */
10999 if (integer_zerop (arg0))
11000 return fold_convert_loc (loc, type, arg0);
11001 /* FALLTHRU */
11002 case TRUTH_AND_EXPR:
11003 /* If either arg is constant true, drop it. */
11004 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11005 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11006 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11007 /* Preserve sequence points. */
11008 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11009 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11010 /* If second arg is constant zero, result is zero, but first arg
11011 must be evaluated. */
11012 if (integer_zerop (arg1))
11013 return omit_one_operand_loc (loc, type, arg1, arg0);
11014 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11015 case will be handled here. */
11016 if (integer_zerop (arg0))
11017 return omit_one_operand_loc (loc, type, arg0, arg1);
11018
11019 /* !X && X is always false. */
11020 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11021 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11022 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11023 /* X && !X is always false. */
11024 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11025 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11026 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11027
11028 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11029 means A >= Y && A != MAX, but in this case we know that
11030 A < X <= MAX. */
11031
11032 if (!TREE_SIDE_EFFECTS (arg0)
11033 && !TREE_SIDE_EFFECTS (arg1))
11034 {
11035 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11036 if (tem && !operand_equal_p (tem, arg0, 0))
11037 return fold_build2_loc (loc, code, type, tem, arg1);
11038
11039 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11040 if (tem && !operand_equal_p (tem, arg1, 0))
11041 return fold_build2_loc (loc, code, type, arg0, tem);
11042 }
11043
11044 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11045 != NULL_TREE)
11046 return tem;
11047
11048 return NULL_TREE;
11049
11050 case TRUTH_ORIF_EXPR:
11051 /* Note that the operands of this must be ints
11052 and their values must be 0 or true.
11053 ("true" is a fixed value perhaps depending on the language.) */
11054 /* If first arg is constant true, return it. */
11055 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11056 return fold_convert_loc (loc, type, arg0);
11057 /* FALLTHRU */
11058 case TRUTH_OR_EXPR:
11059 /* If either arg is constant zero, drop it. */
11060 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11061 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11062 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11063 /* Preserve sequence points. */
11064 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11065 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11066 /* If second arg is constant true, result is true, but we must
11067 evaluate first arg. */
11068 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11069 return omit_one_operand_loc (loc, type, arg1, arg0);
11070 /* Likewise for first arg, but note this only occurs here for
11071 TRUTH_OR_EXPR. */
11072 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11073 return omit_one_operand_loc (loc, type, arg0, arg1);
11074
11075 /* !X || X is always true. */
11076 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11077 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11078 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11079 /* X || !X is always true. */
11080 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11081 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11082 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11083
11084 /* (X && !Y) || (!X && Y) is X ^ Y */
11085 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11086 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11087 {
11088 tree a0, a1, l0, l1, n0, n1;
11089
11090 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11091 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11092
11093 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11094 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11095
11096 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11097 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11098
11099 if ((operand_equal_p (n0, a0, 0)
11100 && operand_equal_p (n1, a1, 0))
11101 || (operand_equal_p (n0, a1, 0)
11102 && operand_equal_p (n1, a0, 0)))
11103 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
11104 }
11105
11106 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11107 != NULL_TREE)
11108 return tem;
11109
11110 return NULL_TREE;
11111
11112 case TRUTH_XOR_EXPR:
11113 /* If the second arg is constant zero, drop it. */
11114 if (integer_zerop (arg1))
11115 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11116 /* If the second arg is constant true, this is a logical inversion. */
11117 if (integer_onep (arg1))
11118 {
11119 tem = invert_truthvalue_loc (loc, arg0);
11120 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11121 }
11122 /* Identical arguments cancel to zero. */
11123 if (operand_equal_p (arg0, arg1, 0))
11124 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11125
11126 /* !X ^ X is always true. */
11127 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11128 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11129 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11130
11131 /* X ^ !X is always true. */
11132 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11133 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11134 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11135
11136 return NULL_TREE;
11137
11138 case EQ_EXPR:
11139 case NE_EXPR:
11140 STRIP_NOPS (arg0);
11141 STRIP_NOPS (arg1);
11142
11143 tem = fold_comparison (loc, code, type, op0, op1);
11144 if (tem != NULL_TREE)
11145 return tem;
11146
11147 /* bool_var != 1 becomes !bool_var. */
11148 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11149 && code == NE_EXPR)
11150 return fold_convert_loc (loc, type,
11151 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11152 TREE_TYPE (arg0), arg0));
11153
11154 /* bool_var == 0 becomes !bool_var. */
11155 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11156 && code == EQ_EXPR)
11157 return fold_convert_loc (loc, type,
11158 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11159 TREE_TYPE (arg0), arg0));
11160
11161 /* !exp != 0 becomes !exp */
11162 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11163 && code == NE_EXPR)
11164 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11165
11166 /* If this is an EQ or NE comparison with zero and ARG0 is
11167 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11168 two operations, but the latter can be done in one less insn
11169 on machines that have only two-operand insns or on which a
11170 constant cannot be the first operand. */
11171 if (TREE_CODE (arg0) == BIT_AND_EXPR
11172 && integer_zerop (arg1))
11173 {
11174 tree arg00 = TREE_OPERAND (arg0, 0);
11175 tree arg01 = TREE_OPERAND (arg0, 1);
11176 if (TREE_CODE (arg00) == LSHIFT_EXPR
11177 && integer_onep (TREE_OPERAND (arg00, 0)))
11178 {
11179 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11180 arg01, TREE_OPERAND (arg00, 1));
11181 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11182 build_int_cst (TREE_TYPE (arg0), 1));
11183 return fold_build2_loc (loc, code, type,
11184 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11185 arg1);
11186 }
11187 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11188 && integer_onep (TREE_OPERAND (arg01, 0)))
11189 {
11190 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
11191 arg00, TREE_OPERAND (arg01, 1));
11192 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11193 build_int_cst (TREE_TYPE (arg0), 1));
11194 return fold_build2_loc (loc, code, type,
11195 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11196 arg1);
11197 }
11198 }
11199
11200 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11201 C1 is a valid shift constant, and C2 is a power of two, i.e.
11202 a single bit. */
11203 if (TREE_CODE (arg0) == BIT_AND_EXPR
11204 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11205 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11206 == INTEGER_CST
11207 && integer_pow2p (TREE_OPERAND (arg0, 1))
11208 && integer_zerop (arg1))
11209 {
11210 tree itype = TREE_TYPE (arg0);
11211 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11212 prec = TYPE_PRECISION (itype);
11213
11214 /* Check for a valid shift count. */
11215 if (wi::ltu_p (wi::to_wide (arg001), prec))
11216 {
11217 tree arg01 = TREE_OPERAND (arg0, 1);
11218 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11219 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11220 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11221 can be rewritten as (X & (C2 << C1)) != 0. */
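/* E.g. ((X >> 3) & 4) != 0 becomes (X & 32) != 0, testing
bit 5 of X directly.  */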
11222 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11223 {
11224 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
11225 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
11226 return fold_build2_loc (loc, code, type, tem,
11227 fold_convert_loc (loc, itype, arg1));
11228 }
11229 /* Otherwise, for signed (arithmetic) shifts,
11230 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11231 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11232 else if (!TYPE_UNSIGNED (itype))
11233 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11234 arg000, build_int_cst (itype, 0));
11235 /* Otherwise, for unsigned (logical) shifts,
11236 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11237 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11238 else
11239 return omit_one_operand_loc (loc, type,
11240 code == EQ_EXPR ? integer_one_node
11241 : integer_zero_node,
11242 arg000);
11243 }
11244 }
11245
11246 /* If this is a comparison of a field, we may be able to simplify it. */
11247 if ((TREE_CODE (arg0) == COMPONENT_REF
11248 || TREE_CODE (arg0) == BIT_FIELD_REF)
11249 /* Handle the constant case even without -O
11250 to make sure the warnings are given. */
11251 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11252 {
11253 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
11254 if (t1)
11255 return t1;
11256 }
11257
11258 /* Optimize comparisons of strlen vs zero to a compare of the
11259 first character of the string vs zero. To wit,
11260 strlen(ptr) == 0 => *ptr == 0
11261 strlen(ptr) != 0 => *ptr != 0
11262 Other cases should reduce to one of these two (or a constant)
11263 due to the return value of strlen being unsigned. */
11264 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
11265 {
11266 tree fndecl = get_callee_fndecl (arg0);
11267
11268 if (fndecl
11269 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
11270 && call_expr_nargs (arg0) == 1
11271 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
11272 == POINTER_TYPE))
11273 {
11274 tree ptrtype
11275 = build_pointer_type (build_qualified_type (char_type_node,
11276 TYPE_QUAL_CONST));
11277 tree ptr = fold_convert_loc (loc, ptrtype,
11278 CALL_EXPR_ARG (arg0, 0));
11279 tree iref = build_fold_indirect_ref_loc (loc, ptr);
11280 return fold_build2_loc (loc, code, type, iref,
11281 build_int_cst (TREE_TYPE (iref), 0));
11282 }
11283 }
11284
11285 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11286 of X. Similarly fold (X >> C) == 0 into X >= 0. */
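/* E.g. for 32-bit int X, (X >> 31) != 0 becomes X < 0: the
arithmetic shift by precision - 1 leaves only the sign bit.  */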
11287 if (TREE_CODE (arg0) == RSHIFT_EXPR
11288 && integer_zerop (arg1)
11289 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11290 {
11291 tree arg00 = TREE_OPERAND (arg0, 0);
11292 tree arg01 = TREE_OPERAND (arg0, 1);
11293 tree itype = TREE_TYPE (arg00);
11294 if (wi::to_wide (arg01) == element_precision (itype) - 1)
11295 {
11296 if (TYPE_UNSIGNED (itype))
11297 {
11298 itype = signed_type_for (itype);
11299 arg00 = fold_convert_loc (loc, itype, arg00);
11300 }
11301 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11302 type, arg00, build_zero_cst (itype));
11303 }
11304 }
11305
11306 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11307 (X & C) == 0 when C is a single bit. */
11308 if (TREE_CODE (arg0) == BIT_AND_EXPR
11309 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11310 && integer_zerop (arg1)
11311 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11312 {
11313 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11314 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11315 TREE_OPERAND (arg0, 1));
11316 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11317 type, tem,
11318 fold_convert_loc (loc, TREE_TYPE (arg0),
11319 arg1));
11320 }
11321
11322 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11323 constant C is a power of two, i.e. a single bit. */
11324 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11325 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11326 && integer_zerop (arg1)
11327 && integer_pow2p (TREE_OPERAND (arg0, 1))
11328 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11329 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11330 {
11331 tree arg00 = TREE_OPERAND (arg0, 0);
11332 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11333 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11334 }
11335
11336 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11337 when C is a power of two, i.e. a single bit. */
11338 if (TREE_CODE (arg0) == BIT_AND_EXPR
11339 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11340 && integer_zerop (arg1)
11341 && integer_pow2p (TREE_OPERAND (arg0, 1))
11342 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11343 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11344 {
11345 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11346 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11347 arg000, TREE_OPERAND (arg0, 1));
11348 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11349 tem, build_int_cst (TREE_TYPE (tem), 0));
11350 }
11351
11352 if (integer_zerop (arg1)
11353 && tree_expr_nonzero_p (arg0))
11354 {
11355 tree res = constant_boolean_node (code == NE_EXPR, type);
11356 return omit_one_operand_loc (loc, type, res, arg0);
11357 }
11358
11359 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries. */
11360 if (TREE_CODE (arg0) == BIT_AND_EXPR
11361 && TREE_CODE (arg1) == BIT_AND_EXPR)
11362 {
11363 tree arg00 = TREE_OPERAND (arg0, 0);
11364 tree arg01 = TREE_OPERAND (arg0, 1);
11365 tree arg10 = TREE_OPERAND (arg1, 0);
11366 tree arg11 = TREE_OPERAND (arg1, 1);
11367 tree itype = TREE_TYPE (arg0);
11368
11369 if (operand_equal_p (arg01, arg11, 0))
11370 {
11371 tem = fold_convert_loc (loc, itype, arg10);
11372 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11373 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
11374 return fold_build2_loc (loc, code, type, tem,
11375 build_zero_cst (itype));
11376 }
11377 if (operand_equal_p (arg01, arg10, 0))
11378 {
11379 tem = fold_convert_loc (loc, itype, arg11);
11380 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11381 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
11382 return fold_build2_loc (loc, code, type, tem,
11383 build_zero_cst (itype));
11384 }
11385 if (operand_equal_p (arg00, arg11, 0))
11386 {
11387 tem = fold_convert_loc (loc, itype, arg10);
11388 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
11389 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
11390 return fold_build2_loc (loc, code, type, tem,
11391 build_zero_cst (itype));
11392 }
11393 if (operand_equal_p (arg00, arg10, 0))
11394 {
11395 tem = fold_convert_loc (loc, itype, arg11);
11396 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
11397 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
11398 return fold_build2_loc (loc, code, type, tem,
11399 build_zero_cst (itype));
11400 }
11401 }
11402
11403 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11404 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11405 {
11406 tree arg00 = TREE_OPERAND (arg0, 0);
11407 tree arg01 = TREE_OPERAND (arg0, 1);
11408 tree arg10 = TREE_OPERAND (arg1, 0);
11409 tree arg11 = TREE_OPERAND (arg1, 1);
11410 tree itype = TREE_TYPE (arg0);
11411
11412 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11413 operand_equal_p guarantees no side-effects so we don't need
11414 to use omit_one_operand on Z. */
11415 if (operand_equal_p (arg01, arg11, 0))
11416 return fold_build2_loc (loc, code, type, arg00,
11417 fold_convert_loc (loc, TREE_TYPE (arg00),
11418 arg10));
11419 if (operand_equal_p (arg01, arg10, 0))
11420 return fold_build2_loc (loc, code, type, arg00,
11421 fold_convert_loc (loc, TREE_TYPE (arg00),
11422 arg11));
11423 if (operand_equal_p (arg00, arg11, 0))
11424 return fold_build2_loc (loc, code, type, arg01,
11425 fold_convert_loc (loc, TREE_TYPE (arg01),
11426 arg10));
11427 if (operand_equal_p (arg00, arg10, 0))
11428 return fold_build2_loc (loc, code, type, arg01,
11429 fold_convert_loc (loc, TREE_TYPE (arg01),
11430 arg11));
11431
11432 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11433 if (TREE_CODE (arg01) == INTEGER_CST
11434 && TREE_CODE (arg11) == INTEGER_CST)
11435 {
11436 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11437 fold_convert_loc (loc, itype, arg11));
11438 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11439 return fold_build2_loc (loc, code, type, tem,
11440 fold_convert_loc (loc, itype, arg10));
11441 }
11442 }
11443
11444 /* Attempt to simplify equality/inequality comparisons of complex
11445 values. Only lower the comparison if the result is known or
11446 can be simplified to a single scalar comparison. */
11447 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11448 || TREE_CODE (arg0) == COMPLEX_CST)
11449 && (TREE_CODE (arg1) == COMPLEX_EXPR
11450 || TREE_CODE (arg1) == COMPLEX_CST))
11451 {
11452 tree real0, imag0, real1, imag1;
11453 tree rcond, icond;
11454
11455 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11456 {
11457 real0 = TREE_OPERAND (arg0, 0);
11458 imag0 = TREE_OPERAND (arg0, 1);
11459 }
11460 else
11461 {
11462 real0 = TREE_REALPART (arg0);
11463 imag0 = TREE_IMAGPART (arg0);
11464 }
11465
11466 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11467 {
11468 real1 = TREE_OPERAND (arg1, 0);
11469 imag1 = TREE_OPERAND (arg1, 1);
11470 }
11471 else
11472 {
11473 real1 = TREE_REALPART (arg1);
11474 imag1 = TREE_IMAGPART (arg1);
11475 }
11476
11477 rcond = fold_binary_loc (loc, code, type, real0, real1);
11478 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11479 {
11480 if (integer_zerop (rcond))
11481 {
11482 if (code == EQ_EXPR)
11483 return omit_two_operands_loc (loc, type, boolean_false_node,
11484 imag0, imag1);
11485 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11486 }
11487 else
11488 {
11489 if (code == NE_EXPR)
11490 return omit_two_operands_loc (loc, type, boolean_true_node,
11491 imag0, imag1);
11492 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11493 }
11494 }
11495
11496 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11497 if (icond && TREE_CODE (icond) == INTEGER_CST)
11498 {
11499 if (integer_zerop (icond))
11500 {
11501 if (code == EQ_EXPR)
11502 return omit_two_operands_loc (loc, type, boolean_false_node,
11503 real0, real1);
11504 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11505 }
11506 else
11507 {
11508 if (code == NE_EXPR)
11509 return omit_two_operands_loc (loc, type, boolean_true_node,
11510 real0, real1);
11511 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11512 }
11513 }
11514 }
11515
11516 return NULL_TREE;
11517
11518 case LT_EXPR:
11519 case GT_EXPR:
11520 case LE_EXPR:
11521 case GE_EXPR:
11522 tem = fold_comparison (loc, code, type, op0, op1);
11523 if (tem != NULL_TREE)
11524 return tem;
11525
11526 /* Transform comparisons of the form X +- C CMP X. */
11527 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11528 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11529 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11530 && !HONOR_SNANS (arg0))
11531 {
11532 tree arg01 = TREE_OPERAND (arg0, 1);
11533 enum tree_code code0 = TREE_CODE (arg0);
11534 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11535
11536 /* (X - c) > X becomes false. */
11537 if (code == GT_EXPR
11538 && ((code0 == MINUS_EXPR && is_positive >= 0)
11539 || (code0 == PLUS_EXPR && is_positive <= 0)))
11540 return constant_boolean_node (0, type);
11541
11542 /* Likewise (X + c) < X becomes false. */
11543 if (code == LT_EXPR
11544 && ((code0 == PLUS_EXPR && is_positive >= 0)
11545 || (code0 == MINUS_EXPR && is_positive <= 0)))
11546 return constant_boolean_node (0, type);
11547
11548 /* Convert (X - c) <= X to true. */
11549 if (!HONOR_NANS (arg1)
11550 && code == LE_EXPR
11551 && ((code0 == MINUS_EXPR && is_positive >= 0)
11552 || (code0 == PLUS_EXPR && is_positive <= 0)))
11553 return constant_boolean_node (1, type);
11554
11555 /* Convert (X + c) >= X to true. */
11556 if (!HONOR_NANS (arg1)
11557 && code == GE_EXPR
11558 && ((code0 == PLUS_EXPR && is_positive >= 0)
11559 || (code0 == MINUS_EXPR && is_positive <= 0)))
11560 return constant_boolean_node (1, type);
11561 }
11562
11563 /* If we are comparing an ABS_EXPR with a constant, we can
11564 convert all the cases into explicit comparisons, but they may
11565 well not be faster than doing the ABS and one comparison.
11566 But ABS (X) <= C is a range comparison, which becomes a subtraction
11567 and a comparison, and is probably faster. */
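/* E.g. ABS (X) <= 5 becomes X >= -5 && X <= 5.  */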
11568 if (code == LE_EXPR
11569 && TREE_CODE (arg1) == INTEGER_CST
11570 && TREE_CODE (arg0) == ABS_EXPR
11571 && ! TREE_SIDE_EFFECTS (arg0)
11572 && (tem = negate_expr (arg1)) != 0
11573 && TREE_CODE (tem) == INTEGER_CST
11574 && !TREE_OVERFLOW (tem))
11575 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11576 build2 (GE_EXPR, type,
11577 TREE_OPERAND (arg0, 0), tem),
11578 build2 (LE_EXPR, type,
11579 TREE_OPERAND (arg0, 0), arg1));
11580
11581 /* Convert ABS_EXPR<x> >= 0 to true. */
11582 strict_overflow_p = false;
11583 if (code == GE_EXPR
11584 && (integer_zerop (arg1)
11585 || (! HONOR_NANS (arg0)
11586 && real_zerop (arg1)))
11587 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11588 {
11589 if (strict_overflow_p)
11590 fold_overflow_warning (("assuming signed overflow does not occur "
11591 "when simplifying comparison of "
11592 "absolute value and zero"),
11593 WARN_STRICT_OVERFLOW_CONDITIONAL);
11594 return omit_one_operand_loc (loc, type,
11595 constant_boolean_node (true, type),
11596 arg0);
11597 }
11598
11599 /* Convert ABS_EXPR<x> < 0 to false. */
11600 strict_overflow_p = false;
11601 if (code == LT_EXPR
11602 && (integer_zerop (arg1) || real_zerop (arg1))
11603 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11604 {
11605 if (strict_overflow_p)
11606 fold_overflow_warning (("assuming signed overflow does not occur "
11607 "when simplifying comparison of "
11608 "absolute value and zero"),
11609 WARN_STRICT_OVERFLOW_CONDITIONAL);
11610 return omit_one_operand_loc (loc, type,
11611 constant_boolean_node (false, type),
11612 arg0);
11613 }
11614
11615 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11616 and similarly for >= into !=. */
11617 if ((code == LT_EXPR || code == GE_EXPR)
11618 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11619 && TREE_CODE (arg1) == LSHIFT_EXPR
11620 && integer_onep (TREE_OPERAND (arg1, 0)))
11621 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11622 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11623 TREE_OPERAND (arg1, 1)),
11624 build_zero_cst (TREE_TYPE (arg0)));
11625
11626 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11627 otherwise Y might be >= # of bits in X's type and thus e.g.
11628 (unsigned char) (1 << Y) for Y == 15 might be 0.
11629 If the cast is widening, then 1 << Y should have unsigned type,
11630 otherwise if Y is the number of bits in the signed shift type minus 1,
11631 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11632 Y == 31 might be 0xffffffff80000000. */
11633 if ((code == LT_EXPR || code == GE_EXPR)
11634 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11635 && CONVERT_EXPR_P (arg1)
11636 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11637 && (element_precision (TREE_TYPE (arg1))
11638 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11639 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11640 || (element_precision (TREE_TYPE (arg1))
11641 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11642 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11643 {
11644 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11645 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11646 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11647 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11648 build_zero_cst (TREE_TYPE (arg0)));
11649 }
11650
11651 return NULL_TREE;
11652
11653 case UNORDERED_EXPR:
11654 case ORDERED_EXPR:
11655 case UNLT_EXPR:
11656 case UNLE_EXPR:
11657 case UNGT_EXPR:
11658 case UNGE_EXPR:
11659 case UNEQ_EXPR:
11660 case LTGT_EXPR:
11661 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11662 {
11663 tree targ0 = strip_float_extensions (arg0);
11664 tree targ1 = strip_float_extensions (arg1);
11665 tree newtype = TREE_TYPE (targ0);
11666
11667 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11668 newtype = TREE_TYPE (targ1);
11669
11670 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11671 return fold_build2_loc (loc, code, type,
11672 fold_convert_loc (loc, newtype, targ0),
11673 fold_convert_loc (loc, newtype, targ1));
11674 }
11675
11676 return NULL_TREE;
11677
11678 case COMPOUND_EXPR:
11679 /* When pedantic, a compound expression can be neither an lvalue
11680 nor an integer constant expression. */
11681 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11682 return NULL_TREE;
11683 /* Don't let (0, 0) be a null pointer constant. */
11684 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11685 : fold_convert_loc (loc, type, arg1);
11686 return pedantic_non_lvalue_loc (loc, tem);
11687
11688 case ASSERT_EXPR:
11689 /* An ASSERT_EXPR should never be passed to fold_binary. */
11690 gcc_unreachable ();
11691
11692 default:
11693 return NULL_TREE;
11694 } /* switch (code) */
11695 }
11696
11697 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11698 ((A & N) + B) & M -> (A + B) & M
11699 Similarly if (N & M) == 0,
11700 ((A | N) + B) & M -> (A + B) & M
11701 and for - instead of + (or unary - instead of +)
11702 and/or ^ instead of |.
11703 If B is constant and (B & M) == 0, fold into A & M.
11704
11705 This function is a helper for match.pd patterns. If any optimization
11706 is possible, return the non-NULL type in which the simplified
11707 operation should be performed; otherwise return NULL_TREE.
11708
11709 ARG1 is M above, and ARG00 is the left operand of +/-; if CODE00 is
11710 BIT_*_EXPR, ARG00{0,1} are that bitop's operands, else CODE00 is ERROR_MARK.
11711 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
11712 +/-. */
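/* For example, with M == 7 == (1 << 3) - 1, ((A & 7) + B) & 7
simplifies to (A + B) & 7, and so does ((A | 8) + B) & 7,
since 8 & 7 == 0.  */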
11713 tree
11714 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
11715 tree arg00, enum tree_code code00, tree arg000, tree arg001,
11716 tree arg01, enum tree_code code01, tree arg010, tree arg011,
11717 tree *pmop)
11718 {
11719 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
11720 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
11721 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11722 if (~cst1 == 0
11723 || (cst1 & (cst1 + 1)) != 0
11724 || !INTEGRAL_TYPE_P (type)
11725 || (!TYPE_OVERFLOW_WRAPS (type)
11726 && TREE_CODE (type) != INTEGER_TYPE)
11727 || (wi::max_value (type) & cst1) != cst1)
11728 return NULL_TREE;
11729
11730 enum tree_code codes[2] = { code00, code01 };
11731 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
11732 int which = 0;
11733 wide_int cst0;
11734
11735 /* Now we know that arg0 is (C + D) or (C - D) or -C and
11736 arg1 (M) == (1LL << cst) - 1.
11737 Store C into PMOP[0] and D into PMOP[1]. */
11738 pmop[0] = arg00;
11739 pmop[1] = arg01;
11740 which = code != NEGATE_EXPR;
11741
11742 for (; which >= 0; which--)
11743 switch (codes[which])
11744 {
11745 case BIT_AND_EXPR:
11746 case BIT_IOR_EXPR:
11747 case BIT_XOR_EXPR:
11748 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
11749 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
11750 if (codes[which] == BIT_AND_EXPR)
11751 {
11752 if (cst0 != cst1)
11753 break;
11754 }
11755 else if (cst0 != 0)
11756 break;
11757 /* If C or D is of the form (A & N) where
11758 (N & M) == M, or of the form (A | N) or
11759 (A ^ N) where (N & M) == 0, replace it with A. */
11760 pmop[which] = arg0xx[2 * which];
11761 break;
11762 case ERROR_MARK:
11763 if (TREE_CODE (pmop[which]) != INTEGER_CST)
11764 break;
11765 /* If C or D is a constant N where (N & M) == 0, it can be
11766 omitted (replaced with 0). */
11767 if ((code == PLUS_EXPR
11768 || (code == MINUS_EXPR && which == 0))
11769 && (cst1 & wi::to_wide (pmop[which])) == 0)
11770 pmop[which] = build_int_cst (type, 0);
11771 /* Similarly, with C - N where (-N & M) == 0. */
11772 if (code == MINUS_EXPR
11773 && which == 1
11774 && (cst1 & -wi::to_wide (pmop[which])) == 0)
11775 pmop[which] = build_int_cst (type, 0);
11776 break;
11777 default:
11778 gcc_unreachable ();
11779 }
11780
11781 /* Only build anything new if we optimized one or both arguments above. */
11782 if (pmop[0] == arg00 && pmop[1] == arg01)
11783 return NULL_TREE;
11784
11785 if (TYPE_OVERFLOW_WRAPS (type))
11786 return type;
11787 else
11788 return unsigned_type_for (type);
11789 }
11790
11791 /* Used by contains_label_p and contains_label_1. */
11792
11793 struct contains_label_data
11794 {
11795 hash_set<tree> *pset;
11796 bool inside_switch_p;
11797 };
11798
11799 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11800 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
11801 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
11802
11803 static tree
11804 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
11805 {
11806 contains_label_data *d = (contains_label_data *) data;
11807 switch (TREE_CODE (*tp))
11808 {
11809 case LABEL_EXPR:
11810 return *tp;
11811
11812 case CASE_LABEL_EXPR:
11813 if (!d->inside_switch_p)
11814 return *tp;
11815 return NULL_TREE;
11816
11817 case SWITCH_EXPR:
11818 if (!d->inside_switch_p)
11819 {
11820 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
11821 return *tp;
11822 d->inside_switch_p = true;
11823 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
11824 return *tp;
11825 d->inside_switch_p = false;
11826 *walk_subtrees = 0;
11827 }
11828 return NULL_TREE;
11829
11830 case GOTO_EXPR:
11831 *walk_subtrees = 0;
11832 return NULL_TREE;
11833
11834 default:
11835 return NULL_TREE;
11836 }
11837 }
11838
11839 /* Return whether the sub-tree ST contains a label which is accessible from
11840 outside the sub-tree. */
11841
11842 static bool
11843 contains_label_p (tree st)
11844 {
11845 hash_set<tree> pset;
11846 contains_label_data data = { &pset, false };
11847 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
11848 }
11849
11850 /* Fold a ternary expression of code CODE and type TYPE with operands
11851 OP0, OP1, and OP2. Return the folded expression if folding is
11852 successful. Otherwise, return NULL_TREE. */
11853
11854 tree
11855 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11856 tree op0, tree op1, tree op2)
11857 {
11858 tree tem;
11859 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11860 enum tree_code_class kind = TREE_CODE_CLASS (code);
11861
11862 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11863 && TREE_CODE_LENGTH (code) == 3);
11864
11865 /* If this is a commutative operation, and OP0 is a constant, move it
11866 to OP1 to reduce the number of tests below. */
11867 if (commutative_ternary_tree_code (code)
11868 && tree_swap_operands_p (op0, op1))
11869 return fold_build3_loc (loc, code, type, op1, op0, op2);
11870
11871 tem = generic_simplify (loc, code, type, op0, op1, op2);
11872 if (tem)
11873 return tem;
11874
11875 /* Strip any conversions that don't change the mode. This is safe
11876 for every expression, except for a comparison expression because
11877 its signedness is derived from its operands. So, in the latter
11878 case, only strip conversions that don't change the signedness.
11879
11880 Note that this is done as an internal manipulation within the
11881 constant folder, in order to find the simplest representation of
11882 the arguments so that their form can be studied. In any case,
11883 the appropriate type conversions should be put back in the tree
11884 that will get out of the constant folder. */
11885 if (op0)
11886 {
11887 arg0 = op0;
11888 STRIP_NOPS (arg0);
11889 }
11890
11891 if (op1)
11892 {
11893 arg1 = op1;
11894 STRIP_NOPS (arg1);
11895 }
11896
11897 if (op2)
11898 {
11899 arg2 = op2;
11900 STRIP_NOPS (arg2);
11901 }
11902
11903 switch (code)
11904 {
11905 case COMPONENT_REF:
11906 if (TREE_CODE (arg0) == CONSTRUCTOR
11907 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11908 {
11909 unsigned HOST_WIDE_INT idx;
11910 tree field, value;
11911 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11912 if (field == arg1)
11913 return value;
11914 }
11915 return NULL_TREE;
11916
11917 case COND_EXPR:
11918 case VEC_COND_EXPR:
11919 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11920 so all simple results must be passed through pedantic_non_lvalue. */
11921 if (TREE_CODE (arg0) == INTEGER_CST)
11922 {
11923 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11924 tem = integer_zerop (arg0) ? op2 : op1;
11925 /* Only optimize constant conditions when the selected branch
11926 has the same type as the COND_EXPR. This avoids optimizing
11927 away "c ? x : throw", where the throw has a void type.
11928 Also avoid discarding an operand that contains a label. */
11929 if ((!TREE_SIDE_EFFECTS (unused_op)
11930 || !contains_label_p (unused_op))
11931 && (! VOID_TYPE_P (TREE_TYPE (tem))
11932 || VOID_TYPE_P (type)))
11933 return pedantic_non_lvalue_loc (loc, tem);
11934 return NULL_TREE;
11935 }
11936 else if (TREE_CODE (arg0) == VECTOR_CST)
11937 {
11938 unsigned HOST_WIDE_INT nelts;
11939 if ((TREE_CODE (arg1) == VECTOR_CST
11940 || TREE_CODE (arg1) == CONSTRUCTOR)
11941 && (TREE_CODE (arg2) == VECTOR_CST
11942 || TREE_CODE (arg2) == CONSTRUCTOR)
11943 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
11944 {
11945 vec_perm_builder sel (nelts, nelts, 1);
11946 for (unsigned int i = 0; i < nelts; i++)
11947 {
11948 tree val = VECTOR_CST_ELT (arg0, i);
11949 if (integer_all_onesp (val))
11950 sel.quick_push (i);
11951 else if (integer_zerop (val))
11952 sel.quick_push (nelts + i);
11953 else /* Currently unreachable. */
11954 return NULL_TREE;
11955 }
11956 vec_perm_indices indices (sel, 2, nelts);
11957 tree t = fold_vec_perm (type, arg1, arg2, indices);
11958 if (t != NULL_TREE)
11959 return t;
11960 }
11961 }
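/* E.g. VEC_COND_EXPR <{-1,0,0,-1}, a, b> is folded as the permutation
   {0,5,6,3} of the concatenation a|b: a -1 mask element selects from
   A (index I), a 0 element from B (index NELTS + I).
   (Illustrative values.)  */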
11962
11963 /* If we have A op B ? A : C, we may be able to convert this to a
11964 simpler expression, depending on the operation and the values
11965 of B and C. Signed zeros prevent all of these transformations,
11966 for reasons given above each one.
11967
11968 Also try swapping the arguments and inverting the conditional. */
11969 if (COMPARISON_CLASS_P (arg0)
11970 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11971 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11972 {
11973 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11974 if (tem)
11975 return tem;
11976 }
11977
11978 if (COMPARISON_CLASS_P (arg0)
11979 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11980 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11981 {
11982 location_t loc0 = expr_location_or (arg0, loc);
11983 tem = fold_invert_truthvalue (loc0, arg0);
11984 if (tem && COMPARISON_CLASS_P (tem))
11985 {
11986 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11987 if (tem)
11988 return tem;
11989 }
11990 }
11991
11992 /* If the second operand is simpler than the third, swap them
11993 since that produces better jump optimization results. */
11994 if (truth_value_p (TREE_CODE (arg0))
11995 && tree_swap_operands_p (op1, op2))
11996 {
11997 location_t loc0 = expr_location_or (arg0, loc);
11998 /* See if this can be inverted. If it can't, possibly because
11999 it was a floating-point inequality comparison, don't do
12000 anything. */
12001 tem = fold_invert_truthvalue (loc0, arg0);
12002 if (tem)
12003 return fold_build3_loc (loc, code, type, tem, op2, op1);
12004 }
12005
12006 /* Convert A ? 1 : 0 to simply A. */
12007 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12008 : (integer_onep (op1)
12009 && !VECTOR_TYPE_P (type)))
12010 && integer_zerop (op2)
12011 /* If we try to convert OP0 to our type, the
12012 call to fold will try to move the conversion inside
12013 a COND, which will recurse. In that case, the COND_EXPR
12014 is probably the best choice, so leave it alone. */
12015 && type == TREE_TYPE (arg0))
12016 return pedantic_non_lvalue_loc (loc, arg0);
12017
12018 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12019 over COND_EXPR in cases such as floating point comparisons. */
12020 if (integer_zerop (op1)
12021 && code == COND_EXPR
12022 && integer_onep (op2)
12023 && !VECTOR_TYPE_P (type)
12024 && truth_value_p (TREE_CODE (arg0)))
12025 return pedantic_non_lvalue_loc (loc,
12026 fold_convert_loc (loc, type,
12027 invert_truthvalue_loc (loc,
12028 arg0)));
12029
12030 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
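/* E.g. with a 32-bit int A, A < 0 ? 0x80000000 : 0 folds to
   A & 0x80000000: the AND extracts exactly the sign bit, which is
   nonzero iff A is negative.  (Illustrative values.)  */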
12031 if (TREE_CODE (arg0) == LT_EXPR
12032 && integer_zerop (TREE_OPERAND (arg0, 1))
12033 && integer_zerop (op2)
12034 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12035 {
12036 /* sign_bit_p looks through both zero and sign extensions,
12037 but for this optimization only sign extensions are
12038 usable. */
12039 tree tem2 = TREE_OPERAND (arg0, 0);
12040 while (tem != tem2)
12041 {
12042 if (TREE_CODE (tem2) != NOP_EXPR
12043 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12044 {
12045 tem = NULL_TREE;
12046 break;
12047 }
12048 tem2 = TREE_OPERAND (tem2, 0);
12049 }
12050 /* sign_bit_p only checks ARG1 bits within A's precision.
12051 If <sign bit of A> has a wider type than A, bits outside
12052 of A's precision in <sign bit of A> need to be checked.
12053 If they are all 0, this optimization needs to be done
12054 in unsigned A's type; if they are all 1, in signed A's type;
12055 otherwise this can't be done. */
12056 if (tem
12057 && TYPE_PRECISION (TREE_TYPE (tem))
12058 < TYPE_PRECISION (TREE_TYPE (arg1))
12059 && TYPE_PRECISION (TREE_TYPE (tem))
12060 < TYPE_PRECISION (type))
12061 {
12062 int inner_width, outer_width;
12063 tree tem_type;
12064
12065 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12066 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12067 if (outer_width > TYPE_PRECISION (type))
12068 outer_width = TYPE_PRECISION (type);
12069
12070 wide_int mask = wi::shifted_mask
12071 (inner_width, outer_width - inner_width, false,
12072 TYPE_PRECISION (TREE_TYPE (arg1)));
12073
12074 wide_int common = mask & wi::to_wide (arg1);
12075 if (common == mask)
12076 {
12077 tem_type = signed_type_for (TREE_TYPE (tem));
12078 tem = fold_convert_loc (loc, tem_type, tem);
12079 }
12080 else if (common == 0)
12081 {
12082 tem_type = unsigned_type_for (TREE_TYPE (tem));
12083 tem = fold_convert_loc (loc, tem_type, tem);
12084 }
12085 else
12086 tem = NULL;
12087 }
12088
12089 if (tem)
12090 return
12091 fold_convert_loc (loc, type,
12092 fold_build2_loc (loc, BIT_AND_EXPR,
12093 TREE_TYPE (tem), tem,
12094 fold_convert_loc (loc,
12095 TREE_TYPE (tem),
12096 arg1)));
12097 }
12098
12099 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12100 already handled above. */
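/* E.g. with N == 3: ((A >> 3) & 1) ? 8 : 0 folds to A & 8.
   (Illustrative values.)  */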
12101 if (TREE_CODE (arg0) == BIT_AND_EXPR
12102 && integer_onep (TREE_OPERAND (arg0, 1))
12103 && integer_zerop (op2)
12104 && integer_pow2p (arg1))
12105 {
12106 tree tem = TREE_OPERAND (arg0, 0);
12107 STRIP_NOPS (tem);
12108 if (TREE_CODE (tem) == RSHIFT_EXPR
12109 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12110 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
12111 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
12112 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12113 fold_convert_loc (loc, type,
12114 TREE_OPERAND (tem, 0)),
12115 op1);
12116 }
12117
12118 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12119 is probably obsolete because the first operand should be a
12120 truth value (that's why we have the two cases above), but let's
12121 leave it in until we can confirm this for all front-ends. */
12122 if (integer_zerop (op2)
12123 && TREE_CODE (arg0) == NE_EXPR
12124 && integer_zerop (TREE_OPERAND (arg0, 1))
12125 && integer_pow2p (arg1)
12126 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12127 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12128 arg1, OEP_ONLY_CONST)
12129 /* operand_equal_p compares just the value, not the precision, so
12130 e.g. arg1 could be an 8-bit -128 and be a power of two, while the
12131 BIT_AND_EXPR's second operand is a 32-bit -128, which is not a
12132 power of two (or vice versa). */
12133 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
12134 return pedantic_non_lvalue_loc (loc,
12135 fold_convert_loc (loc, type,
12136 TREE_OPERAND (arg0,
12137 0)));
12138
12139 /* Disable the transformations below for vectors, since
12140 fold_binary_op_with_conditional_arg may undo them immediately,
12141 yielding an infinite loop. */
12142 if (code == VEC_COND_EXPR)
12143 return NULL_TREE;
12144
12145 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12146 if (integer_zerop (op2)
12147 && truth_value_p (TREE_CODE (arg0))
12148 && truth_value_p (TREE_CODE (arg1))
12149 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12150 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
12151 : TRUTH_ANDIF_EXPR,
12152 type, fold_convert_loc (loc, type, arg0), op1);
12153
12154 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12155 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
12156 && truth_value_p (TREE_CODE (arg0))
12157 && truth_value_p (TREE_CODE (arg1))
12158 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12159 {
12160 location_t loc0 = expr_location_or (arg0, loc);
12161 /* Only perform transformation if ARG0 is easily inverted. */
12162 tem = fold_invert_truthvalue (loc0, arg0);
12163 if (tem)
12164 return fold_build2_loc (loc, code == VEC_COND_EXPR
12165 ? BIT_IOR_EXPR
12166 : TRUTH_ORIF_EXPR,
12167 type, fold_convert_loc (loc, type, tem),
12168 op1);
12169 }
12170
12171 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12172 if (integer_zerop (arg1)
12173 && truth_value_p (TREE_CODE (arg0))
12174 && truth_value_p (TREE_CODE (op2))
12175 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12176 {
12177 location_t loc0 = expr_location_or (arg0, loc);
12178 /* Only perform transformation if ARG0 is easily inverted. */
12179 tem = fold_invert_truthvalue (loc0, arg0);
12180 if (tem)
12181 return fold_build2_loc (loc, code == VEC_COND_EXPR
12182 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12183 type, fold_convert_loc (loc, type, tem),
12184 op2);
12185 }
12186
12187 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12188 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
12189 && truth_value_p (TREE_CODE (arg0))
12190 && truth_value_p (TREE_CODE (op2))
12191 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12192 return fold_build2_loc (loc, code == VEC_COND_EXPR
12193 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12194 type, fold_convert_loc (loc, type, arg0), op2);
12195
12196 return NULL_TREE;
12197
12198 case CALL_EXPR:
12199 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12200 of fold_ternary on them. */
12201 gcc_unreachable ();
12202
12203 case BIT_FIELD_REF:
12204 if (TREE_CODE (arg0) == VECTOR_CST
12205 && (type == TREE_TYPE (TREE_TYPE (arg0))
12206 || (VECTOR_TYPE_P (type)
12207 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
12208 && tree_fits_uhwi_p (op1)
12209 && tree_fits_uhwi_p (op2))
12210 {
12211 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12212 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
12213 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12214 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12215
12216 if (n != 0
12217 && (idx % width) == 0
12218 && (n % width) == 0
12219 && known_le ((idx + n) / width,
12220 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
12221 {
12222 idx = idx / width;
12223 n = n / width;
12224
12225 if (TREE_CODE (arg0) == VECTOR_CST)
12226 {
12227 if (n == 1)
12228 {
12229 tem = VECTOR_CST_ELT (arg0, idx);
12230 if (VECTOR_TYPE_P (type))
12231 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
12232 return tem;
12233 }
12234
12235 tree_vector_builder vals (type, n, 1);
12236 for (unsigned i = 0; i < n; ++i)
12237 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
12238 return vals.build ();
12239 }
12240 }
12241 }
12242
12243 /* On constants we can use native encode/interpret to constant
12244 fold (nearly) all BIT_FIELD_REFs. */
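/* E.g. a byte-aligned BIT_FIELD_REF of bits 32..63 of a 64-bit
   INTEGER_CST is folded by serializing the constant into a byte
   buffer with native_encode_expr and re-reading four bytes with
   native_interpret_expr.  (A sketch of the mechanism used below.)  */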
12245 if (CONSTANT_CLASS_P (arg0)
12246 && can_native_interpret_type_p (type)
12247 && BITS_PER_UNIT == 8
12248 && tree_fits_uhwi_p (op1)
12249 && tree_fits_uhwi_p (op2))
12250 {
12251 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12252 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12253 /* Limit us to a reasonable amount of work. To relax the
12254 other limitations we need bit-shifting of the buffer
12255 and rounding up the size. */
12256 if (bitpos % BITS_PER_UNIT == 0
12257 && bitsize % BITS_PER_UNIT == 0
12258 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
12259 {
12260 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
12261 unsigned HOST_WIDE_INT len
12262 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
12263 bitpos / BITS_PER_UNIT);
12264 if (len > 0
12265 && len * BITS_PER_UNIT >= bitsize)
12266 {
12267 tree v = native_interpret_expr (type, b,
12268 bitsize / BITS_PER_UNIT);
12269 if (v)
12270 return v;
12271 }
12272 }
12273 }
12274
12275 return NULL_TREE;
12276
12277 case VEC_PERM_EXPR:
12278 /* Perform constant folding of VEC_PERM_EXPR. */
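/* E.g. VEC_PERM_EXPR <{1,2,3,4}, {5,6,7,8}, {0,4,1,5}> folds to
   {1,5,2,6}: each selector element indexes into the concatenation of
   the two input vectors.  (Illustrative values.)  */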
12279 if (TREE_CODE (arg2) == VECTOR_CST
12280 && TREE_CODE (op0) == VECTOR_CST
12281 && TREE_CODE (op1) == VECTOR_CST)
12282 {
12283 /* Build a vector of integers from the tree mask. */
12284 vec_perm_builder builder;
12285 if (!tree_to_vec_perm_builder (&builder, arg2))
12286 return NULL_TREE;
12287
12288 /* Create a vec_perm_indices for the integer vector. */
12289 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
12290 bool single_arg = (op0 == op1);
12291 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
12292 return fold_vec_perm (type, op0, op1, sel);
12293 }
12294 return NULL_TREE;
12295
12296 case BIT_INSERT_EXPR:
12297 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
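/* E.g. inserting the 8-bit value 0x11 at bit position 8 of the 32-bit
   constant 0xaabbccdd clears bits 8..15 with the negated shifted_mask
   and ORs in the zero-extended, shifted replacement, giving
   0xaabb11dd.  (Illustrative values.)  */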
12298 if (TREE_CODE (arg0) == INTEGER_CST
12299 && TREE_CODE (arg1) == INTEGER_CST)
12300 {
12301 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12302 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
12303 wide_int tem = (wi::to_wide (arg0)
12304 & wi::shifted_mask (bitpos, bitsize, true,
12305 TYPE_PRECISION (type)));
12306 wide_int tem2
12307 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
12308 bitsize), bitpos);
12309 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
12310 }
12311 else if (TREE_CODE (arg0) == VECTOR_CST
12312 && CONSTANT_CLASS_P (arg1)
12313 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
12314 TREE_TYPE (arg1)))
12315 {
12316 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12317 unsigned HOST_WIDE_INT elsize
12318 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
12319 if (bitpos % elsize == 0)
12320 {
12321 unsigned k = bitpos / elsize;
12322 unsigned HOST_WIDE_INT nelts;
12323 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
12324 return arg0;
12325 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
12326 {
12327 tree_vector_builder elts (type, nelts, 1);
12328 elts.quick_grow (nelts);
12329 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
12330 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
12331 return elts.build ();
12332 }
12333 }
12334 }
12335 return NULL_TREE;
12336
12337 default:
12338 return NULL_TREE;
12339 } /* switch (code) */
12340 }
12341
12342 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
12343 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
12344 constructor element index of the value returned. If the element is
12345 not found, NULL_TREE is returned and *CTOR_IDX is updated to
12346 the index of the element after the ACCESS_INDEX position (which
12347 may be outside of the CTOR array). */
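/* E.g. for the GNU C initializer

     int a[8] = { [2 ... 4] = 7 };

   a query with ACCESS_INDEX 3 matches the RANGE_EXPR element and
   returns the value 7.  (Illustrative example.)  */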
12348
12349 tree
12350 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
12351 unsigned *ctor_idx)
12352 {
12353 tree index_type = NULL_TREE;
12354 signop index_sgn = UNSIGNED;
12355 offset_int low_bound = 0;
12356
12357 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12358 {
12359 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12360 if (domain_type && TYPE_MIN_VALUE (domain_type))
12361 {
12362 /* Static constructors for variably sized objects make no sense. */
12363 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12364 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12365 /* ??? When it is obvious that the range is signed, treat it so. */
12366 if (TYPE_UNSIGNED (index_type)
12367 && TYPE_MAX_VALUE (domain_type)
12368 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
12369 TYPE_MIN_VALUE (domain_type)))
12370 {
12371 index_sgn = SIGNED;
12372 low_bound
12373 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
12374 SIGNED);
12375 }
12376 else
12377 {
12378 index_sgn = TYPE_SIGN (index_type);
12379 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12380 }
12381 }
12382 }
12383
12384 if (index_type)
12385 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12386 index_sgn);
12387
12388 offset_int index = low_bound;
12389 if (index_type)
12390 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12391
12392 offset_int max_index = index;
12393 unsigned cnt;
12394 tree cfield, cval;
12395 bool first_p = true;
12396
12397 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12398 {
12399 /* An array constructor might explicitly set an index, specify a range,
12400 or leave the index NULL, meaning that it is the next index after the
12401 previous one. */
12402 if (cfield)
12403 {
12404 if (TREE_CODE (cfield) == INTEGER_CST)
12405 max_index = index
12406 = offset_int::from (wi::to_wide (cfield), index_sgn);
12407 else
12408 {
12409 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12410 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
12411 index_sgn);
12412 max_index
12413 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
12414 index_sgn);
12415 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
12416 }
12417 }
12418 else if (!first_p)
12419 {
12420 index = max_index + 1;
12421 if (index_type)
12422 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12423 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
12424 max_index = index;
12425 }
12426 else
12427 first_p = false;
12428
12429 /* Do we have a match? */
12430 if (wi::cmp (access_index, index, index_sgn) >= 0)
12431 {
12432 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
12433 {
12434 if (ctor_idx)
12435 *ctor_idx = cnt;
12436 return cval;
12437 }
12438 }
12439 else if (in_gimple_form)
12440 /* We're past the element we searched for. Note that during
12441 parsing the elements might not be sorted.
12442 ??? We should use a binary search and a flag on the
12443 CONSTRUCTOR as to whether elements are sorted in declaration
12444 order. */
12445 break;
12446 }
12447 if (ctor_idx)
12448 *ctor_idx = cnt;
12449 return NULL_TREE;
12450 }
12451
12452 /* Perform constant folding and related simplification of EXPR.
12453 The related simplifications include x*1 => x, x*0 => 0, etc.,
12454 and application of the associative law.
12455 NOP_EXPR conversions may be removed freely (as long as we
12456 are careful not to change the type of the overall expression).
12457 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12458 but we can constant-fold them if they have constant operands. */
12459
12460 #ifdef ENABLE_FOLD_CHECKING
12461 # define fold(x) fold_1 (x)
12462 static tree fold_1 (tree);
12463 static
12464 #endif
12465 tree
12466 fold (tree expr)
12467 {
12468 const tree t = expr;
12469 enum tree_code code = TREE_CODE (t);
12470 enum tree_code_class kind = TREE_CODE_CLASS (code);
12471 tree tem;
12472 location_t loc = EXPR_LOCATION (expr);
12473
12474 /* Return right away if a constant. */
12475 if (kind == tcc_constant)
12476 return t;
12477
12478 /* CALL_EXPR-like objects with variable numbers of operands are
12479 treated specially. */
12480 if (kind == tcc_vl_exp)
12481 {
12482 if (code == CALL_EXPR)
12483 {
12484 tem = fold_call_expr (loc, expr, false);
12485 return tem ? tem : expr;
12486 }
12487 return expr;
12488 }
12489
12490 if (IS_EXPR_CODE_CLASS (kind))
12491 {
12492 tree type = TREE_TYPE (t);
12493 tree op0, op1, op2;
12494
12495 switch (TREE_CODE_LENGTH (code))
12496 {
12497 case 1:
12498 op0 = TREE_OPERAND (t, 0);
12499 tem = fold_unary_loc (loc, code, type, op0);
12500 return tem ? tem : expr;
12501 case 2:
12502 op0 = TREE_OPERAND (t, 0);
12503 op1 = TREE_OPERAND (t, 1);
12504 tem = fold_binary_loc (loc, code, type, op0, op1);
12505 return tem ? tem : expr;
12506 case 3:
12507 op0 = TREE_OPERAND (t, 0);
12508 op1 = TREE_OPERAND (t, 1);
12509 op2 = TREE_OPERAND (t, 2);
12510 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12511 return tem ? tem : expr;
12512 default:
12513 break;
12514 }
12515 }
12516
12517 switch (code)
12518 {
12519 case ARRAY_REF:
12520 {
12521 tree op0 = TREE_OPERAND (t, 0);
12522 tree op1 = TREE_OPERAND (t, 1);
12523
12524 if (TREE_CODE (op1) == INTEGER_CST
12525 && TREE_CODE (op0) == CONSTRUCTOR
12526 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12527 {
12528 tree val = get_array_ctor_element_at_index (op0,
12529 wi::to_offset (op1));
12530 if (val)
12531 return val;
12532 }
12533
12534 return t;
12535 }
12536
12537 /* Return a VECTOR_CST if possible. */
12538 case CONSTRUCTOR:
12539 {
12540 tree type = TREE_TYPE (t);
12541 if (TREE_CODE (type) != VECTOR_TYPE)
12542 return t;
12543
12544 unsigned i;
12545 tree val;
12546 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12547 if (! CONSTANT_CLASS_P (val))
12548 return t;
12549
12550 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12551 }
12552
12553 case CONST_DECL:
12554 return fold (DECL_INITIAL (t));
12555
12556 default:
12557 return t;
12558 } /* switch (code) */
12559 }
12560
12561 #ifdef ENABLE_FOLD_CHECKING
12562 #undef fold
12563
12564 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12565 hash_table<nofree_ptr_hash<const tree_node> > *);
12566 static void fold_check_failed (const_tree, const_tree);
12567 void print_fold_checksum (const_tree);
12568
12569 /* When --enable-checking=fold is in effect, compute a digest of EXPR
12570 before and after the actual fold call, to verify that fold did not
12571 accidentally change the original EXPR. */
12572
12573 tree
12574 fold (tree expr)
12575 {
12576 tree ret;
12577 struct md5_ctx ctx;
12578 unsigned char checksum_before[16], checksum_after[16];
12579 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12580
12581 md5_init_ctx (&ctx);
12582 fold_checksum_tree (expr, &ctx, &ht);
12583 md5_finish_ctx (&ctx, checksum_before);
12584 ht.empty ();
12585
12586 ret = fold_1 (expr);
12587
12588 md5_init_ctx (&ctx);
12589 fold_checksum_tree (expr, &ctx, &ht);
12590 md5_finish_ctx (&ctx, checksum_after);
12591
12592 if (memcmp (checksum_before, checksum_after, 16))
12593 fold_check_failed (expr, ret);
12594
12595 return ret;
12596 }
12597
12598 void
12599 print_fold_checksum (const_tree expr)
12600 {
12601 struct md5_ctx ctx;
12602 unsigned char checksum[16], cnt;
12603 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12604
12605 md5_init_ctx (&ctx);
12606 fold_checksum_tree (expr, &ctx, &ht);
12607 md5_finish_ctx (&ctx, checksum);
12608 for (cnt = 0; cnt < 16; ++cnt)
12609 fprintf (stderr, "%02x", checksum[cnt]);
12610 putc ('\n', stderr);
12611 }
12612
12613 static void
12614 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12615 {
12616 internal_error ("fold check: original tree changed by fold");
12617 }
12618
12619 static void
12620 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12621 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12622 {
12623 const tree_node **slot;
12624 enum tree_code code;
12625 union tree_node *buf;
12626 int i, len;
12627
12628 recursive_label:
12629 if (expr == NULL)
12630 return;
12631 slot = ht->find_slot (expr, INSERT);
12632 if (*slot != NULL)
12633 return;
12634 *slot = expr;
12635 code = TREE_CODE (expr);
12636 if (TREE_CODE_CLASS (code) == tcc_declaration
12637 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12638 {
12639 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12640 size_t sz = tree_size (expr);
12641 buf = XALLOCAVAR (union tree_node, sz);
12642 memcpy ((char *) buf, expr, sz);
12643 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
12644 buf->decl_with_vis.symtab_node = NULL;
12645 buf->base.nowarning_flag = 0;
12646 expr = (tree) buf;
12647 }
12648 else if (TREE_CODE_CLASS (code) == tcc_type
12649 && (TYPE_POINTER_TO (expr)
12650 || TYPE_REFERENCE_TO (expr)
12651 || TYPE_CACHED_VALUES_P (expr)
12652 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12653 || TYPE_NEXT_VARIANT (expr)
12654 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12655 {
12656 /* Allow these fields to be modified. */
12657 tree tmp;
12658 size_t sz = tree_size (expr);
12659 buf = XALLOCAVAR (union tree_node, sz);
12660 memcpy ((char *) buf, expr, sz);
12661 expr = tmp = (tree) buf;
12662 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12663 TYPE_POINTER_TO (tmp) = NULL;
12664 TYPE_REFERENCE_TO (tmp) = NULL;
12665 TYPE_NEXT_VARIANT (tmp) = NULL;
12666 TYPE_ALIAS_SET (tmp) = -1;
12667 if (TYPE_CACHED_VALUES_P (tmp))
12668 {
12669 TYPE_CACHED_VALUES_P (tmp) = 0;
12670 TYPE_CACHED_VALUES (tmp) = NULL;
12671 }
12672 }
12673 else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
12674 {
12675 /* Allow TREE_NO_WARNING to be set. Perhaps we shouldn't allow that
12676 and change builtins.c etc. instead - see PR89543. */
12677 size_t sz = tree_size (expr);
12678 buf = XALLOCAVAR (union tree_node, sz);
12679 memcpy ((char *) buf, expr, sz);
12680 buf->base.nowarning_flag = 0;
12681 expr = (tree) buf;
12682 }
12683 md5_process_bytes (expr, tree_size (expr), ctx);
12684 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12685 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12686 if (TREE_CODE_CLASS (code) != tcc_type
12687 && TREE_CODE_CLASS (code) != tcc_declaration
12688 && code != TREE_LIST
12689 && code != SSA_NAME
12690 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12691 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12692 switch (TREE_CODE_CLASS (code))
12693 {
12694 case tcc_constant:
12695 switch (code)
12696 {
12697 case STRING_CST:
12698 md5_process_bytes (TREE_STRING_POINTER (expr),
12699 TREE_STRING_LENGTH (expr), ctx);
12700 break;
12701 case COMPLEX_CST:
12702 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12703 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12704 break;
12705 case VECTOR_CST:
12706 len = vector_cst_encoded_nelts (expr);
12707 for (i = 0; i < len; ++i)
12708 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
12709 break;
12710 default:
12711 break;
12712 }
12713 break;
12714 case tcc_exceptional:
12715 switch (code)
12716 {
12717 case TREE_LIST:
12718 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12719 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12720 expr = TREE_CHAIN (expr);
12721 goto recursive_label;
12722 break;
12723 case TREE_VEC:
12724 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12725 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12726 break;
12727 default:
12728 break;
12729 }
12730 break;
12731 case tcc_expression:
12732 case tcc_reference:
12733 case tcc_comparison:
12734 case tcc_unary:
12735 case tcc_binary:
12736 case tcc_statement:
12737 case tcc_vl_exp:
12738 len = TREE_OPERAND_LENGTH (expr);
12739 for (i = 0; i < len; ++i)
12740 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12741 break;
12742 case tcc_declaration:
12743 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12744 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12745 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12746 {
12747 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12748 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12749 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12750 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12751 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12752 }
12753
12754 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12755 {
12756 if (TREE_CODE (expr) == FUNCTION_DECL)
12757 {
12758 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12759 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12760 }
12761 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12762 }
12763 break;
12764 case tcc_type:
12765 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12766 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12767 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12768 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12769 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12770 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12771 if (INTEGRAL_TYPE_P (expr)
12772 || SCALAR_FLOAT_TYPE_P (expr))
12773 {
12774 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12775 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12776 }
12777 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12778 if (TREE_CODE (expr) == RECORD_TYPE
12779 || TREE_CODE (expr) == UNION_TYPE
12780 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12781 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12782 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12783 break;
12784 default:
12785 break;
12786 }
12787 }
12788
12789 /* Helper function for outputting the checksum of a tree T. When
12790 debugging with gdb, you can "define mynext" to be "next" followed
12791 by "call debug_fold_checksum (op0)", then just trace down till the
12792 outputs differ. */
12793
12794 DEBUG_FUNCTION void
12795 debug_fold_checksum (const_tree t)
12796 {
12797 int i;
12798 unsigned char checksum[16];
12799 struct md5_ctx ctx;
12800 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12801
12802 md5_init_ctx (&ctx);
12803 fold_checksum_tree (t, &ctx, &ht);
12804 md5_finish_ctx (&ctx, checksum);
12805 ht.empty ();
12806
12807 for (i = 0; i < 16; i++)
12808 fprintf (stderr, "%d ", checksum[i]);
12809
12810 fprintf (stderr, "\n");
12811 }
12812
12813 #endif
12814
12815 /* Fold a unary tree expression with code CODE of type TYPE with an
12816 operand OP0. LOC is the location of the resulting expression.
12817 Return a folded expression if successful. Otherwise, return a tree
12818 expression with code CODE of type TYPE with an operand OP0. */
12819
12820 tree
12821 fold_build1_loc (location_t loc,
12822 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12823 {
12824 tree tem;
12825 #ifdef ENABLE_FOLD_CHECKING
12826 unsigned char checksum_before[16], checksum_after[16];
12827 struct md5_ctx ctx;
12828 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12829
12830 md5_init_ctx (&ctx);
12831 fold_checksum_tree (op0, &ctx, &ht);
12832 md5_finish_ctx (&ctx, checksum_before);
12833 ht.empty ();
12834 #endif
12835
12836 tem = fold_unary_loc (loc, code, type, op0);
12837 if (!tem)
12838 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12839
12840 #ifdef ENABLE_FOLD_CHECKING
12841 md5_init_ctx (&ctx);
12842 fold_checksum_tree (op0, &ctx, &ht);
12843 md5_finish_ctx (&ctx, checksum_after);
12844
12845 if (memcmp (checksum_before, checksum_after, 16))
12846 fold_check_failed (op0, tem);
12847 #endif
12848 return tem;
12849 }
12850
12851 /* Fold a binary tree expression with code CODE of type TYPE with
12852 operands OP0 and OP1. LOC is the location of the resulting
12853 expression. Return a folded expression if successful. Otherwise,
12854 return a tree expression with code CODE of type TYPE with operands
12855 OP0 and OP1. */
12856
12857 tree
12858 fold_build2_loc (location_t loc,
12859 enum tree_code code, tree type, tree op0, tree op1
12860 MEM_STAT_DECL)
12861 {
12862 tree tem;
12863 #ifdef ENABLE_FOLD_CHECKING
12864 unsigned char checksum_before_op0[16],
12865 checksum_before_op1[16],
12866 checksum_after_op0[16],
12867 checksum_after_op1[16];
12868 struct md5_ctx ctx;
12869 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12870
12871 md5_init_ctx (&ctx);
12872 fold_checksum_tree (op0, &ctx, &ht);
12873 md5_finish_ctx (&ctx, checksum_before_op0);
12874 ht.empty ();
12875
12876 md5_init_ctx (&ctx);
12877 fold_checksum_tree (op1, &ctx, &ht);
12878 md5_finish_ctx (&ctx, checksum_before_op1);
12879 ht.empty ();
12880 #endif
12881
12882 tem = fold_binary_loc (loc, code, type, op0, op1);
12883 if (!tem)
12884 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12885
12886 #ifdef ENABLE_FOLD_CHECKING
12887 md5_init_ctx (&ctx);
12888 fold_checksum_tree (op0, &ctx, &ht);
12889 md5_finish_ctx (&ctx, checksum_after_op0);
12890 ht.empty ();
12891
12892 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12893 fold_check_failed (op0, tem);
12894
12895 md5_init_ctx (&ctx);
12896 fold_checksum_tree (op1, &ctx, &ht);
12897 md5_finish_ctx (&ctx, checksum_after_op1);
12898
12899 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12900 fold_check_failed (op1, tem);
12901 #endif
12902 return tem;
12903 }
12904
12905 /* Fold a ternary tree expression with code CODE of type TYPE with
12906 operands OP0, OP1, and OP2. Return a folded expression if
12907 successful. Otherwise, return a tree expression with code CODE of
12908 type TYPE with operands OP0, OP1, and OP2. */
12909
12910 tree
12911 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12912 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12913 {
12914 tree tem;
12915 #ifdef ENABLE_FOLD_CHECKING
12916 unsigned char checksum_before_op0[16],
12917 checksum_before_op1[16],
12918 checksum_before_op2[16],
12919 checksum_after_op0[16],
12920 checksum_after_op1[16],
12921 checksum_after_op2[16];
12922 struct md5_ctx ctx;
12923 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12924
12925 md5_init_ctx (&ctx);
12926 fold_checksum_tree (op0, &ctx, &ht);
12927 md5_finish_ctx (&ctx, checksum_before_op0);
12928 ht.empty ();
12929
12930 md5_init_ctx (&ctx);
12931 fold_checksum_tree (op1, &ctx, &ht);
12932 md5_finish_ctx (&ctx, checksum_before_op1);
12933 ht.empty ();
12934
12935 md5_init_ctx (&ctx);
12936 fold_checksum_tree (op2, &ctx, &ht);
12937 md5_finish_ctx (&ctx, checksum_before_op2);
12938 ht.empty ();
12939 #endif
12940
12941 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12942 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12943 if (!tem)
12944 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12945
12946 #ifdef ENABLE_FOLD_CHECKING
12947 md5_init_ctx (&ctx);
12948 fold_checksum_tree (op0, &ctx, &ht);
12949 md5_finish_ctx (&ctx, checksum_after_op0);
12950 ht.empty ();
12951
12952 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12953 fold_check_failed (op0, tem);
12954
12955 md5_init_ctx (&ctx);
12956 fold_checksum_tree (op1, &ctx, &ht);
12957 md5_finish_ctx (&ctx, checksum_after_op1);
12958 ht.empty ();
12959
12960 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12961 fold_check_failed (op1, tem);
12962
12963 md5_init_ctx (&ctx);
12964 fold_checksum_tree (op2, &ctx, &ht);
12965 md5_finish_ctx (&ctx, checksum_after_op2);
12966
12967 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12968 fold_check_failed (op2, tem);
12969 #endif
12970 return tem;
12971 }
12972
12973 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
12974 arguments in ARGARRAY, and a null static chain.
12975 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12976 of type TYPE from the given operands as constructed by build_call_array. */
12977
12978 tree
12979 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12980 int nargs, tree *argarray)
12981 {
12982 tree tem;
12983 #ifdef ENABLE_FOLD_CHECKING
12984 unsigned char checksum_before_fn[16],
12985 checksum_before_arglist[16],
12986 checksum_after_fn[16],
12987 checksum_after_arglist[16];
12988 struct md5_ctx ctx;
12989 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12990 int i;
12991
12992 md5_init_ctx (&ctx);
12993 fold_checksum_tree (fn, &ctx, &ht);
12994 md5_finish_ctx (&ctx, checksum_before_fn);
12995 ht.empty ();
12996
12997 md5_init_ctx (&ctx);
12998 for (i = 0; i < nargs; i++)
12999 fold_checksum_tree (argarray[i], &ctx, &ht);
13000 md5_finish_ctx (&ctx, checksum_before_arglist);
13001 ht.empty ();
13002 #endif
13003
13004 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13005 if (!tem)
13006 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13007
13008 #ifdef ENABLE_FOLD_CHECKING
13009 md5_init_ctx (&ctx);
13010 fold_checksum_tree (fn, &ctx, &ht);
13011 md5_finish_ctx (&ctx, checksum_after_fn);
13012 ht.empty ();
13013
13014 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13015 fold_check_failed (fn, tem);
13016
13017 md5_init_ctx (&ctx);
13018 for (i = 0; i < nargs; i++)
13019 fold_checksum_tree (argarray[i], &ctx, &ht);
13020 md5_finish_ctx (&ctx, checksum_after_arglist);
13021
13022 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13023 fold_check_failed (NULL_TREE, tem);
13024 #endif
13025 return tem;
13026 }
13027
13028 /* Perform constant folding and related simplification of initializer
13029 expression EXPR. These behave identically to "fold_buildN" but ignore
13030 potential run-time traps and exceptions that fold must preserve. */
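/* For instance, a static initializer such as 1.0 / 3.0 can be folded
   here even when flag_rounding_math or flag_trapping_math would
   otherwise require fold to keep the operation for run time.
   (Illustrative example.)  */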
13031
13032 #define START_FOLD_INIT \
13033 int saved_signaling_nans = flag_signaling_nans;\
13034 int saved_trapping_math = flag_trapping_math;\
13035 int saved_rounding_math = flag_rounding_math;\
13036 int saved_trapv = flag_trapv;\
13037 int saved_folding_initializer = folding_initializer;\
13038 flag_signaling_nans = 0;\
13039 flag_trapping_math = 0;\
13040 flag_rounding_math = 0;\
13041 flag_trapv = 0;\
13042 folding_initializer = 1;
13043
13044 #define END_FOLD_INIT \
13045 flag_signaling_nans = saved_signaling_nans;\
13046 flag_trapping_math = saved_trapping_math;\
13047 flag_rounding_math = saved_rounding_math;\
13048 flag_trapv = saved_trapv;\
13049 folding_initializer = saved_folding_initializer;
13050
13051 tree
13052 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13053 tree type, tree op)
13054 {
13055 tree result;
13056 START_FOLD_INIT;
13057
13058 result = fold_build1_loc (loc, code, type, op);
13059
13060 END_FOLD_INIT;
13061 return result;
13062 }
13063
13064 tree
13065 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13066 tree type, tree op0, tree op1)
13067 {
13068 tree result;
13069 START_FOLD_INIT;
13070
13071 result = fold_build2_loc (loc, code, type, op0, op1);
13072
13073 END_FOLD_INIT;
13074 return result;
13075 }
13076
13077 tree
13078 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13079 int nargs, tree *argarray)
13080 {
13081 tree result;
13082 START_FOLD_INIT;
13083
13084 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13085
13086 END_FOLD_INIT;
13087 return result;
13088 }
13089
13090 #undef START_FOLD_INIT
13091 #undef END_FOLD_INIT
13092
13093 /* Determine if the first argument is a multiple of the second argument.
13094 Return 0 if it is not, or if we cannot easily determine it to be.
13095
13096 An example of the sort of thing we care about (at this point; this routine
13097 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13098 fold cases do now) is discovering that
13099
13100 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13101
13102 is a multiple of
13103
13104 SAVE_EXPR (J * 8)
13105
13106 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13107
13108 This code also handles discovering that
13109
13110 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13111
13112 is a multiple of 8 so we don't have to worry about dealing with a
13113 possible remainder.
13114
13115 Note that we *look* inside a SAVE_EXPR only to determine how it was
13116 calculated; it is not safe for fold to do much of anything else with the
13117 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13118 at run time. For example, the latter example above *cannot* be implemented
13119 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13120 evaluation time of the original SAVE_EXPR is not necessarily the same at
13121 the time the new expression is evaluated. The only optimization of this
13122 sort that would be valid is changing
13123
13124 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13125
13126 divided by 8 to
13127
13128 SAVE_EXPR (I) * SAVE_EXPR (J)
13129
13130 (where the same SAVE_EXPR (J) is used in the original and the
13131 transformed version). */
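/* Some simple illustrative queries and their results:

     multiple_of_p (int, 24, 8)     == 1   (INTEGER_CST case)
     multiple_of_p (int, i * 12, 4) == 1   (MULT_EXPR recursion)
     multiple_of_p (int, i + 1, 2)  == 0   (cannot be proved)  */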
13132
13133 int
13134 multiple_of_p (tree type, const_tree top, const_tree bottom)
13135 {
13136 gimple *stmt;
13137 tree t1, op1, op2;
13138
13139 if (operand_equal_p (top, bottom, 0))
13140 return 1;
13141
13142 if (TREE_CODE (type) != INTEGER_TYPE)
13143 return 0;
13144
13145 switch (TREE_CODE (top))
13146 {
13147 case BIT_AND_EXPR:
13148 /* Bitwise AND provides a power-of-two multiple. If the mask is
13149 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13150 if (!integer_pow2p (bottom))
13151 return 0;
13152 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13153 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13154
13155 case MULT_EXPR:
13156 if (TREE_CODE (bottom) == INTEGER_CST)
13157 {
13158 op1 = TREE_OPERAND (top, 0);
13159 op2 = TREE_OPERAND (top, 1);
13160 if (TREE_CODE (op1) == INTEGER_CST)
13161 std::swap (op1, op2);
13162 if (TREE_CODE (op2) == INTEGER_CST)
13163 {
13164 if (multiple_of_p (type, op2, bottom))
13165 return 1;
13166 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
13167 if (multiple_of_p (type, bottom, op2))
13168 {
13169 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
13170 wi::to_widest (op2));
13171 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
13172 {
13173 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
13174 return multiple_of_p (type, op1, op2);
13175 }
13176 }
13177 return multiple_of_p (type, op1, bottom);
13178 }
13179 }
13180 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13181 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13182
13183 case MINUS_EXPR:
13184 /* It is impossible to prove precisely whether op0 - op1 is a
13185 multiple of bottom, so be conservative and check whether both
13186 op0 and op1 are multiples of bottom. Note we check the second
13187 operand first since it's usually simpler. */
13188 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13189 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13190
13191 case PLUS_EXPR:
13192 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
13193 as op0 - 3 if the expression has unsigned type. For example,
13194 (X / 3) + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
13195 op1 = TREE_OPERAND (top, 1);
13196 if (TYPE_UNSIGNED (type)
13197 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
13198 op1 = fold_build1 (NEGATE_EXPR, type, op1);
13199 return (multiple_of_p (type, op1, bottom)
13200 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13201
13202 case LSHIFT_EXPR:
13203 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13204 {
13205 op1 = TREE_OPERAND (top, 1);
13206 /* const_binop may not detect overflow correctly,
13207 so check for it explicitly here. */
13208 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
13209 wi::to_wide (op1))
13210 && (t1 = fold_convert (type,
13211 const_binop (LSHIFT_EXPR, size_one_node,
13212 op1))) != 0
13213 && !TREE_OVERFLOW (t1))
13214 return multiple_of_p (type, t1, bottom);
13215 }
13216 return 0;
13217
13218 case NOP_EXPR:
13219 /* Can't handle conversions from a non-integral or a wider integral type. */
13220 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13221 || (TYPE_PRECISION (type)
13222 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13223 return 0;
13224
13225 /* fall through */
13226
13227 case SAVE_EXPR:
13228 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13229
13230 case COND_EXPR:
13231 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13232 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
13233
13234 case INTEGER_CST:
13235 if (TREE_CODE (bottom) != INTEGER_CST
13236 || integer_zerop (bottom)
13237 || (TYPE_UNSIGNED (type)
13238 && (tree_int_cst_sgn (top) < 0
13239 || tree_int_cst_sgn (bottom) < 0)))
13240 return 0;
13241 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
13242 SIGNED);
13243
13244 case SSA_NAME:
13245 if (TREE_CODE (bottom) == INTEGER_CST
13246 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
13247 && gimple_code (stmt) == GIMPLE_ASSIGN)
13248 {
13249 enum tree_code code = gimple_assign_rhs_code (stmt);
13250
13251 /* Check for special cases to see if top is defined as a multiple
13252 of bottom:
13253
13254 top = X & ~(bottom - 1) ; bottom is a power of 2
13255
13256 or
13257
13258 Y = X % bottom
13259 top = X - Y. */
13260 if (code == BIT_AND_EXPR
13261 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13262 && TREE_CODE (op2) == INTEGER_CST
13263 && integer_pow2p (bottom)
13264 && wi::multiple_of_p (wi::to_widest (op2),
13265 wi::to_widest (bottom), UNSIGNED))
13266 return 1;
13267
13268 op1 = gimple_assign_rhs1 (stmt);
13269 if (code == MINUS_EXPR
13270 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13271 && TREE_CODE (op2) == SSA_NAME
13272 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
13273 && gimple_code (stmt) == GIMPLE_ASSIGN
13274 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
13275 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
13276 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
13277 return 1;
13278 }
13279
13280 /* fall through */
13281
13282 default:
13283 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
13284 return multiple_p (wi::to_poly_widest (top),
13285 wi::to_poly_widest (bottom));
13286
13287 return 0;
13288 }
13289 }
13290
13291 #define tree_expr_nonnegative_warnv_p(X, Y) \
13292 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13293
13294 #define RECURSE(X) \
13295 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
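/* Note: the _Pragma poisoning above forces every recursive query in
   the functions below to go through RECURSE, which increments DEPTH
   so that the recursion bound checked for SSA_NAMEs in
   tree_single_nonnegative_warnv_p is honored.  */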
13296
13297 /* Return true if CODE or TYPE is known to be non-negative. */
13298
13299 static bool
13300 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13301 {
13302 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13303 && truth_value_p (code))
13304 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13305 have a signed:1 type (where the values are -1 and 0). */
13306 return true;
13307 return false;
13308 }
13309
13310 /* Return true if (CODE OP0) is known to be non-negative. If the return
13311 value is based on the assumption that signed overflow is undefined,
13312 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13313 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13314
13315 bool
13316 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13317 bool *strict_overflow_p, int depth)
13318 {
13319 if (TYPE_UNSIGNED (type))
13320 return true;
13321
13322 switch (code)
13323 {
13324 case ABS_EXPR:
13325 /* We can't return 1 if flag_wrapv is set because
13326 ABS_EXPR<INT_MIN> = INT_MIN. */
13327 if (!ANY_INTEGRAL_TYPE_P (type))
13328 return true;
13329 if (TYPE_OVERFLOW_UNDEFINED (type))
13330 {
13331 *strict_overflow_p = true;
13332 return true;
13333 }
13334 break;
13335
13336 case NON_LVALUE_EXPR:
13337 case FLOAT_EXPR:
13338 case FIX_TRUNC_EXPR:
13339 return RECURSE (op0);
13340
13341 CASE_CONVERT:
13342 {
13343 tree inner_type = TREE_TYPE (op0);
13344 tree outer_type = type;
13345
13346 if (TREE_CODE (outer_type) == REAL_TYPE)
13347 {
13348 if (TREE_CODE (inner_type) == REAL_TYPE)
13349 return RECURSE (op0);
13350 if (INTEGRAL_TYPE_P (inner_type))
13351 {
13352 if (TYPE_UNSIGNED (inner_type))
13353 return true;
13354 return RECURSE (op0);
13355 }
13356 }
13357 else if (INTEGRAL_TYPE_P (outer_type))
13358 {
13359 if (TREE_CODE (inner_type) == REAL_TYPE)
13360 return RECURSE (op0);
13361 if (INTEGRAL_TYPE_P (inner_type))
13362 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13363 && TYPE_UNSIGNED (inner_type);
13364 }
13365 }
13366 break;
13367
13368 default:
13369 return tree_simple_nonnegative_warnv_p (code, type);
13370 }
13371
13372 /* We don't know the sign of `t', so be conservative and return false. */
13373 return false;
13374 }
13375
13376 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13377 value is based on the assumption that signed overflow is undefined,
13378 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13379 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13380
13381 bool
13382 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13383 tree op1, bool *strict_overflow_p,
13384 int depth)
13385 {
13386 if (TYPE_UNSIGNED (type))
13387 return true;
13388
13389 switch (code)
13390 {
13391 case POINTER_PLUS_EXPR:
13392 case PLUS_EXPR:
13393 if (FLOAT_TYPE_P (type))
13394 return RECURSE (op0) && RECURSE (op1);
13395
13396 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13397 both unsigned and at least 2 bits shorter than the result. */
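/* E.g. (int) (unsigned char) x + (int) (unsigned char) y needs at
   most 9 bits, so a 32-bit sum can never wrap negative.
   (Illustrative types.)  */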
13398 if (TREE_CODE (type) == INTEGER_TYPE
13399 && TREE_CODE (op0) == NOP_EXPR
13400 && TREE_CODE (op1) == NOP_EXPR)
13401 {
13402 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13403 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13404 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13405 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13406 {
13407 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13408 TYPE_PRECISION (inner2)) + 1;
13409 return prec < TYPE_PRECISION (type);
13410 }
13411 }
13412 break;
13413
13414 case MULT_EXPR:
13415 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13416 {
13417 /* x * x is always non-negative for floating-point x,
13418 or when overflow is undefined. */
13419 if (operand_equal_p (op0, op1, 0)
13420 || (RECURSE (op0) && RECURSE (op1)))
13421 {
13422 if (ANY_INTEGRAL_TYPE_P (type)
13423 && TYPE_OVERFLOW_UNDEFINED (type))
13424 *strict_overflow_p = true;
13425 return true;
13426 }
13427 }
13428
13429 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13430 both unsigned and their combined precision is less than the result's. */
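/* E.g. (int) (unsigned char) x * (int) (unsigned char) y needs at
   most 16 bits, so a 32-bit product cannot overflow into the sign
   bit.  (Illustrative types.)  */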
13431 if (TREE_CODE (type) == INTEGER_TYPE
13432 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13433 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13434 {
13435 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13436 ? TREE_TYPE (TREE_OPERAND (op0, 0))
13437 : TREE_TYPE (op0);
13438 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13439 ? TREE_TYPE (TREE_OPERAND (op1, 0))
13440 : TREE_TYPE (op1);
13441
13442 bool unsigned0 = TYPE_UNSIGNED (inner0);
13443 bool unsigned1 = TYPE_UNSIGNED (inner1);
13444
13445 if (TREE_CODE (op0) == INTEGER_CST)
13446 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13447
13448 if (TREE_CODE (op1) == INTEGER_CST)
13449 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13450
13451 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13452 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13453 {
13454 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13455 ? tree_int_cst_min_precision (op0, UNSIGNED)
13456 : TYPE_PRECISION (inner0);
13457
13458 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13459 ? tree_int_cst_min_precision (op1, UNSIGNED)
13460 : TYPE_PRECISION (inner1);
13461
13462 return precision0 + precision1 < TYPE_PRECISION (type);
13463 }
13464 }
13465 return false;
13466
13467 case BIT_AND_EXPR:
13468 case MAX_EXPR:
13469 return RECURSE (op0) || RECURSE (op1);
13470
13471 case BIT_IOR_EXPR:
13472 case BIT_XOR_EXPR:
13473 case MIN_EXPR:
13474 case RDIV_EXPR:
13475 case TRUNC_DIV_EXPR:
13476 case CEIL_DIV_EXPR:
13477 case FLOOR_DIV_EXPR:
13478 case ROUND_DIV_EXPR:
13479 return RECURSE (op0) && RECURSE (op1);
13480
13481 case TRUNC_MOD_EXPR:
13482 return RECURSE (op0);
13483
13484 case FLOOR_MOD_EXPR:
13485 return RECURSE (op1);
13486
13487 case CEIL_MOD_EXPR:
13488 case ROUND_MOD_EXPR:
13489 default:
13490 return tree_simple_nonnegative_warnv_p (code, type);
13491 }
13492
13493 /* We don't know the sign of `t', so be conservative and return false. */
13494 return false;
13495 }
13496
13497 /* Return true if T is known to be non-negative. If the return
13498 value is based on the assumption that signed overflow is undefined,
13499 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13500 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13501
13502 bool
13503 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13504 {
13505 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13506 return true;
13507
13508 switch (TREE_CODE (t))
13509 {
13510 case INTEGER_CST:
13511 return tree_int_cst_sgn (t) >= 0;
13512
13513 case REAL_CST:
13514 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13515
13516 case FIXED_CST:
13517 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13518
13519 case COND_EXPR:
13520 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13521
13522 case SSA_NAME:
13523 /* Limit the depth of recursion to avoid quadratic behavior.
13524 This is expected to catch almost all occurrences in practice.
13525 If this code misses important cases that unbounded recursion
13526 would not, passes that need this information could be revised
13527 to provide it through dataflow propagation. */
13528 return (!name_registered_for_update_p (t)
13529 && depth < param_max_ssa_name_query_depth
13530 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13531 strict_overflow_p, depth));
13532
13533 default:
13534 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13535 }
13536 }
13537
13538 /* Return true if T is known to be non-negative. If the return
13539 value is based on the assumption that signed overflow is undefined,
13540 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13541 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13542
13543 bool
13544 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13545 bool *strict_overflow_p, int depth)
13546 {
13547 switch (fn)
13548 {
13549 CASE_CFN_ACOS:
13550 CASE_CFN_ACOSH:
13551 CASE_CFN_CABS:
13552 CASE_CFN_COSH:
13553 CASE_CFN_ERFC:
13554 CASE_CFN_EXP:
13555 CASE_CFN_EXP10:
13556 CASE_CFN_EXP2:
13557 CASE_CFN_FABS:
13558 CASE_CFN_FDIM:
13559 CASE_CFN_HYPOT:
13560 CASE_CFN_POW10:
13561 CASE_CFN_FFS:
13562 CASE_CFN_PARITY:
13563 CASE_CFN_POPCOUNT:
13564 CASE_CFN_CLZ:
13565 CASE_CFN_CLRSB:
13566 case CFN_BUILT_IN_BSWAP32:
13567 case CFN_BUILT_IN_BSWAP64:
13568 /* Always true. */
13569 return true;
13570
13571 CASE_CFN_SQRT:
13572 CASE_CFN_SQRT_FN:
13573 /* sqrt(-0.0) is -0.0. */
13574 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13575 return true;
13576 return RECURSE (arg0);
13577
13578 CASE_CFN_ASINH:
13579 CASE_CFN_ATAN:
13580 CASE_CFN_ATANH:
13581 CASE_CFN_CBRT:
13582 CASE_CFN_CEIL:
13583 CASE_CFN_CEIL_FN:
13584 CASE_CFN_ERF:
13585 CASE_CFN_EXPM1:
13586 CASE_CFN_FLOOR:
13587 CASE_CFN_FLOOR_FN:
13588 CASE_CFN_FMOD:
13589 CASE_CFN_FREXP:
13590 CASE_CFN_ICEIL:
13591 CASE_CFN_IFLOOR:
13592 CASE_CFN_IRINT:
13593 CASE_CFN_IROUND:
13594 CASE_CFN_LCEIL:
13595 CASE_CFN_LDEXP:
13596 CASE_CFN_LFLOOR:
13597 CASE_CFN_LLCEIL:
13598 CASE_CFN_LLFLOOR:
13599 CASE_CFN_LLRINT:
13600 CASE_CFN_LLROUND:
13601 CASE_CFN_LRINT:
13602 CASE_CFN_LROUND:
13603 CASE_CFN_MODF:
13604 CASE_CFN_NEARBYINT:
13605 CASE_CFN_NEARBYINT_FN:
13606 CASE_CFN_RINT:
13607 CASE_CFN_RINT_FN:
13608 CASE_CFN_ROUND:
13609 CASE_CFN_ROUND_FN:
13610 CASE_CFN_ROUNDEVEN:
13611 CASE_CFN_ROUNDEVEN_FN:
13612 CASE_CFN_SCALB:
13613 CASE_CFN_SCALBLN:
13614 CASE_CFN_SCALBN:
13615 CASE_CFN_SIGNBIT:
13616 CASE_CFN_SIGNIFICAND:
13617 CASE_CFN_SINH:
13618 CASE_CFN_TANH:
13619 CASE_CFN_TRUNC:
13620 CASE_CFN_TRUNC_FN:
13621 /* True if the 1st argument is nonnegative. */
13622 return RECURSE (arg0);
13623
13624 CASE_CFN_FMAX:
13625 CASE_CFN_FMAX_FN:
13626 /* True if the 1st OR 2nd arguments are nonnegative. */
13627 return RECURSE (arg0) || RECURSE (arg1);
13628
13629 CASE_CFN_FMIN:
13630 CASE_CFN_FMIN_FN:
13631 /* True if the 1st AND 2nd arguments are nonnegative. */
13632 return RECURSE (arg0) && RECURSE (arg1);
13633
13634 CASE_CFN_COPYSIGN:
13635 CASE_CFN_COPYSIGN_FN:
13636 /* True if the 2nd argument is nonnegative. */
13637 return RECURSE (arg1);
13638
13639 CASE_CFN_POWI:
13640 /* True if the 1st argument is nonnegative or the second
13641 argument is an even integer. */
13642 if (TREE_CODE (arg1) == INTEGER_CST
13643 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13644 return true;
13645 return RECURSE (arg0);
13646
13647 CASE_CFN_POW:
13648 /* True if the 1st argument is nonnegative or the second
13649 argument is an even integer valued real. */
13650 if (TREE_CODE (arg1) == REAL_CST)
13651 {
13652 REAL_VALUE_TYPE c;
13653 HOST_WIDE_INT n;
13654
13655 c = TREE_REAL_CST (arg1);
13656 n = real_to_integer (&c);
13657 if ((n & 1) == 0)
13658 {
13659 REAL_VALUE_TYPE cint;
13660 real_from_integer (&cint, VOIDmode, n, SIGNED);
13661 if (real_identical (&c, &cint))
13662 return true;
13663 }
13664 }
13665 return RECURSE (arg0);
13666
13667 default:
13668 break;
13669 }
13670 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13671 }
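
/* Worked example for the POW case above (an editor's sketch): pow (x, 2.0)
   is known non-negative for any x, because 2.0 is an even integer-valued
   REAL_CST, while pow (x, 3.0) falls back to RECURSE (arg0).  Likewise
   copysign (x, y) takes its sign from y, so only the second argument is
   queried.  */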
13672
13673 /* Return true if T is known to be non-negative. If the return
13674 value is based on the assumption that signed overflow is undefined,
13675 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13676 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13677
13678 static bool
13679 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13680 {
13681 enum tree_code code = TREE_CODE (t);
13682 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13683 return true;
13684
13685 switch (code)
13686 {
13687 case TARGET_EXPR:
13688 {
13689 tree temp = TARGET_EXPR_SLOT (t);
13690 t = TARGET_EXPR_INITIAL (t);
13691
13692 /* If the initializer is non-void, then it's a normal expression
13693 that will be assigned to the slot. */
13694 if (!VOID_TYPE_P (t))
13695 return RECURSE (t);
13696
13697 /* Otherwise, the initializer sets the slot in some way. One common
13698 way is an assignment statement at the end of the initializer. */
13699 while (1)
13700 {
13701 if (TREE_CODE (t) == BIND_EXPR)
13702 t = expr_last (BIND_EXPR_BODY (t));
13703 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13704 || TREE_CODE (t) == TRY_CATCH_EXPR)
13705 t = expr_last (TREE_OPERAND (t, 0));
13706 else if (TREE_CODE (t) == STATEMENT_LIST)
13707 t = expr_last (t);
13708 else
13709 break;
13710 }
13711 if (TREE_CODE (t) == MODIFY_EXPR
13712 && TREE_OPERAND (t, 0) == temp)
13713 return RECURSE (TREE_OPERAND (t, 1));
13714
13715 return false;
13716 }
13717
13718 case CALL_EXPR:
13719 {
13720 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13721 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13722
13723 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13724 get_call_combined_fn (t),
13725 arg0,
13726 arg1,
13727 strict_overflow_p, depth);
13728 }
13729 case COMPOUND_EXPR:
13730 case MODIFY_EXPR:
13731 return RECURSE (TREE_OPERAND (t, 1));
13732
13733 case BIND_EXPR:
13734 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13735
13736 case SAVE_EXPR:
13737 return RECURSE (TREE_OPERAND (t, 0));
13738
13739 default:
13740 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13741 }
13742 }
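
/* Sketch of the TARGET_EXPR walk above (editor's note): for a temporary
   initialized by { ...; slot = <expr>; }, the loop peels BIND_EXPRs, TRY
   blocks and STATEMENT_LISTs until it reaches the final "slot = <expr>"
   assignment, and the query then reduces to whether <expr> itself is
   non-negative.  */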
13743
13744 #undef RECURSE
13745 #undef tree_expr_nonnegative_warnv_p
13746
13747 /* Return true if T is known to be non-negative. If the return
13748 value is based on the assumption that signed overflow is undefined,
13749 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13750 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13751
13752 bool
13753 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13754 {
13755 enum tree_code code;
13756 if (t == error_mark_node)
13757 return false;
13758
13759 code = TREE_CODE (t);
13760 switch (TREE_CODE_CLASS (code))
13761 {
13762 case tcc_binary:
13763 case tcc_comparison:
13764 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13765 TREE_TYPE (t),
13766 TREE_OPERAND (t, 0),
13767 TREE_OPERAND (t, 1),
13768 strict_overflow_p, depth);
13769
13770 case tcc_unary:
13771 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13772 TREE_TYPE (t),
13773 TREE_OPERAND (t, 0),
13774 strict_overflow_p, depth);
13775
13776 case tcc_constant:
13777 case tcc_declaration:
13778 case tcc_reference:
13779 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13780
13781 default:
13782 break;
13783 }
13784
13785 switch (code)
13786 {
13787 case TRUTH_AND_EXPR:
13788 case TRUTH_OR_EXPR:
13789 case TRUTH_XOR_EXPR:
13790 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13791 TREE_TYPE (t),
13792 TREE_OPERAND (t, 0),
13793 TREE_OPERAND (t, 1),
13794 strict_overflow_p, depth);
13795 case TRUTH_NOT_EXPR:
13796 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13797 TREE_TYPE (t),
13798 TREE_OPERAND (t, 0),
13799 strict_overflow_p, depth);
13800
13801 case COND_EXPR:
13802 case CONSTRUCTOR:
13803 case OBJ_TYPE_REF:
13804 case ASSERT_EXPR:
13805 case ADDR_EXPR:
13806 case WITH_SIZE_EXPR:
13807 case SSA_NAME:
13808 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13809
13810 default:
13811 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13812 }
13813 }
13814
13815 /* Return true if `t' is known to be non-negative. Handle warnings
13816 about undefined signed overflow. */
13817
13818 bool
13819 tree_expr_nonnegative_p (tree t)
13820 {
13821 bool ret, strict_overflow_p;
13822
13823 strict_overflow_p = false;
13824 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13825 if (strict_overflow_p)
13826 fold_overflow_warning (("assuming signed overflow does not occur when "
13827 "determining that expression is always "
13828 "non-negative"),
13829 WARN_STRICT_OVERFLOW_MISC);
13830 return ret;
13831 }
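
/* Usage sketch (illustrative, assuming a signed GENERIC expression EXPR):

     if (tree_expr_nonnegative_p (expr))
       ... e.g. simplify ABS_EXPR <expr> to expr ...

   The -Wstrict-overflow warning is only issued when the positive answer
   relied on signed overflow being undefined.  */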
13832
13833
13834 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13835 For floating point we further ensure that T is not denormal.
13836 Similar logic is present in nonzero_address in rtlanal.h.
13837
13838 If the return value is based on the assumption that signed overflow
13839 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13840 change *STRICT_OVERFLOW_P. */
13841
13842 bool
13843 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13844 bool *strict_overflow_p)
13845 {
13846 switch (code)
13847 {
13848 case ABS_EXPR:
13849 return tree_expr_nonzero_warnv_p (op0,
13850 strict_overflow_p);
13851
13852 case NOP_EXPR:
13853 {
13854 tree inner_type = TREE_TYPE (op0);
13855 tree outer_type = type;
13856
13857 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13858 && tree_expr_nonzero_warnv_p (op0,
13859 strict_overflow_p));
13860 }
13861 break;
13862
13863 case NON_LVALUE_EXPR:
13864 return tree_expr_nonzero_warnv_p (op0,
13865 strict_overflow_p);
13866
13867 default:
13868 break;
13869 }
13870
13871 return false;
13872 }
13873
13874 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13875 For floating point we further ensure that T is not denormal.
13876 Similar logic is present in nonzero_address in rtlanal.h.
13877
13878 If the return value is based on the assumption that signed overflow
13879 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13880 change *STRICT_OVERFLOW_P. */
13881
13882 bool
13883 tree_binary_nonzero_warnv_p (enum tree_code code,
13884 tree type,
13885 tree op0,
13886 tree op1, bool *strict_overflow_p)
13887 {
13888 bool sub_strict_overflow_p;
13889 switch (code)
13890 {
13891 case POINTER_PLUS_EXPR:
13892 case PLUS_EXPR:
13893 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13894 {
13898 13895 /* In the presence of negative values it is hard
13899 13896 to say anything. */
13897 sub_strict_overflow_p = false;
13898 if (!tree_expr_nonnegative_warnv_p (op0,
13899 &sub_strict_overflow_p)
13900 || !tree_expr_nonnegative_warnv_p (op1,
13901 &sub_strict_overflow_p))
13902 return false;
13906 13903 /* One of the operands must be positive and the other non-negative. */
13904 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13905 overflows, on a twos-complement machine the sum of two
13906 nonnegative numbers can never be zero. */
13907 return (tree_expr_nonzero_warnv_p (op0,
13908 strict_overflow_p)
13909 || tree_expr_nonzero_warnv_p (op1,
13910 strict_overflow_p));
13911 }
13912 break;
13913
13914 case MULT_EXPR:
13915 if (TYPE_OVERFLOW_UNDEFINED (type))
13916 {
13917 if (tree_expr_nonzero_warnv_p (op0,
13918 strict_overflow_p)
13919 && tree_expr_nonzero_warnv_p (op1,
13920 strict_overflow_p))
13921 {
13922 *strict_overflow_p = true;
13923 return true;
13924 }
13925 }
13926 break;
13927
13931 13928     case MIN_EXPR:
13932 13929       sub_strict_overflow_p = false;
13933 13930       if (tree_expr_nonzero_warnv_p (op0, &sub_strict_overflow_p)
13934 13931 	  && tree_expr_nonzero_warnv_p (op1, &sub_strict_overflow_p))
13935 13932 	{
13936 13933 	  /* MIN of two nonzero values is one of them, hence nonzero.  */
13937 13934 	  if (sub_strict_overflow_p)
13938 13935 	    *strict_overflow_p = true;
13939 13936 	  return true;
13940 13937 	}
13938 break;
13939
13940 case MAX_EXPR:
13941 sub_strict_overflow_p = false;
13942 if (tree_expr_nonzero_warnv_p (op0,
13943 &sub_strict_overflow_p))
13944 {
13945 if (sub_strict_overflow_p)
13946 *strict_overflow_p = true;
13947
13948 /* When both operands are nonzero, then MAX must be too. */
13949 if (tree_expr_nonzero_warnv_p (op1,
13950 strict_overflow_p))
13951 return true;
13952
13953 /* MAX where operand 0 is positive is positive. */
13954 return tree_expr_nonnegative_warnv_p (op0,
13955 strict_overflow_p);
13956 }
13957 /* MAX where operand 1 is positive is positive. */
13958 else if (tree_expr_nonzero_warnv_p (op1,
13959 &sub_strict_overflow_p)
13960 && tree_expr_nonnegative_warnv_p (op1,
13961 &sub_strict_overflow_p))
13962 {
13963 if (sub_strict_overflow_p)
13964 *strict_overflow_p = true;
13965 return true;
13966 }
13967 break;
13968
13969 case BIT_IOR_EXPR:
13970 return (tree_expr_nonzero_warnv_p (op1,
13971 strict_overflow_p)
13972 || tree_expr_nonzero_warnv_p (op0,
13973 strict_overflow_p));
13974
13975 default:
13976 break;
13977 }
13978
13979 return false;
13980 }
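
/* Worked example for the PLUS_EXPR arm above: when signed overflow is
   undefined, x + 1 with x known non-negative is provably nonzero; both
   operands are non-negative and one of them (the constant 1) is nonzero,
   and two non-negative values cannot sum to zero without wrapping.  The
   MULT_EXPR arm, by contrast, records its reliance on undefined overflow
   by setting *STRICT_OVERFLOW_P.  */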
13981
13982 /* Return true when T is an address and is known to be nonzero.
13983 For floating point we further ensure that T is not denormal.
13984 Similar logic is present in nonzero_address in rtlanal.h.
13985
13986 If the return value is based on the assumption that signed overflow
13987 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13988 change *STRICT_OVERFLOW_P. */
13989
13990 bool
13991 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13992 {
13993 bool sub_strict_overflow_p;
13994 switch (TREE_CODE (t))
13995 {
13996 case INTEGER_CST:
13997 return !integer_zerop (t);
13998
13999 case ADDR_EXPR:
14000 {
14001 tree base = TREE_OPERAND (t, 0);
14002
14003 if (!DECL_P (base))
14004 base = get_base_address (base);
14005
14006 if (base && TREE_CODE (base) == TARGET_EXPR)
14007 base = TARGET_EXPR_SLOT (base);
14008
14009 if (!base)
14010 return false;
14011
14015 14012 /* For objects in the symbol table, check if we know they are non-zero.
14013 Don't do anything for variables and functions before symtab is built;
14014 it is quite possible that they will be declared weak later. */
14015 int nonzero_addr = maybe_nonzero_address (base);
14016 if (nonzero_addr >= 0)
14017 return nonzero_addr;
14018
14019 /* Constants are never weak. */
14020 if (CONSTANT_CLASS_P (base))
14021 return true;
14022
14023 return false;
14024 }
14025
14026 case COND_EXPR:
14027 sub_strict_overflow_p = false;
14028 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14029 &sub_strict_overflow_p)
14030 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14031 &sub_strict_overflow_p))
14032 {
14033 if (sub_strict_overflow_p)
14034 *strict_overflow_p = true;
14035 return true;
14036 }
14037 break;
14038
14039 case SSA_NAME:
14040 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
14041 break;
14042 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
14043
14044 default:
14045 break;
14046 }
14047 return false;
14048 }
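
/* Example: &some_local_decl is always nonzero, which is what lets a
   comparison such as &x == 0 fold to false.  For file-scope symbols the
   decision is deferred to maybe_nonzero_address, because a symbol that
   may later be declared weak can legitimately have address zero.  */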
14049
14050 #define integer_valued_real_p(X) \
14051 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14052
14053 #define RECURSE(X) \
14054 ((integer_valued_real_p) (X, depth + 1))
14055
14056 /* Return true if the floating point result of (CODE OP0) has an
14057 integer value. We also allow +Inf, -Inf and NaN to be considered
14058 integer values. Return false for signaling NaN.
14059
14060 DEPTH is the current nesting depth of the query. */
14061
14062 bool
14063 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
14064 {
14065 switch (code)
14066 {
14067 case FLOAT_EXPR:
14068 return true;
14069
14070 case ABS_EXPR:
14071 return RECURSE (op0);
14072
14073 CASE_CONVERT:
14074 {
14075 tree type = TREE_TYPE (op0);
14076 if (TREE_CODE (type) == INTEGER_TYPE)
14077 return true;
14078 if (TREE_CODE (type) == REAL_TYPE)
14079 return RECURSE (op0);
14080 break;
14081 }
14082
14083 default:
14084 break;
14085 }
14086 return false;
14087 }
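
/* Examples of the arms above: a FLOAT_EXPR (integer-to-float conversion)
   always produces an integer value; a float-to-float conversion is
   integer valued whenever its operand is; and ABS_EXPR simply recurses,
   since |x| is integral exactly when x is.  */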
14088
14089 /* Return true if the floating point result of (CODE OP0 OP1) has an
14090 integer value. We also allow +Inf, -Inf and NaN to be considered
14091 integer values. Return false for signaling NaN.
14092
14093 DEPTH is the current nesting depth of the query. */
14094
14095 bool
14096 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
14097 {
14098 switch (code)
14099 {
14100 case PLUS_EXPR:
14101 case MINUS_EXPR:
14102 case MULT_EXPR:
14103 case MIN_EXPR:
14104 case MAX_EXPR:
14105 return RECURSE (op0) && RECURSE (op1);
14106
14107 default:
14108 break;
14109 }
14110 return false;
14111 }
14112
14116 14113 /* Return true if the floating point result of calling FN with arguments
14117 14114 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
14118 14115 considered integer values. Return false for signaling NaN. If FN
14119 14116 takes fewer than 2 arguments, the remaining ARGn are null.
14117
14118 DEPTH is the current nesting depth of the query. */
14119
14120 bool
14121 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
14122 {
14123 switch (fn)
14124 {
14125 CASE_CFN_CEIL:
14126 CASE_CFN_CEIL_FN:
14127 CASE_CFN_FLOOR:
14128 CASE_CFN_FLOOR_FN:
14129 CASE_CFN_NEARBYINT:
14130 CASE_CFN_NEARBYINT_FN:
14131 CASE_CFN_RINT:
14132 CASE_CFN_RINT_FN:
14133 CASE_CFN_ROUND:
14134 CASE_CFN_ROUND_FN:
14135 CASE_CFN_ROUNDEVEN:
14136 CASE_CFN_ROUNDEVEN_FN:
14137 CASE_CFN_TRUNC:
14138 CASE_CFN_TRUNC_FN:
14139 return true;
14140
14141 CASE_CFN_FMIN:
14142 CASE_CFN_FMIN_FN:
14143 CASE_CFN_FMAX:
14144 CASE_CFN_FMAX_FN:
14145 return RECURSE (arg0) && RECURSE (arg1);
14146
14147 default:
14148 break;
14149 }
14150 return false;
14151 }
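
/* Example: trunc (x) and round (x) are integer valued by construction,
   which is what allows e.g. a nested trunc (trunc (x)) to fold to
   trunc (x) elsewhere, whereas fmin (x, y) is integer valued only when
   both arguments are, hence the RECURSE on arg0 and arg1.  */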
14152
14153 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
14154 has an integer value. We also allow +Inf, -Inf and NaN to be
14155 considered integer values. Return false for signaling NaN.
14156
14157 DEPTH is the current nesting depth of the query. */
14158
14159 bool
14160 integer_valued_real_single_p (tree t, int depth)
14161 {
14162 switch (TREE_CODE (t))
14163 {
14164 case REAL_CST:
14165 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
14166
14167 case COND_EXPR:
14168 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14169
14170 case SSA_NAME:
14171 /* Limit the depth of recursion to avoid quadratic behavior.
14172 This is expected to catch almost all occurrences in practice.
14173 If this code misses important cases that unbounded recursion
14174 would not, passes that need this information could be revised
14175 to provide it through dataflow propagation. */
14176 return (!name_registered_for_update_p (t)
14177 && depth < param_max_ssa_name_query_depth
14178 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
14179 depth));
14180
14181 default:
14182 break;
14183 }
14184 return false;
14185 }
14186
14187 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
14188 has an integer value. We also allow +Inf, -Inf and NaN to be
14189 considered integer values. Return false for signaling NaN.
14190
14191 DEPTH is the current nesting depth of the query. */
14192
14193 static bool
14194 integer_valued_real_invalid_p (tree t, int depth)
14195 {
14196 switch (TREE_CODE (t))
14197 {
14198 case COMPOUND_EXPR:
14199 case MODIFY_EXPR:
14200 case BIND_EXPR:
14201 return RECURSE (TREE_OPERAND (t, 1));
14202
14203 case SAVE_EXPR:
14204 return RECURSE (TREE_OPERAND (t, 0));
14205
14206 default:
14207 break;
14208 }
14209 return false;
14210 }
14211
14212 #undef RECURSE
14213 #undef integer_valued_real_p
14214
14215 /* Return true if the floating point expression T has an integer value.
14216 We also allow +Inf, -Inf and NaN to be considered integer values.
14217 Return false for signaling NaN.
14218
14219 DEPTH is the current nesting depth of the query. */
14220
14221 bool
14222 integer_valued_real_p (tree t, int depth)
14223 {
14224 if (t == error_mark_node)
14225 return false;
14226
14227 STRIP_ANY_LOCATION_WRAPPER (t);
14228
14229 tree_code code = TREE_CODE (t);
14230 switch (TREE_CODE_CLASS (code))
14231 {
14232 case tcc_binary:
14233 case tcc_comparison:
14234 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
14235 TREE_OPERAND (t, 1), depth);
14236
14237 case tcc_unary:
14238 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
14239
14240 case tcc_constant:
14241 case tcc_declaration:
14242 case tcc_reference:
14243 return integer_valued_real_single_p (t, depth);
14244
14245 default:
14246 break;
14247 }
14248
14249 switch (code)
14250 {
14251 case COND_EXPR:
14252 case SSA_NAME:
14253 return integer_valued_real_single_p (t, depth);
14254
14255 case CALL_EXPR:
14256 {
14257 tree arg0 = (call_expr_nargs (t) > 0
14258 ? CALL_EXPR_ARG (t, 0)
14259 : NULL_TREE);
14260 tree arg1 = (call_expr_nargs (t) > 1
14261 ? CALL_EXPR_ARG (t, 1)
14262 : NULL_TREE);
14263 return integer_valued_real_call_p (get_call_combined_fn (t),
14264 arg0, arg1, depth);
14265 }
14266
14267 default:
14268 return integer_valued_real_invalid_p (t, depth);
14269 }
14270 }
14271
14272 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14273 attempt to fold the expression to a constant without modifying TYPE,
14274 OP0 or OP1.
14275
14276 If the expression could be simplified to a constant, then return
14277 the constant. If the expression would not be simplified to a
14278 constant, then return NULL_TREE. */
14279
14280 tree
14281 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14282 {
14283 tree tem = fold_binary (code, type, op0, op1);
14284 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14285 }
14286
14287 /* Given the components of a unary expression CODE, TYPE and OP0,
14288 attempt to fold the expression to a constant without modifying
14289 TYPE or OP0.
14290
14291 If the expression could be simplified to a constant, then return
14292 the constant. If the expression would not be simplified to a
14293 constant, then return NULL_TREE. */
14294
14295 tree
14296 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14297 {
14298 tree tem = fold_unary (code, type, op0);
14299 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14300 }
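
/* Usage sketch (illustrative): these wrappers refuse non-constant
   results.  Assuming the usual tree-building helpers:

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                         two, three);   // INTEGER_CST 5

   Folding x + 0 would instead simplify to x, which is not TREE_CONSTANT,
   so NULL_TREE would be returned.  */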
14301
14302 /* If EXP represents referencing an element in a constant string
14303 (either via pointer arithmetic or array indexing), return the
14304 tree representing the value accessed, otherwise return NULL. */
14305
14306 tree
14307 fold_read_from_constant_string (tree exp)
14308 {
14309 if ((TREE_CODE (exp) == INDIRECT_REF
14310 || TREE_CODE (exp) == ARRAY_REF)
14311 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14312 {
14313 tree exp1 = TREE_OPERAND (exp, 0);
14314 tree index;
14315 tree string;
14316 location_t loc = EXPR_LOCATION (exp);
14317
14318 if (TREE_CODE (exp) == INDIRECT_REF)
14319 string = string_constant (exp1, &index, NULL, NULL);
14320 else
14321 {
14322 tree low_bound = array_ref_low_bound (exp);
14323 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
14324
14325 /* Optimize the special-case of a zero lower bound.
14326
14327 We convert the low_bound to sizetype to avoid some problems
14328 with constant folding. (E.g. suppose the lower bound is 1,
14332 14329 and its mode is QI. Without the conversion, (ARRAY
14330 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14331 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14332 if (! integer_zerop (low_bound))
14333 index = size_diffop_loc (loc, index,
14334 fold_convert_loc (loc, sizetype, low_bound));
14335
14336 string = exp1;
14337 }
14338
14339 scalar_int_mode char_mode;
14340 if (string
14341 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14342 && TREE_CODE (string) == STRING_CST
14343 && TREE_CODE (index) == INTEGER_CST
14344 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14345 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
14346 &char_mode)
14347 && GET_MODE_SIZE (char_mode) == 1)
14348 return build_int_cst_type (TREE_TYPE (exp),
14349 (TREE_STRING_POINTER (string)
14350 [TREE_INT_CST_LOW (index)]));
14351 }
14352 return NULL;
14353 }
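
/* Worked example: the reference "abc"[1] (an ARRAY_REF into a STRING_CST
   with a zero lower bound) folds to the INTEGER_CST 'b'.  Non-constant
   indexes, reads at or past TREE_STRING_LENGTH, and element modes wider
   than one byte all return NULL.  */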
14354
14355 /* Folds a read from vector element at IDX of vector ARG. */
14356
14357 tree
14358 fold_read_from_vector (tree arg, poly_uint64 idx)
14359 {
14360 unsigned HOST_WIDE_INT i;
14361 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
14362 && known_ge (idx, 0u)
14363 && idx.is_constant (&i))
14364 {
14365 if (TREE_CODE (arg) == VECTOR_CST)
14366 return VECTOR_CST_ELT (arg, i);
14367 else if (TREE_CODE (arg) == CONSTRUCTOR)
14368 {
14369 if (i >= CONSTRUCTOR_NELTS (arg))
14370 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
14371 return CONSTRUCTOR_ELT (arg, i)->value;
14372 }
14373 }
14374 return NULL_TREE;
14375 }
14376
14377 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14378 an integer constant, real, or fixed-point constant.
14379
14380 TYPE is the type of the result. */
14381
14382 static tree
14383 fold_negate_const (tree arg0, tree type)
14384 {
14385 tree t = NULL_TREE;
14386
14387 switch (TREE_CODE (arg0))
14388 {
14389 case REAL_CST:
14390 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14391 break;
14392
14393 case FIXED_CST:
14394 {
14395 FIXED_VALUE_TYPE f;
14396 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14397 &(TREE_FIXED_CST (arg0)), NULL,
14398 TYPE_SATURATING (type));
14399 t = build_fixed (type, f);
14400 /* Propagate overflow flags. */
14401 if (overflow_p | TREE_OVERFLOW (arg0))
14402 TREE_OVERFLOW (t) = 1;
14403 break;
14404 }
14405
14406 default:
14407 if (poly_int_tree_p (arg0))
14408 {
14409 wi::overflow_type overflow;
14410 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
14411 t = force_fit_type (type, res, 1,
14412 (overflow && ! TYPE_UNSIGNED (type))
14413 || TREE_OVERFLOW (arg0));
14414 break;
14415 }
14416
14417 gcc_unreachable ();
14418 }
14419
14420 return t;
14421 }
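
/* Worked example: negating the most negative value of a signed type wraps
   back to itself, so for 32-bit int the result of negating -2147483648 is
   again -2147483648 and force_fit_type is told to set TREE_OVERFLOW on
   the returned constant.  */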
14422
14423 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14424 an integer constant or real constant.
14425
14426 TYPE is the type of the result. */
14427
14428 tree
14429 fold_abs_const (tree arg0, tree type)
14430 {
14431 tree t = NULL_TREE;
14432
14433 switch (TREE_CODE (arg0))
14434 {
14435 case INTEGER_CST:
14436 {
14437 /* If the value is unsigned or non-negative, then the absolute value
14438 is the same as the ordinary value. */
14439 wide_int val = wi::to_wide (arg0);
14440 wi::overflow_type overflow = wi::OVF_NONE;
14441 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
14442 ;
14443
14444 /* If the value is negative, then the absolute value is
14445 its negation. */
14446 else
14447 val = wi::neg (val, &overflow);
14448
14449 /* Force to the destination type, set TREE_OVERFLOW for signed
14450 TYPE only. */
14451 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
14452 }
14453 break;
14454
14455 case REAL_CST:
14456 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14457 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14458 else
14459 t = arg0;
14460 break;
14461
14462 default:
14463 gcc_unreachable ();
14464 }
14465
14466 return t;
14467 }
14468
14469 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14470 constant. TYPE is the type of the result. */
14471
14472 static tree
14473 fold_not_const (const_tree arg0, tree type)
14474 {
14475 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14476
14477 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
14478 }
14479
14480 /* Given CODE, a relational operator, the target type, TYPE and two
14481 constant operands OP0 and OP1, return the result of the
14482 relational operation. If the result is not a compile time
14483 constant, then return NULL_TREE. */
14484
14485 static tree
14486 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14487 {
14488 int result, invert;
14489
14490 /* From here on, the only cases we handle are when the result is
14491 known to be a constant. */
14492
14493 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14494 {
14495 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14496 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14497
14498 /* Handle the cases where either operand is a NaN. */
14499 if (real_isnan (c0) || real_isnan (c1))
14500 {
14501 switch (code)
14502 {
14503 case EQ_EXPR:
14504 case ORDERED_EXPR:
14505 result = 0;
14506 break;
14507
14508 case NE_EXPR:
14509 case UNORDERED_EXPR:
14510 case UNLT_EXPR:
14511 case UNLE_EXPR:
14512 case UNGT_EXPR:
14513 case UNGE_EXPR:
14514 case UNEQ_EXPR:
14515 result = 1;
14516 break;
14517
14518 case LT_EXPR:
14519 case LE_EXPR:
14520 case GT_EXPR:
14521 case GE_EXPR:
14522 case LTGT_EXPR:
14523 if (flag_trapping_math)
14524 return NULL_TREE;
14525 result = 0;
14526 break;
14527
14528 default:
14529 gcc_unreachable ();
14530 }
14531
14532 return constant_boolean_node (result, type);
14533 }
14534
14535 return constant_boolean_node (real_compare (code, c0, c1), type);
14536 }
14537
14538 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14539 {
14540 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14541 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14542 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14543 }
14544
14545 /* Handle equality/inequality of complex constants. */
14546 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14547 {
14548 tree rcond = fold_relational_const (code, type,
14549 TREE_REALPART (op0),
14550 TREE_REALPART (op1));
14551 tree icond = fold_relational_const (code, type,
14552 TREE_IMAGPART (op0),
14553 TREE_IMAGPART (op1));
14554 if (code == EQ_EXPR)
14555 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14556 else if (code == NE_EXPR)
14557 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14558 else
14559 return NULL_TREE;
14560 }
14561
14562 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14563 {
14564 if (!VECTOR_TYPE_P (type))
14565 {
14566 /* Have vector comparison with scalar boolean result. */
14567 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14568 && known_eq (VECTOR_CST_NELTS (op0),
14569 VECTOR_CST_NELTS (op1)));
14570 unsigned HOST_WIDE_INT nunits;
14571 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
14572 return NULL_TREE;
14573 for (unsigned i = 0; i < nunits; i++)
14574 {
14575 tree elem0 = VECTOR_CST_ELT (op0, i);
14576 tree elem1 = VECTOR_CST_ELT (op1, i);
14577 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
14578 if (tmp == NULL_TREE)
14579 return NULL_TREE;
14580 if (integer_zerop (tmp))
14581 return constant_boolean_node (code == NE_EXPR, type);
14582 }
14583 return constant_boolean_node (code == EQ_EXPR, type);
14584 }
14585 tree_vector_builder elts;
14586 if (!elts.new_binary_operation (type, op0, op1, false))
14587 return NULL_TREE;
14588 unsigned int count = elts.encoded_nelts ();
14589 for (unsigned i = 0; i < count; i++)
14590 {
14591 tree elem_type = TREE_TYPE (type);
14592 tree elem0 = VECTOR_CST_ELT (op0, i);
14593 tree elem1 = VECTOR_CST_ELT (op1, i);
14594
14595 tree tem = fold_relational_const (code, elem_type,
14596 elem0, elem1);
14597
14598 if (tem == NULL_TREE)
14599 return NULL_TREE;
14600
14601 elts.quick_push (build_int_cst (elem_type,
14602 integer_zerop (tem) ? 0 : -1));
14603 }
14604
14605 return elts.build ();
14606 }
14607
14608 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14609
14610 To compute GT, swap the arguments and do LT.
14611 To compute GE, do LT and invert the result.
14612 To compute LE, swap the arguments, do LT and invert the result.
14613 To compute NE, do EQ and invert the result.
14614
14615 Therefore, the code below must handle only EQ and LT. */
14616
14617 if (code == LE_EXPR || code == GT_EXPR)
14618 {
14619 std::swap (op0, op1);
14620 code = swap_tree_comparison (code);
14621 }
14622
14623 /* Note that it is safe to invert for real values here because we
14624 have already handled the one case that it matters. */
14625
14626 invert = 0;
14627 if (code == NE_EXPR || code == GE_EXPR)
14628 {
14629 invert = 1;
14630 code = invert_tree_comparison (code, false);
14631 }
14632
14636 14633 /* Compute a result for LT or EQ if args permit;
14637 14634 otherwise return NULL_TREE. */
14635 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14636 {
14637 if (code == EQ_EXPR)
14638 result = tree_int_cst_equal (op0, op1);
14639 else
14640 result = tree_int_cst_lt (op0, op1);
14641 }
14642 else
14643 return NULL_TREE;
14644
14645 if (invert)
14646 result ^= 1;
14647 return constant_boolean_node (result, type);
14648 }
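
/* Worked example: 3 > 2 is canonicalized by the swap above into 2 < 3,
   which tree_int_cst_lt answers directly, and 3 >= 2 is computed as
   !(3 < 2).  With a NaN operand, ordered REAL_CST comparisons such as <
   fold to false only when trapping math is disabled, since folding away
   the comparison would lose the "invalid" FP exception.  */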
14649
14650 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14651 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14652 itself. */
14653
14654 tree
14655 fold_build_cleanup_point_expr (tree type, tree expr)
14656 {
14657 /* If the expression does not have side effects then we don't have to wrap
14658 it with a cleanup point expression. */
14659 if (!TREE_SIDE_EFFECTS (expr))
14660 return expr;
14661
14665 14662 /* If the expression is a RETURN_EXPR, check whether its operand, or the
14666 14663 right hand side of the MODIFY_EXPR inside it, has side effects. If
14667 14664 either one is free of side effects, we don't need to wrap the
14668 14665 expression in a cleanup point expression. Note we don't check the left
14669 14666 hand side of the modify because it should always be the return decl. */
14667 if (TREE_CODE (expr) == RETURN_EXPR)
14668 {
14669 tree op = TREE_OPERAND (expr, 0);
14670 if (!op || !TREE_SIDE_EFFECTS (op))
14671 return expr;
14672 op = TREE_OPERAND (op, 1);
14673 if (!TREE_SIDE_EFFECTS (op))
14674 return expr;
14675 }
14676
14677 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14678 }
14679
14680 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14681 of an indirection through OP0, or NULL_TREE if no simplification is
14682 possible. */
14683
14684 tree
14685 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14686 {
14687 tree sub = op0;
14688 tree subtype;
14689 poly_uint64 const_op01;
14690
14691 STRIP_NOPS (sub);
14692 subtype = TREE_TYPE (sub);
14693 if (!POINTER_TYPE_P (subtype)
14694 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14695 return NULL_TREE;
14696
14697 if (TREE_CODE (sub) == ADDR_EXPR)
14698 {
14699 tree op = TREE_OPERAND (sub, 0);
14700 tree optype = TREE_TYPE (op);
14701
14702 /* *&CONST_DECL -> to the value of the const decl. */
14703 if (TREE_CODE (op) == CONST_DECL)
14704 return DECL_INITIAL (op);
14705 /* *&p => p; make sure to handle *&"str"[cst] here. */
14706 if (type == optype)
14707 {
14708 tree fop = fold_read_from_constant_string (op);
14709 if (fop)
14710 return fop;
14711 else
14712 return op;
14713 }
14714 /* *(foo *)&fooarray => fooarray[0] */
14715 else if (TREE_CODE (optype) == ARRAY_TYPE
14716 && type == TREE_TYPE (optype)
14717 && (!in_gimple_form
14718 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14719 {
14720 tree type_domain = TYPE_DOMAIN (optype);
14721 tree min_val = size_zero_node;
14722 if (type_domain && TYPE_MIN_VALUE (type_domain))
14723 min_val = TYPE_MIN_VALUE (type_domain);
14724 if (in_gimple_form
14725 && TREE_CODE (min_val) != INTEGER_CST)
14726 return NULL_TREE;
14727 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14728 NULL_TREE, NULL_TREE);
14729 }
14730 /* *(foo *)&complexfoo => __real__ complexfoo */
14731 else if (TREE_CODE (optype) == COMPLEX_TYPE
14732 && type == TREE_TYPE (optype))
14733 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14734 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14735 else if (VECTOR_TYPE_P (optype)
14736 && type == TREE_TYPE (optype))
14737 {
14738 tree part_width = TYPE_SIZE (type);
14739 tree index = bitsize_int (0);
14740 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
14741 index);
14742 }
14743 }
14744
14745 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14746 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
14747 {
14748 tree op00 = TREE_OPERAND (sub, 0);
14749 tree op01 = TREE_OPERAND (sub, 1);
14750
14751 STRIP_NOPS (op00);
14752 if (TREE_CODE (op00) == ADDR_EXPR)
14753 {
14754 tree op00type;
14755 op00 = TREE_OPERAND (op00, 0);
14756 op00type = TREE_TYPE (op00);
14757
14758 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14759 if (VECTOR_TYPE_P (op00type)
14760 && type == TREE_TYPE (op00type)
14761 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
14762 but we want to treat offsets with MSB set as negative.
14763 For the code below negative offsets are invalid and
14764 TYPE_SIZE of the element is something unsigned, so
14765 check whether op01 fits into poly_int64, which implies
14766 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
14767 then just use poly_uint64 because we want to treat the
14768 value as unsigned. */
14769 && tree_fits_poly_int64_p (op01))
14770 {
14771 tree part_width = TYPE_SIZE (type);
14772 poly_uint64 max_offset
14773 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14774 * TYPE_VECTOR_SUBPARTS (op00type));
14775 if (known_lt (const_op01, max_offset))
14776 {
14777 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
14778 return fold_build3_loc (loc,
14779 BIT_FIELD_REF, type, op00,
14780 part_width, index);
14781 }
14782 }
14783 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14784 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14785 && type == TREE_TYPE (op00type))
14786 {
14787 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
14788 const_op01))
14789 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14790 }
14791 /* ((foo *)&fooarray)[1] => fooarray[1] */
14792 else if (TREE_CODE (op00type) == ARRAY_TYPE
14793 && type == TREE_TYPE (op00type))
14794 {
14795 tree type_domain = TYPE_DOMAIN (op00type);
14796 tree min_val = size_zero_node;
14797 if (type_domain && TYPE_MIN_VALUE (type_domain))
14798 min_val = TYPE_MIN_VALUE (type_domain);
14799 poly_uint64 type_size, index;
14800 if (poly_int_tree_p (min_val)
14801 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
14802 && multiple_p (const_op01, type_size, &index))
14803 {
14804 poly_offset_int off = index + wi::to_poly_offset (min_val);
14805 op01 = wide_int_to_tree (sizetype, off);
14806 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14807 NULL_TREE, NULL_TREE);
14808 }
14809 }
14810 }
14811 }
14812
14813 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14814 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14815 && type == TREE_TYPE (TREE_TYPE (subtype))
14816 && (!in_gimple_form
14817 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14818 {
14819 tree type_domain;
14820 tree min_val = size_zero_node;
14821 sub = build_fold_indirect_ref_loc (loc, sub);
14822 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14823 if (type_domain && TYPE_MIN_VALUE (type_domain))
14824 min_val = TYPE_MIN_VALUE (type_domain);
14825 if (in_gimple_form
14826 && TREE_CODE (min_val) != INTEGER_CST)
14827 return NULL_TREE;
14828 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14829 NULL_TREE);
14830 }
14831
14832 return NULL_TREE;
14833 }
14834
14835 /* Builds an expression for an indirection through T, simplifying some
14836 cases. */
14837
14838 tree
14839 build_fold_indirect_ref_loc (location_t loc, tree t)
14840 {
14841 tree type = TREE_TYPE (TREE_TYPE (t));
14842 tree sub = fold_indirect_ref_1 (loc, type, t);
14843
14844 if (sub)
14845 return sub;
14846
14847 return build1_loc (loc, INDIRECT_REF, type, t);
14848 }
14849
14850 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14851
14852 tree
14853 fold_indirect_ref_loc (location_t loc, tree t)
14854 {
14855 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14856
14857 if (sub)
14858 return sub;
14859 else
14860 return t;
14861 }
14862
14863 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14864 whose result is ignored. The type of the returned tree need not be
14865 the same as the original expression. */
14866
14867 tree
14868 fold_ignored_result (tree t)
14869 {
14870 if (!TREE_SIDE_EFFECTS (t))
14871 return integer_zero_node;
14872
14873 for (;;)
14874 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14875 {
14876 case tcc_unary:
14877 t = TREE_OPERAND (t, 0);
14878 break;
14879
14880 case tcc_binary:
14881 case tcc_comparison:
14882 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14883 t = TREE_OPERAND (t, 0);
14884 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14885 t = TREE_OPERAND (t, 1);
14886 else
14887 return t;
14888 break;
14889
14890 case tcc_expression:
14891 switch (TREE_CODE (t))
14892 {
14893 case COMPOUND_EXPR:
14894 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14895 return t;
14896 t = TREE_OPERAND (t, 0);
14897 break;
14898
14899 case COND_EXPR:
14900 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14901 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14902 return t;
14903 t = TREE_OPERAND (t, 0);
14904 break;
14905
14906 default:
14907 return t;
14908 }
14909 break;
14910
14911 default:
14912 return t;
14913 }
14914 }
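
/* Example: for an ignored (x++, y) the COMPOUND_EXPR arm drops the
   side-effect-free y and keeps folding x++, while an ignored comparison
   such as x < f () is reduced to the operand that still carries side
   effects, here f ().  */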
14915
14916 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14917
14918 tree
14919 round_up_loc (location_t loc, tree value, unsigned int divisor)
14920 {
14921 tree div = NULL_TREE;
14922
14923 if (divisor == 1)
14924 return value;
14925
14926 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14927 have to do anything. Only do this when we are not given a const,
14928 because in that case, this check is more expensive than just
14929 doing it. */
14930 if (TREE_CODE (value) != INTEGER_CST)
14931 {
14932 div = build_int_cst (TREE_TYPE (value), divisor);
14933
14934 if (multiple_of_p (TREE_TYPE (value), value, div))
14935 return value;
14936 }
14937
14938 /* If divisor is a power of two, simplify this to bit manipulation. */
14939 if (pow2_or_zerop (divisor))
14940 {
14941 if (TREE_CODE (value) == INTEGER_CST)
14942 {
14943 wide_int val = wi::to_wide (value);
14944 bool overflow_p;
14945
14946 if ((val & (divisor - 1)) == 0)
14947 return value;
14948
14949 overflow_p = TREE_OVERFLOW (value);
14950 val += divisor - 1;
14951 val &= (int) -divisor;
14952 if (val == 0)
14953 overflow_p = true;
14954
14955 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14956 }
14957 else
14958 {
14959 tree t;
14960
14961 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14962 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14963 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14964 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14965 }
14966 }
14967 else
14968 {
14969 if (!div)
14970 div = build_int_cst (TREE_TYPE (value), divisor);
14971 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14972 value = size_binop_loc (loc, MULT_EXPR, value, div);
14973 }
14974
14975 return value;
14976 }
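
/* Worked example: for a power-of-two divisor the rounding is pure bit
   manipulation, e.g. with DIVISOR == 8 the result is (VALUE + 7) & -8,
   so 37 rounds up to 40.  A non-power of two such as 12 goes through
   CEIL_DIV_EXPR and MULT_EXPR instead: 37 becomes ceil (37/12) * 12,
   i.e. 48.  */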
14977
14978 /* Likewise, but round down. */
14979
14980 tree
14981 round_down_loc (location_t loc, tree value, int divisor)
14982 {
14983 tree div = NULL_TREE;
14984
14985 gcc_assert (divisor > 0);
14986 if (divisor == 1)
14987 return value;
14988
14989 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14990 have to do anything. Only do this when we are not given a const,
14991 because in that case, this check is more expensive than just
14992 doing it. */
14993 if (TREE_CODE (value) != INTEGER_CST)
14994 {
14995 div = build_int_cst (TREE_TYPE (value), divisor);
14996
14997 if (multiple_of_p (TREE_TYPE (value), value, div))
14998 return value;
14999 }
15000
15001 /* If divisor is a power of two, simplify this to bit manipulation. */
15002 if (pow2_or_zerop (divisor))
15003 {
15004 tree t;
15005
15006 t = build_int_cst (TREE_TYPE (value), -divisor);
15007 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15008 }
15009 else
15010 {
15011 if (!div)
15012 div = build_int_cst (TREE_TYPE (value), divisor);
15013 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15014 value = size_binop_loc (loc, MULT_EXPR, value, div);
15015 }
15016
15017 return value;
15018 }
15019
15023 15020 /* Return a pointer to the base of the object addressed by EXP, and
15024 15021 extract information about the offset of the access, storing it
15025 15022 in *PBITPOS and *POFFSET. */
15023
15024 static tree
15025 split_address_to_core_and_offset (tree exp,
15026 poly_int64_pod *pbitpos, tree *poffset)
15027 {
15028 tree core;
15029 machine_mode mode;
15030 int unsignedp, reversep, volatilep;
15031 poly_int64 bitsize;
15032 location_t loc = EXPR_LOCATION (exp);
15033
15034 if (TREE_CODE (exp) == ADDR_EXPR)
15035 {
15036 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15037 poffset, &mode, &unsignedp, &reversep,
15038 &volatilep);
15039 core = build_fold_addr_expr_loc (loc, core);
15040 }
15041 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
15042 {
15043 core = TREE_OPERAND (exp, 0);
15044 STRIP_NOPS (core);
15045 *pbitpos = 0;
15046 *poffset = TREE_OPERAND (exp, 1);
15047 if (poly_int_tree_p (*poffset))
15048 {
15049 poly_offset_int tem
15050 = wi::sext (wi::to_poly_offset (*poffset),
15051 TYPE_PRECISION (TREE_TYPE (*poffset)));
15052 tem <<= LOG2_BITS_PER_UNIT;
15053 if (tem.to_shwi (pbitpos))
15054 *poffset = NULL_TREE;
15055 }
15056 }
15057 else
15058 {
15059 core = exp;
15060 *pbitpos = 0;
15061 *poffset = NULL_TREE;
15062 }
15063
15064 return core;
15065 }
15066
15067 /* Returns true if addresses of E1 and E2 differ by a constant, false
15068 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15069
15070 bool
15071 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
15072 {
15073 tree core1, core2;
15074 poly_int64 bitpos1, bitpos2;
15075 tree toffset1, toffset2, tdiff, type;
15076
15077 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15078 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15079
15080 poly_int64 bytepos1, bytepos2;
15081 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
15082 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
15083 || !operand_equal_p (core1, core2, 0))
15084 return false;
15085
15086 if (toffset1 && toffset2)
15087 {
15088 type = TREE_TYPE (toffset1);
15089 if (type != TREE_TYPE (toffset2))
15090 toffset2 = fold_convert (type, toffset2);
15091
15092 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15093 if (!cst_and_fits_in_hwi (tdiff))
15094 return false;
15095
15096 *diff = int_cst_value (tdiff);
15097 }
15098 else if (toffset1 || toffset2)
15099 {
15100 /* If only one of the offsets is non-constant, the difference cannot
15101 be a constant. */
15102 return false;
15103 }
15104 else
15105 *diff = 0;
15106
15107 *diff += bytepos1 - bytepos2;
15108 return true;
15109 }
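
/* Worked example: for int a[10] on a target with 32-bit int, &a[3] and
   &a[1] share the core &a and get constant bit positions 96 and 32, so
   *DIFF becomes (96 - 32) / BITS_PER_UNIT == 8 bytes.  If the cores
   differ, or only one side has a constant offset, the function returns
   false.  */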
15110
15111 /* Return OFF converted to a pointer offset type suitable as offset for
15112 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
15113 tree
15114 convert_to_ptrofftype_loc (location_t loc, tree off)
15115 {
15116 return fold_convert_loc (loc, sizetype, off);
15117 }
15118
15119 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15120 tree
15121 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
15122 {
15123 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15124 ptr, convert_to_ptrofftype_loc (loc, off));
15125 }
15126
15127 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15128 tree
15129 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
15130 {
15131 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15132 ptr, size_int (off));
15133 }
15134
15135 /* Return a pointer P to a NUL-terminated string representing the sequence
15136 of constant characters referred to by SRC (or a subsequence of such
15137 characters within it if SRC is a reference to a string plus some
15138 constant offset). If STRLEN is non-null, store the number of bytes
15139 in the string constant including the terminating NUL char. *STRLEN is
15140 typically strlen(P) + 1 in the absence of embedded NUL characters. */
15141
15142 const char *
15143 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen /* = NULL */)
15144 {
15145 tree offset_node;
15146 tree mem_size;
15147
15148 if (strlen)
15149 *strlen = 0;
15150
15151 src = string_constant (src, &offset_node, &mem_size, NULL);
15152 if (src == 0)
15153 return NULL;
15154
15155 unsigned HOST_WIDE_INT offset = 0;
15156 if (offset_node != NULL_TREE)
15157 {
15158 if (!tree_fits_uhwi_p (offset_node))
15159 return NULL;
15160 else
15161 offset = tree_to_uhwi (offset_node);
15162 }
15163
15164 if (!tree_fits_uhwi_p (mem_size))
15165 return NULL;
15166
15167 /* STRING_LENGTH is the size of the string literal, including any
15168 embedded NULs. STRING_SIZE is the size of the array the string
15169 literal is stored in. */
15170 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
15171 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size);
15172
15173 /* Ideally this would turn into a gcc_checking_assert over time. */
15174 if (string_length > string_size)
15175 string_length = string_size;
15176
15177 const char *string = TREE_STRING_POINTER (src);
15178
15183 if (string_length == 0
15184 || offset >= string_size)
15185 return NULL;
15186
15187 if (strlen)
15188 {
15189 /* Compute and store the length of the substring at OFFSET.
15190 All offsets past the initial length refer to null strings. */
15191 if (offset < string_length)
15192 *strlen = string_length - offset;
15193 else
15194 *strlen = 1;
15195 }
15196 else
15197 {
15198 tree eltype = TREE_TYPE (TREE_TYPE (src));
15199 /* Support only properly NUL-terminated single byte strings. */
15200 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
15201 return NULL;
15202 if (string[string_length - 1] != '\0')
15203 return NULL;
15204 }
15205
15206 return offset < string_length ? string + offset : "";
15207 }
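
/* Worked example: for the reference "hello" + 2 this returns a pointer to
   "llo" and, when STRLEN is non-null, stores 4 there (three characters
   plus the terminating NUL).  An offset at or past the size of the
   backing array yields NULL rather than a pointer past the storage.  */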
15208
15209 /* Given a tree T, compute which bits in T may be nonzero. */
15210
15211 wide_int
15212 tree_nonzero_bits (const_tree t)
15213 {
15214 switch (TREE_CODE (t))
15215 {
15216 case INTEGER_CST:
15217 return wi::to_wide (t);
15218 case SSA_NAME:
15219 return get_nonzero_bits (t);
15220 case NON_LVALUE_EXPR:
15221 case SAVE_EXPR:
15222 return tree_nonzero_bits (TREE_OPERAND (t, 0));
15223 case BIT_AND_EXPR:
15224 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15225 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15226 case BIT_IOR_EXPR:
15227 case BIT_XOR_EXPR:
15228 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15229 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15230 case COND_EXPR:
15231 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
15232 tree_nonzero_bits (TREE_OPERAND (t, 2)));
15233 CASE_CONVERT:
15234 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15235 TYPE_PRECISION (TREE_TYPE (t)),
15236 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
15237 case PLUS_EXPR:
15238 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
15239 {
15240 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
15241 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
15242 if (wi::bit_and (nzbits1, nzbits2) == 0)
15243 return wi::bit_or (nzbits1, nzbits2);
15244 }
15245 break;
15246 case LSHIFT_EXPR:
15247 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15248 {
15249 tree type = TREE_TYPE (t);
15250 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15251 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15252 TYPE_PRECISION (type));
15253 return wi::neg_p (arg1)
15254 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
15255 : wi::lshift (nzbits, arg1);
15256 }
15257 break;
15258 case RSHIFT_EXPR:
15259 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15260 {
15261 tree type = TREE_TYPE (t);
15262 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15263 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15264 TYPE_PRECISION (type));
15265 return wi::neg_p (arg1)
15266 ? wi::lshift (nzbits, -arg1)
15267 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
15268 }
15269 break;
15270 default:
15271 break;
15272 }
15273
15274 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
15275 }
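
/* Worked example: for (x & 0xF0) | 1 the BIT_AND_EXPR arm narrows the
   all-ones estimate for x down to 0xF0 and the BIT_IOR_EXPR arm unions
   in the constant 1, giving possibly-nonzero bits of 0xF1.  The
   PLUS_EXPR arm applies only when the operands share no bits, in which
   case the addition cannot carry and behaves like bitwise OR.  */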
15276
15277 #if CHECKING_P
15278
15279 namespace selftest {
15280
15281 /* Helper functions for writing tests of folding trees. */
15282
15283 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
15284
15285 static void
15286 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
15287 tree constant)
15288 {
15289 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
15290 }
15291
15295 15292 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
15293 wrapping WRAPPED_EXPR. */
15294
15295 static void
15296 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
15297 tree wrapped_expr)
15298 {
15299 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
15300 ASSERT_NE (wrapped_expr, result);
15301 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
15302 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
15303 }
15304
15305 /* Verify that various arithmetic binary operations are folded
15306 correctly. */
15307
15308 static void
15309 test_arithmetic_folding ()
15310 {
15311 tree type = integer_type_node;
15312 tree x = create_tmp_var_raw (type, "x");
15313 tree zero = build_zero_cst (type);
15314 tree one = build_int_cst (type, 1);
15315
15316 /* Addition. */
15317 /* 1 <-- (0 + 1) */
15318 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
15319 one);
15320 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
15321 one);
15322
15323 /* (nonlvalue)x <-- (x + 0) */
15324 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
15325 x);
15326
15327 /* Subtraction. */
15328 /* 0 <-- (x - x) */
15329 assert_binop_folds_to_const (x, MINUS_EXPR, x,
15330 zero);
15331 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
15332 x);
15333
15334 /* Multiplication. */
15335 /* 0 <-- (x * 0) */
15336 assert_binop_folds_to_const (x, MULT_EXPR, zero,
15337 zero);
15338
15339 /* (nonlvalue)x <-- (x * 1) */
15340 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
15341 x);
15342 }
15343
15344 /* Verify that various binary operations on vectors are folded
15345 correctly. */
15346
15347 static void
15348 test_vector_folding ()
15349 {
15350 tree inner_type = integer_type_node;
15351 tree type = build_vector_type (inner_type, 4);
15352 tree zero = build_zero_cst (type);
15353 tree one = build_one_cst (type);
15354 tree index = build_index_vector (type, 0, 1);
15355
15356 /* Verify equality tests that return a scalar boolean result. */
15357 tree res_type = boolean_type_node;
15358 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
15359 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
15360 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
15361 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
15362 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
15363 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15364 index, one)));
15365 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
15366 index, index)));
15367 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15368 index, index)));
15369 }
15370
15371 /* Verify folding of VEC_DUPLICATE_EXPRs. */
15372
15373 static void
15374 test_vec_duplicate_folding ()
15375 {
15376 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
15377 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
15378 /* This will be 1 if VEC_MODE isn't a vector mode. */
15379 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
15380
15381 tree type = build_vector_type (ssizetype, nunits);
15382 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
15383 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
15384 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
15385 }
15386
15387 /* Run all of the selftests within this file. */
15388
15389 void
15390 fold_const_c_tests ()
15391 {
15392 test_arithmetic_folding ();
15393 test_vector_folding ();
15394 test_vec_duplicate_folding ();
15395 }
15396
15397 } // namespace selftest
15398
15399 #endif /* CHECKING_P */